diff --git a/.ci.settings.xml b/.ci.settings.xml index b82c38710..f14990bdf 100644 --- a/.ci.settings.xml +++ b/.ci.settings.xml @@ -1,5 +1,19 @@ + + + + maven-dev + ${env.ARTIFACTORY_USER} + ${env.ARTIFACTORY_TOKEN} + + + + maven-all-virtual + ${env.ARTIFACTORY_USER} + ${env.ARTIFACTORY_TOKEN} + + org.sonarsource.scanner.maven @@ -10,15 +24,17 @@ true + java-client https://sonarqube.split-internal.com ${env.SONAR_TOKEN} - 300 + . + pom.xml,src/main/** + . + src/test/** + .csv + **/matchers/**/*.* https://travis-ci.com/splitio/java-client https://github.com/splitio/java-client - java-client - ./src - **/test/**/*.*,**/testing/**/*.* - **/ai/**/*.*,**/jdbc/**/*.*,**/mpt/**/*.*,**/jcr/**/*.*,**/JDBC* diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000..9e3198100 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @splitio/sdk diff --git a/.github/linter/checkstyle-suppressions.xml b/.github/linter/checkstyle-suppressions.xml new file mode 100644 index 000000000..81d235392 --- /dev/null +++ b/.github/linter/checkstyle-suppressions.xml @@ -0,0 +1,9 @@ + + + + + + + \ No newline at end of file diff --git a/.github/linter/google-java-style.xml b/.github/linter/google-java-style.xml new file mode 100644 index 000000000..e885305ed --- /dev/null +++ b/.github/linter/google-java-style.xml @@ -0,0 +1,380 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml deleted file mode 100644 index 6ff03b06e..000000000 --- a/.github/workflows/ci-cd.yml +++ /dev/null @@ -1,58 +0,0 @@ -name: ci -on: - push: - branches: - - master - pull_request: - branches: - - master - -jobs: - 
maven-install: - name: Build - runs-on: ubuntu-latest - services: - redis: - image: redis - ports: - - 6379:6379 - steps: - - name: Checkout code - uses: actions/checkout@v2 - with: - fetch-depth: 0 - - - name: Set up JDK 8 - uses: actions/setup-java@v2 - with: - distribution: 'adopt' - java-version: '8' - - - name: Setup Environment - run: | - cp .ci.settings.xml ${HOME}/.m2/settings.xml - - - name: Maven install - run: mvn --batch-mode -T 1C -U clean install - env: - SONAR_TOKEN: ${{ secrets.SONARQUBE_TOKEN }} - MAVEN_OPTS: "-XX:InitialHeapSize=2G -XX:MaxHeapSize=2G -XX:+PrintCommandLineFlags -XX:ThreadStackSize=65536 -XX:-TieredCompilation -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn" - muteProps: "true" - - - name: SonarQube Scan (Pull Request) - if: github.event_name == 'pull_request' - env: - SONAR_TOKEN: ${{ secrets.SONARQUBE_TOKEN }} - run: | - mvn --batch-mode sonar:sonar -DskipTests -q \ - -Dsonar.pullrequest.key=${{ github.event.pull_request.number }} \ - -Dsonar.pullrequest.branch=${{ github.event.pull_request.head.ref }} \ - -Dsonar.pullrequest.base=${{ github.event.pull_request.base.ref }} \ - - - name: SonarQube Scan (Push) - if: github.event_name == 'push' - env: - SONAR_TOKEN: ${{ secrets.SONARQUBE_TOKEN }} - run: | - mvn --batch-mode sonar:sonar -DskipTests -q \ - -Dsonar.branch.name= ${{ github.event.pull_request.base.ref }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 000000000..e5333445a --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,78 @@ +name: ci + +on: + push: + branches: + - '**' + pull_request: + branches: + - master + - development + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number }} + cancel-in-progress: true + +jobs: + test: + name: Test + runs-on: ubuntu-latest + services: + redis: + image: redis + ports: + - 6379:6379 + strategy: + fail-fast: false + matrix: + jdk: + - '8' + - '11' + - '19' + env: + 
ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }} + ARTIFACTORY_TOKEN: ${{ secrets.ARTIFACTORY_TOKEN }} + SONAR_TOKEN: ${{ secrets.SONARQUBE_TOKEN }} + MAVEN_OPTS: "-XX:InitialHeapSize=2G -XX:MaxHeapSize=2G -XX:+PrintCommandLineFlags -XX:ThreadStackSize=65536 -XX:-TieredCompilation -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn" + muteProps: "true" + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Setup JDK ${{ matrix.jdk }} + uses: actions/setup-java@v3 + with: + distribution: 'adopt' + java-version: ${{ matrix.jdk }} + + - name: Setup Maven + run: cp .ci.settings.xml ${HOME}/.m2/settings.xml + + - name: Test + if: matrix.jdk == '8' && github.event_name == 'pull_request' && github.ref != 'refs/heads/master' && github.ref != 'refs/heads/development' + run: mvn --batch-mode clean install + + - name: Linter + if: matrix.jdk == '8' && github.event_name == 'pull_request' && github.ref != 'refs/heads/master' && github.ref != 'refs/heads/development' + run: mvn checkstyle::check + +# - name: Deploy +# if: matrix.jdk == '8' && github.event_name == 'push' && github.ref != 'refs/heads/master' && github.ref != 'refs/heads/development' +# run: mvn --batch-mode deploy -P test + + - name: SonarQube Scan (Push) + if: matrix.jdk == '11' && github.event_name == 'push' && (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/development') + run: | + mvn --batch-mode clean verify sonar:sonar \ + -Dsonar.branch.name=${{ github.ref_name }} + + - name: SonarQube Scan (Pull Request) + if: matrix.jdk == '11' && github.event_name == 'pull_request' + run: | + mvn --batch-mode clean verify sonar:sonar \ + -Dsonar.pullrequest.key=${{ github.event.pull_request.number }} \ + -Dsonar.pullrequest.branch=${{ github.event.pull_request.head.ref }} \ + -Dsonar.pullrequest.base=${{ github.event.pull_request.base.ref }} diff --git a/.github/workflows/codeql-analysis.yml 
b/.github/workflows/codeql-analysis.yml deleted file mode 100644 index 22873164a..000000000 --- a/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,52 +0,0 @@ -name: "Code scanning - action" - -on: - push: - pull_request: - schedule: - - cron: '0 21 * * 3' - -jobs: - CodeQL-Build: - - # CodeQL runs on ubuntu-latest and windows-latest - runs-on: ubuntu-latest - - steps: - - name: Checkout repository - uses: actions/checkout@v2 - with: - # We must fetch at least the immediate parents so that if this is - # a pull request then we can checkout the head. - fetch-depth: 2 - - # If this run was triggered by a pull request event, then checkout - # the head of the pull request instead of the merge commit. - - run: git checkout HEAD^2 - if: ${{ github.event_name == 'pull_request' }} - - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v1 - # Override language selection by uncommenting this and choosing your languages - # with: - # languages: go, javascript, csharp, python, cpp, java - - # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). - # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v1 - - # ℹ️ Command-line programs to run using the OS shell. 
- # 📚 https://git.io/JvXDl - - # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines - # and modify them (or add more) to build your code if your project - # uses a compiled language - - #- run: | - # make bootstrap - # make release - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v1 diff --git a/.github/workflows/update-license-year.yml b/.github/workflows/update-license-year.yml index 0403624eb..7e0a945f9 100644 --- a/.github/workflows/update-license-year.yml +++ b/.github/workflows/update-license-year.yml @@ -13,18 +13,18 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: fetch-depth: 0 - + - name: Set Current year run: "echo CURRENT=$(date +%Y) >> $GITHUB_ENV" - + - name: Set Previous Year run: "echo PREVIOUS=$(($CURRENT-1)) >> $GITHUB_ENV" - name: Update LICENSE - uses: jacobtomlinson/gha-find-replace@v2 + uses: jacobtomlinson/gha-find-replace@v3 with: find: ${{ env.PREVIOUS }} replace: ${{ env.CURRENT }} @@ -38,7 +38,7 @@ jobs: git commit -m "Updated License Year" -a - name: Create Pull Request - uses: peter-evans/create-pull-request@v3 + uses: peter-evans/create-pull-request@v5 with: token: ${{ secrets.GITHUB_TOKEN }} title: Update License Year diff --git a/CHANGES.txt b/CHANGES.txt new file mode 100644 index 000000000..8973780ef --- /dev/null +++ b/CHANGES.txt @@ -0,0 +1,382 @@ +4.18.2 (Oct 15, 2025) +- Fixed an issue where Manager.splitNames() return incorrect formatted result using redis storage and no custom prefix. +- Added using String only parameter for treatments in FallbackTreatmentConfiguration class. + +4.18.1 (Sep 30, 2025) +- Fixed an issue where Streaming client hangs during token renew process. 
+ +4.18.0 (Sep 12, 2025) +- Added new configuration for Fallback Treatments, which allows setting a treatment value and optional config to be returned in place of "control", either globally or by flag. Read more in our docs. + +4.17.0 (Aug 22, 2025) +- Added a maximum size payload when posting unique keys telemetry in batches +- Added ProxyConfiguration parameter to support proxies, including Harness Forward Proxy, allowing also for more secured authentication options: MTLS, Bearer token and user/password authentication. Read more in our docs. + +4.16.1 (Jul 21, 2025) +- Fixed vulnerabilities: + - Upgraded org.apache.commons-commons-lang3 to 3.18.0 + - Upgraded com.google.code.gson.gson to 2.13.1 + +4.16.0 (May 28, 2025) +- Added support for rule-based segments. These segments determine membership at runtime by evaluating their configured rules against the user attributes provided to the SDK. +- Added support for feature flag prerequisites. This allows customers to define dependency conditions between flags, which are evaluated before any allowlists or targeting rules. + +4.15.0 (Apr 18, 2025) +- Prevent polling threads from starting when the SDK calls destroy method. +- Added a new optional argument to the client `getTreatment` methods to allow passing additional evaluation options, such as a map of properties to append to the generated impressions sent to Split backend. Read more in our docs. + +4.14.0 (Jan 17, 2025) +- Added support for the new impressions tracking toggle available on feature flags, both respecting the setting and including the new field being returned on SplitView type objects. Read more in our docs. +- Cleaned unused imports to fix a collision issue. + +4.13.1 (Dec 5, 2024) +- Updated `org.apache.httpcomponents.client5` dependency to 5.4.1 to fix vulnerabilities. +- Updated `redis.clients` dependency to 4.4.8 to fix vulnerabilities. + +4.13.0 (Sep 13, 2024) +- Added support for Kerberos Proxy authentication. 
+ +4.12.1 (Jun 10, 2024) +- Fixed deadlock for virtual thread in Push Manager and SSE Client. + +4.12.0 (May 15, 2024) +- Added support for targeting rules based on semantic versions (https://semver.org/). +- Added the logic to handle correctly when the SDK receives an unsupported Matcher type. +- Enhanced SDK Headers for Authorization Frameworks +- Cleaned unused imports and renaming some methods +- Fixed empty token handler thanks to @hpark-miovision + +4.11.1 (Feb 29, 2024) +- Fixed deadlock in UniqueKeysTracker when sending Unique Keys. + +4.11.0 (Jan 9, 2024) +- Added impressionsListener method in the IntegrationConfig builder to set Sync or Async Listener execution. +- Fixed localhost to read files with yml ending. + +4.10.2 (Dec 1, 2023) +- Added getTreatmentsByFlagSets without attributes. +- Fixed some issues for flag sets: Not logging a warning when using flag sets that don't contain cached feature flags. + +4.10.1 (Nov 8, 2023) +- Fixed handler for response http headers. + +4.10.0 (Nov 2, 2023) +- Added support for Flag Sets on the SDK, which enables grouping feature flags and interacting with the group rather than individually (more details in our documentation): + - Added new variations of the get treatment methods to support evaluating flags in given flag set/s. + - getTreatmentsByFlagSet and getTreatmentsByFlagSets + - getTreatmentWithConfigByFlagSets and getTreatmentsWithConfigByFlagSets + - Added a new optional Flag Sets Filter configuration option. This allows the SDK and Split services to only synchronize the flags in the specified flag sets, avoiding unused or unwanted flags from being synced on the SDK instance, bringing all the benefits from a reduced payload. + - Note: Only applicable when the SDK is in charge of the rollout data synchronization. When not applicable, the SDK will log a warning on init. + - Updated the following SDK manager methods to expose flag sets on flag views. 
+- Added `defaultTreatment` property to the `SplitView` object returned by the `split` and `splits` methods of the SDK manager. +- Added new `threadFactory` property in SDK config. It allows to use of Virtual Threading. + +4.9.0 (Sep 8, 2023) +- Added InputStream config for localhost mode providing a solution when the file is inside a jar. +- Fixed track impressions to send all impressions to the listener. +- Fixed SyncManager shutdown to stop SSE only when is streaming mode on. + +4.8.1 (Aug 1, 2023) +- Applied linting rules to the code. +- Fixed an issue when the prefix is empty for Redis settings. + +4.8.0 (Jul 18, 2023) +- Improved streaming architecture implementation to apply feature flag updates from the notification received which is now enhanced, improving efficiency and reliability of the whole update system. +- Updated `com.google.guava` dependence to 32.0.1 for fixing a vulnerability. +- Updated SegmentFetcher for better readability. + +4.7.2 (May 16, 2023) +- Updated default treatment to be control for yaml and json localhost. +- Updated terminology on the SDKs codebase to be more aligned with current standard without causing a breaking change. The core change is the term split for feature flag on things like logs and javadoc comments. + +4.7.1 (Apr 10, 2023) +- Added SHA for feature flag and segment fetcher in localhost json. +- Updated `org.yaml.snakeyaml` dependence to 2.0 for fixing a vulnerability. +- Fixed Redis integration, changing []dtos.Key to dtos.Key +- Fixed destroy for consumer mode. + +4.7.0 (Jan 30, 2023) +- Added support to use JSON files in localhost mode. +- Improved logs to have more information. +- Made streaming connection retryable in the case goes down. +- Fixed the logs messages in SplitClientImpl. + +4.6.0 (Nov 28, 2022) +- Added support redis cluster by providing JedisCluster object. +- Updated Jedis to 4.3.0 to support tls. 
+ +4.5.0 (Oct 12, 2022) +- Added a new impressions mode for the SDK called NONE, to be used in factory when there is no desire to capture impressions on an SDK factory to feed Split's analytics engine. Running NONE mode, the SDK will only capture unique keys evaluated for a particular feature flag instead of full blown impressions. + +4.4.8 (Sep 16, 2022) +- Updated `org.yaml.snakeyaml` dependence to 1.32 for fixing a vulnerability. +- Improved SplitClientIntegrationTest removing ignore annotation in some test cases. + +4.4.7 (Sep 14, 2022) +- Fixed fromImpression in KeyImpression to map previous time. +- Updated `org.yaml.snakeyaml` dependence to 1.31 for fixing a vulnerability. +- Fixed buildKeyWithPrefix and added CommonRedis to share it. + +4.4.6 (Sep 6, 2022) +- Made junit as provided dependency. +- Fixed destroy logic to stop all the threads in execution. +- Refactored SyncManager to be able to start/stop tasks and removed that logic from destroy method. +- Updated EventTask, decoupled Storage logic, and added a new scheduler task in charge of flushing events. + +4.4.5 (Jul 29, 2022) +- Fixed in Synchronizer not properly synchronizing newly referenced segments. +- Update pom files to have profiles for releases. + +4.4.4 (May 24, 2022) +- Updated `com.google.code.gson` to 2.9.0 for fixing vulnerability. + +4.4.3 (May 9, 2022) +- Updated shaded dependencies: `org.apache`, `org.checkerframework` and `org.yaml.snakeyaml` +- Updated `org.slf4j` to 1.7.36 for fixing vulnerability. + +4.4.2 (Feb 22, 2022) +- Cleaned up log messages in segments logic. + +4.4.1 (Feb 08, 2022) +- Added improvements in segment fetcher to work as threading pool. +- Fixed backoff handler for streaming auth calls due to an issue that stopped retrying after multiple subsequent errors. +- Updated snakeyaml dependencies and reorganized shaded dependencies. + +4.4.0 (Jan 11, 2022) +- Added support for Redis to keep consistency across multiple SDK instances. 
+- Added logic to fetch multiple feature flags at once on GetTreatments/GetTreatmentsWithChanges. + +4.3.0 (Oct 19, 2021) +- Added support for the SDK to run with a custom implementation of it's internal storage modules, enabling customers to implement this caching in any storage technology of choice and connect it to the SDK instance itself which will use it instead of the in-memory structures. +- Fixed a possible OOM when there were too many Streaming events being captured, applying the expected limit of 20. +- Fixed an NPE caused in a race condition on telemetry which generated noisy logs. (https://github.com/splitio/java-client/issues/251)[#251] + +4.2.1 (Jun 18, 2021) +- Updated settings for httpClient instances to add stale check to more effectively manage connection pools after it was turned off by default on httpClient 5.x.x. +- Updated TelemetryRefreshRate setting default value to 3600s. + +4.2.0 (Jun 7, 2021) +- Updated SDK telemetry storage, metrics and updater to be more effective and send less often. +- Improved the synchronization flow to be more reliable in the event of an edge case generating delay in cache purge propagation, keeping the SDK cache properly synced. +- Fixed issue where the SDK was validating no Feature flag had over 50 conditions (legacy code). +- Bumped guava version to 30. + +4.1.6 (Apr 15, 2021) +- Updated log level and message in some messages. + +4.1.5 (Apr 6, 2021) +- Updated streaming logic to use limited fetch retry attempts. + +4.1.4 (Mar 19, 2021) +- Updated Internal cache structure refactor. +- Updated Streaming revamp with several bugfixes and improved log messages. +- Added Cache-Control header for on-demand requests to sdk-server. +- Updated Localhost Client revamp & bugfix for missing feature flags. 
+ +4.1.3 (Dec 2, 2020) +- Fix Issue when closing SSE Connection +- Updated log-level for some messages + +4.1.2 (Nov 25, 2020) +- Updated junit from 4.12 to 4.13.1 +- Updated HttpClient from 4.5.2 to 5.0.3 + +4.1.1 (Sep 30, 2020) +- Fixed fetch retries after received an SPLIT_CHANGE. + +4.1.0 (Sep 25, 2020) +- Added `OPTIMIZED` and `DEBUG` modes in order to enabling/disabling how impressions are going to be sent into Split servers, + - `OPTIMIZED`: will send unique impressions in a timeframe in order to reduce how many times impressions are posted to Split. + - `DEBUG`: will send every impression generated to Split. + +4.0.1 (Sep 4, 2020) +- Remove jersey. Use custom SSE implementation +- Bumped guava version to 29 + +4.0.0 (Aug 19, 2020) +- Deprecated Java 7 support. Java 8 is the minimum supported version for this and future releases. +- Added support for the new Split streaming architecture. When enabled (default), the SDK will not poll for updates but instead receive notifications every time there's a change in your environments, allowing to process those much quicker. If disabled or in the event of an issue, the SDK will fallback to the known polling mechanism to provide a seamless experience. +- Updated the default of featuresRefreshRate to 60 seconds. + +3.3.4 (Jul 15, 2020) +- Added Impression observer. + +3.3.3 (Apr 7, 2020) + - Fix issue regarding special characters come from feature flags/segments fetchers. + +3.3.2 (Jan 24, 2020) + - Shade com.google.guava as well + +3.3.1 (Nov 1, 2019) + - Allow client to disable sending the IP address & hostname. + +3.3.0 (Sep 23, 2019) +- rename version.properties to splitversion.properties to avoid conflicts with other tools that use the former file name and causes this SDK to not properly report the version +- Adds New Relic Integration. 
Now if the New Relic agent is running, it will attach Impression data to the current transaction in New Relic, + +3.2.4 (Sep 9, 2019) +- Fix small issue regarding traffic type validation on .track() calls. + +3.2.3 (Aug 1, 2019) +- allow to push impressions more often than one every 30 seconds and events flush rate is now customizable + +3.2.2 +- log warn and not error when Feature flag doesn't exist in the environment + +3.2.1 (May 29, 2019) +- Fix issue with events not forwarding the value when sending properties as well. + +3.2.0 (May 24, 2019) +- Add ability to send events with properties + +3.1.1 (May 2, 2019) +- Fix issue with locating localhost mode in windows +- Fix issue when user falls out of traffic allocation and has config data + +3.1.0 (Apr 16, 2019) +- Add Dynamic Configurations +- Support Yaml files for localhost mode + +3.0.9 (Mar 21, 2019) +- Validate feature flag names on getTreatment and manager calls +- Validate traffic type name when calling track +- Check for multiple factories instantiated + +3.0.8 (Mar 7, 2019) +- Ability to disable automatic destroy() on graceful shutdowns. + +3.0.7 +- Removing unused thrown exceptions from build signature +- Setting feature refresh rate to 5 seconds by default + +3.0.6 +- Setting CookieSpec to Standard + +3.0.5 (Jan 5, 2019) +- Adding input validation for fast feedback of incorrect use of the client. + +3.0.4 (Dec 14, 2018) +- Fix potential race condition when segment wait for readiness before they are registered to the readiness gates + +3.0.3 (Dec 12, 2018) +- Expose param to increase thread pool size for segments + +3.0.2 (Dec 12, 2018) +- Fixed traffic allocation issue on 1% + +3.0.1 +- Fix Metric Counters when using Split Proxy. 
+ +3.0.0 +- Moving block until ready out of the factory into the split client and manager client + +2.3.2 (Jul 18, 2018) +- Add support for handling different relative path in the endpoints of the Split Synchronizer + +2.3.1 (Apr 18, 2018) +- Added support for key overrides in localhost mode + +2.3.0 (Jan 15, 2018) +- Add support for .track() + +2.2.2 (Dec 22, 2017) +- Patch for localhost mode to daemonize the thread which currently prevents the JVM from exiting. + +2.2.1 (Dec 22, 2017) +- Transitioning from "no rule matched" to "default rule" and from "rules not found" to "definition not found" + +2.2.0 (Sep 22, 2017) +- Patch DependencyMatcherData to not error out when dependencies are used +- Default logging is less verbose now +- Add support for Proxy + +2.1.0 (July 19th, 2017) +- Add support for new boolean and regular expression matchers +- Support for Dependency matcher: 'in feature flag "xxx" treatments ["xxx","yyy"]' + +2.0.4 (June 2nd, 2017) +- Support to destroy all clients and background processes before shutdown via splitFactory.destroy() or splitClient.destroy() +- Define SLF4j in the root pom instead of twice. + +2.0.2 (May 16th, 2017) +- Support for Set based matchers: 'has any of', 'has all of', 'is equal to', and 'is part of' +- Support for string matchers: 'starts with', 'ends with', and 'contains' +- Added ability to attach metadata to impressions for getTreatment calls + +2.0.1 (May 3rd, 2017) +- Fix issue for Java 7 and compatibility with TLS1.2 + +2.0 (Apr 26, 2017) +- Transitioning to Murmur3 for key hashing +- Hashing Algorithm dependent on Split algo field + +1.1.7 (Mar 31, 2017) +- We can now specify a percentage of traffic that can be in the Split - i.e. traffic allocation. +- You can now register your own ImpressionListener. SplitClientConfig#impressionListener. +- You can no longer set any refresh rates to be less than 30 seconds. 
+ +1.1.6 (Feb 27, 2017) +- Allowing users to set the Off-The-Grid directory +- Adding Live Reload of Split File in Off-The-Grid mode + +1.1.5 (Feb 8, 2017) +- Introducing SplitClientForTest and SplitTestRunner for JUnit. More information at split.io/blog/feature-flag-testing + +1.1.4 (Feb 8, 2017) +- Use less guava. + +1.1.3 (Jan 9, 2017) +- Impression to include the changeNumber of the Split that served a getTreatment call. +- expose disableLabels + +1.1.2 (Dec 19, 2016) +- Adding SplitManager.splitNames() support to retrieve feature names. +- Improving Java 1.7 compatibility for Hashcodes + +1.1.1 +- Consistently labeling Rules in messages + +1.1 +- Move java-client into it's own repo. + +1.0.10 +- The SDK now sends rule labels back to the server + +1.0.9 +- Introduced new API: getTreatment(Key key, String feature, Map attributes); +- Deprecated SplitClientBuilder. Clients are expected to use SplitFactoryBuilder instead. + +1.0.8 +- Fixed NPE in SplitManager + +1.0.7 +- Implement Factory pattern and expose Split manager +- Stop parsing a Feature flag when there are more than 50 conditions to prevent DDOS. + +1.0.6 +- Replaced Jersey with HttpComponents +- Dropped SplitClientConfig#eventsEndpoint +- Modified SplitClientConfig#endpoint to take in both sdk and events api. +- Move away from Guava cache for impressions and use an BlockingArrayList instead. + +1.0.5 +- shade jackson-databind to split.shade.xxxxx +- remove hamcrest and mockito from fat jar +- include only io.split, io.codigo and (shaded) guava in the fat jar +- Clean up JAVA 1.8 dependencies making sure they all are major version 51. + +1.0.4 +- blockUntilReady support +- impressions and metrics to events.split.io + +1.0.3 +- add https support + +1.0.2 +- corrects pom file issues + +1.0.1 +- add support for attributes + +1.0.0 +- First release diff --git a/LICENSE b/LICENSE index 051b5fd98..df08de3fb 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright © 2022 Split Software, Inc. 
+Copyright © 2025 Split Software, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/README.md b/README.md index c361a6894..e7ebca564 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,13 @@ # Split Java SDK - -[![Build Status](https://api.travis-ci.com/splitio/java-client.svg?branch=master)](https://api.travis-ci.com/splitio/java-client) +![Build Status](https://github.com/splitio/java-client/actions/workflows/ci-cd.yml/badge.svg?branch=master) ## Overview -This SDK is designed to work with Split, the platform for controlled rollouts, serving features to your users via the Split feature flag to manage your complete customer experience. +This SDK is designed to work with Split, the platform for controlled rollouts, serving features to your users via feature flags to manage your complete customer experience. [![Twitter Follow](https://img.shields.io/twitter/follow/splitsoftware.svg?style=social&label=Follow&maxAge=1529000)](https://twitter.com/intent/follow?screen_name=splitsoftware) ## Compatibility -This SDK is compatible with Java 6 and higher. +This SDK is compatible with Java 8 and higher. 
## Getting started Below is a simple example that describes the instantiation and most basic usage of our SDK: @@ -23,19 +22,19 @@ public class App { SplitClientConfig config = SplitClientConfig.builder() .setBlockUntilReadyTimeout(10000) .build(); - SplitFactory splitFactory = SplitFactoryBuilder.build("SDK_API_KEY", config); + SplitFactory splitFactory = SplitFactoryBuilder.build("YOUR_SDK_KEY", config); SplitClient client = splitFactory.client(); try { client.blockUntilReady(); } catch (TimeoutException | InterruptedException e) { - // log & handle + // log & handle } - String treatment = client.getTreatment("CUSTOMER_ID", "SPLIT_NAME"); + String treatment = client.getTreatment("CUSTOMER_ID", "FEATURE_FLAG_NAME"); if (treatment.equals("on")) { - // Feature is enabled for this user! + // Feature flag is enabled for this user! } else if (treatment.equals("off")) { - // Feature is disabled for this user! + // Feature flag is disabled for this user! } else { // Unable to perform evaluation. } @@ -44,7 +43,7 @@ public class App { ``` ## Submitting issues - + The Split team monitors all issues submitted to this [issue tracker](https://github.com/splitio/java-client/issues). We encourage you to use this issue tracker to submit any bug reports, feedback, and feature enhancements. We'll do our best to respond in a timely manner. ## Contributing @@ -54,15 +53,16 @@ Please see [Contributors Guide](CONTRIBUTORS-GUIDE.md) to find all you need to s Licensed under the Apache License, Version 2.0. See: [Apache License](http://www.apache.org/licenses/). ## About Split - + Split is the leading Feature Delivery Platform for engineering teams that want to confidently deploy features as fast as they can develop them. 
Split’s fine-grained management, real-time monitoring, and data-driven experimentation ensure that new features will improve the customer experience without breaking or degrading performance. Companies like Twilio, Salesforce, GoDaddy and WePay trust Split to power their feature delivery. - + To learn more about Split, contact hello@split.io, or get started with feature flags for free at https://www.split.io/signup. - + Split has built and maintains SDKs for: - + * Java [Github](https://github.com/splitio/java-client) [Docs](https://help.split.io/hc/en-us/articles/360020405151-Java-SDK) -* Javascript [Github](https://github.com/splitio/javascript-client) [Docs](https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK) +* JavaScript [Github](https://github.com/splitio/javascript-client) [Docs](https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK) +* JavaScript for Browser [Github](https://github.com/splitio/javascript-browser-client) [Docs](https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK) * Node [Github](https://github.com/splitio/javascript-client) [Docs](https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK) * .NET [Github](https://github.com/splitio/dotnet-client) [Docs](https://help.split.io/hc/en-us/articles/360020240172--NET-SDK) * Ruby [Github](https://github.com/splitio/ruby-client) 
[Docs](https://help.split.io/hc/en-us/articles/360020673251-Ruby-SDK) @@ -71,9 +71,13 @@ Split has built and maintains SDKs for: * GO [Github](https://github.com/splitio/go-client) [Docs](https://help.split.io/hc/en-us/articles/360020093652-Go-SDK) * Android [Github](https://github.com/splitio/android-client) [Docs](https://help.split.io/hc/en-us/articles/360020343291-Android-SDK) * iOS [Github](https://github.com/splitio/ios-client) [Docs](https://help.split.io/hc/en-us/articles/360020401491-iOS-SDK) - +* Angular [Github](https://github.com/splitio/angular-sdk-plugin) [Docs](https://help.split.io/hc/en-us/articles/6495326064397-Angular-utilities) +* React [Github](https://github.com/splitio/react-client) [Docs](https://help.split.io/hc/en-us/articles/360038825091-React-SDK) +* React Native [Github](https://github.com/splitio/react-native-client) [Docs](https://help.split.io/hc/en-us/articles/4406066357901-React-Native-SDK) +* Redux [Github](https://github.com/splitio/redux-client) [Docs](https://help.split.io/hc/en-us/articles/360038851551-Redux-SDK) + For a comprehensive list of open source projects visit our [Github page](https://github.com/splitio?utf8=%E2%9C%93&query=%20only%3Apublic%20). 
- + **Learn more about Split:** - + Visit [split.io/product](https://www.split.io/product) for an overview of Split, or visit our documentation at [help.split.io](http://help.split.io) for more detailed information. diff --git a/client/CHANGES.txt b/client/CHANGES.txt deleted file mode 100644 index cae3c09b5..000000000 --- a/client/CHANGES.txt +++ /dev/null @@ -1,244 +0,0 @@ -4.4.2 (Feb 22, 2022) -- Cleaned up log messages in segments logic. - -4.4.1 (Feb 08, 2022) -- Added improvements in segment fetcher to work as threading pool. -- Fixed backoff handler for streaming auth calls due to an issue that stopped retrying after multiple subsequent errors. -- Updated snakeyaml dependencies and reorganized shaded dependencies. - -4.4.0 (Jan 11, 2022) -- Added support for Redis to keep consistency across multiple SDK instances. -- Added logic to fetch multiple splits at once on GetTreatments/GetTreatmentsWithChanges. - -4.3.0 (Oct 19, 2021) -- Added support for the SDK to run with a custom implementation of it's internal storage modules, enabling customers to implement this caching in any storage technology of choice and connect it to the SDK instance itself which will use it instead of the in-memory structures. -- Fixed a possible OOM when there were too many Streaming events being captured, applying the expected limit of 20. -- Fixed an NPE caused in a race condition on telemetry which generated noisy logs. (https://github.com/splitio/java-client/issues/251)[#251] - -4.2.1 (Jun 18, 2021) -- Updated settings for httpClient instances to add stale check to more effectively manage connection pools after it was turned off by default on httpClient 5.x.x. -- Updated TelemetryRefreshRate setting default value to 3600s. - -4.2.0 (Jun 7, 2021) -- Updated SDK telemetry storage, metrics and updater to be more effective and send less often. 
-- Improved the synchronization flow to be more reliable in the event of an edge case generating delay in cache purge propagation, keeping the SDK cache properly synced. -- Fixed issue where the SDK was validating no Split had over 50 conditions (legacy code). -- Bumped guava version to 30. - -4.1.6 (Apr 15, 2021) -- Updated log level and message in some messages. - -4.1.5 (Apr 6, 2021) -- Updated streaming logic to use limited fetch retry attempts. - -4.1.4 (Mar 19, 2021) -- Updated Internal cache structure refactor. -- Updated Streaming revamp with several bugfixes and improved log messages. -- Added Cache-Control header for on-demand requests to sdk-server. -- Updated Localhost Client revamp & bugfix for missing splits. - -4.1.3 (Dec 2, 2020) -- Fix Issue when closing SSE Connection -- Updated log-level for some messages - -4.1.2 (Nov 25, 2020) -- Updated junit from 4.12 to 4.13.1 -- Updated HttpClient from 4.5.2 to 5.0.3 - -4.1.1 (Sep 30, 2020) -- Fixed fetch retries after received an SPLIT_CHANGE. - -4.1.0 (Sep 25, 2020) -- Add local impressions deduping (enabled by default) - -4.0.1 (Sep 4, 2020) -- Remove jersey. Use custom SSE implementation -- Bumped guava version to 29 - -4.0.0 (Aug 19, 2020) -- Deprecated Java 7 support. Java 8 is the minimum supported version for this and future releases. -- Added support for the new Split streaming architecture. When enabled (default), the SDK will not poll for updates but instead receive notifications every time there's a change in your environments, allowing to process those much quicker. If disabled or in the event of an issue, the SDK will fallback to the known polling mechanism to provide a seamless experience. -- Updated the default of featuresRefreshRate to 60 seconds. - -3.3.4 (Jul 15, 2020) -- Added Impression observer. - -3.3.3 (Apr 7, 2020) - - Fix issue regarding special characters come from split/segments fetchers. 
- -3.3.2 (Jan 24, 2020) - - Shade com.google.guava as well - -3.3.1 (Nov 1, 2019) - - Allow client to disable sending the IP address & hostname. - -3.3.0 (Sep 23, 2019) -- rename version.properties to splitversion.properties to avoid conflicts with other tools that use the former file name and causes this SDK to not properly report the version -- Adds New Relic Integration. Now if the New Relic agent is running, it will attach Impression data to the current transaction in New Relic, - -3.2.4 (Sep 9, 2019) -- Fix small issue regarding traffic type validation on .track() calls. - -3.2.3 (Aug 1, 2019) -- allow to push impressions more often than one every 30 seconds and events flush rate is now customizable - -3.2.2 -- log warn and not error when Split doesn't exist in the environment - -3.2.1 (May 29, 2019) -- Fix issue with events not forwarding the value when sending properties as well. - -3.2.0 (May 24, 2019) -- Add ability to send events with properties - -3.1.1 (May 2, 2019) -- Fix issue with locating localhost mode in windows -- Fix issue when user falls out of traffic allocation and has config data - -3.1.0 (Apr 16, 2019) -- Add Dynamic Configurations -- Support Yaml files for localhost mode - -3.0.9 (Mar 21, 2019) -- Validate split names on getTreatment and manager calls -- Validate traffic type name when calling track -- Check for multiple factories instantiated - -3.0.8 (Mar 7, 2019) -- Ability to disable automatic destroy() on graceful shutdowns. - -3.0.7 -- Removing unused thrown exceptions from build signature -- Setting feature refresh rate to 5 seconds by default - -3.0.6 -- Setting CookieSpec to Standard - -3.0.5 (Jan 5, 2019) -- Adding input validation for fast feedback of incorrect use of the client. 
- -3.0.4 (Dec 14, 2018) -- Fix potential race condition when segment wait for readiness before they are registered to the readiness gates - -3.0.3 (Dec 12, 2018) -- Expose param to increase thread pool size for segments - -3.0.2 (Dec 12, 2018) -- Fixed traffic allocation issue on 1% - -3.0.1 -- Fix Metric Counters when using Split Proxy. - -3.0.0 -- Moving block until ready out of the factory into the split client and manager client - -2.3.2 (Jul 18, 2018) -- Add support for handling different relative path in the endpoints of the Split Synchronizer - -2.3.1 (Apr 18, 2018) -- Added support for key overrides in localhost mode - -2.3.0 (Jan 15, 2018) -- Add support for .track() - -2.2.2 (Dec 22, 2017) -- Patch for localhost mode to daemonize the thread which currently prevents the JVM from exiting. - -2.2.1 (Dec 22, 2017) -- Transitioning from "no rule matched" to "default rule" and from "rules not found" to "definition not found" - -2.2.0 (Sep 22, 2017) -- Patch DependencyMatcherData to not error out when dependencies are used -- Default logging is less verbose now -- Add support for Proxy - -2.1.0 (July 19th, 2017) -- Add support for new boolean and regular expression matchers -- Support for Dependency matcher: 'in split "xxx" treatments ["xxx","yyy"]' - -2.0.4 (June 2nd, 2017) -- Support to destroy all clients and background processes before shutdown via splitFactory.destroy() or splitClient.destroy() -- Define SLF4j in the root pom instead of twice. 
- -2.0.2 (May 16th, 2017) -- Support for Set based matchers: 'has any of', 'has all of', 'is equal to', and 'is part of' -- Support for string matchers: 'starts with', 'ends with', and 'contains' -- Added ability to attach metadata to impressions for getTreatment calls - -2.0.1 (May 3rd, 2017) -- Fix issue for Java 7 and compatibility with TLS1.2 - -2.0 (Apr 26, 2017) -- Transitioning to Murmur3 for key hashing -- Hashing Algorithm dependent on Split algo field - -1.1.7 (Mar 31, 2017) -- We can now specify a percentage of traffic that can be in the Split - i.e. traffic allocation. -- You can now register your own ImpressionListener. SplitClientConfig#impressionListener. -- You can no longer set any refresh rates to be less than 30 seconds. - -1.1.6 (Feb 27, 2017) -- Allowing users to set the Off-The-Grid directory -- Adding Live Reload of Split File in Off-The-Grid mode - -1.1.5 (Feb 8, 2017) -- Introducing SplitClientForTest and SplitTestRunner for JUnit. More information at split.io/blog/feature-flag-testing - -1.1.4 (Feb 8, 2017) -- Use less guava. - -1.1.3 (Jan 9, 2017) -- Impression to include the changeNumber of the Split that served a getTreatment call. -- expose disableLabels - -1.1.2 (Dec 19, 2016) -- Adding SplitManager.splitNames() support to retrieve feature names. -- Improving Java 1.7 compatibility for Hashcodes - -1.1.1 -- Consistently labeling Rules in messages - -1.1 -- Move java-client into it's own repo. - -1.0.10 -- The SDK now sends rule labels back to the server - -1.0.9 -- Introduced new API: getTreatment(Key key, String feature, Map attributes); -- Deprecated SplitClientBuilder. Clients are expected to use SplitFactoryBuilder instead. - -1.0.8 -- Fixed NPE in SplitManager - -1.0.7 -- Implement Factory pattern and expose Split manager -- Stop parsing a Split when there are more than 50 conditions to prevent DDOS. 
- -1.0.6 -- Replaced Jersey with HttpComponents -- Dropped SplitClientConfig#eventsEndpoint -- Modified SplitClientConfig#endpoint to take in both sdk and events api. -- Move away from Guava cache for impressions and use an BlockingArrayList instead. - -1.0.5 -- shade jackson-databind to split.shade.xxxxx -- remove hamcrest and mockito from fat jar -- include only io.split, io.codigo and (shaded) guava in the fat jar -- Clean up JAVA 1.8 dependencies making sure they all are major version 51. - -1.0.4 -- blockUntilReady support -- impressions and metrics to events.split.io - -1.0.3 -- add https support - -1.0.2 -- corrects pom file issues - -1.0.1 -- add support for attributes - -1.0.0 -- First release diff --git a/client/pom.xml b/client/pom.xml index 3a9e56430..198f80a3d 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -5,13 +5,36 @@ io.split.client java-client-parent - 4.4.2 + 4.18.2 + 4.18.2 java-client jar Java Client Java SDK for Split + + + release + + + + org.sonatype.central + central-publishing-maven-plugin + 0.8.0 + true + + false + central + false + published + + + + + + + @@ -20,8 +43,8 @@ 3.3 false - 8 - 8 + ${maven.compiler.source} + ${maven.compiler.target} @@ -45,7 +68,9 @@ io.split.schemas:* io.codigo.grammar:* org.apache.httpcomponents.* - com.google.* + org.apache.hc.* + com.google.code.gson:gson + com.google.guava:guava org.yaml:snakeyaml:* @@ -61,8 +86,16 @@ - org.apache.http - split.org.apache.http + org.apache + split.org.apache + + + org.checkerframework + split.org.checkerframework + + + org.yaml.snakeyaml + split.org.yaml.snakeyaml com.google @@ -100,19 +133,6 @@ - - - org.sonatype.plugins - nexus-staging-maven-plugin - 1.6.3 - true - - ossrh - https://oss.sonatype.org/ - true - false - - src/main/resources/splitversion.properties @@ -124,43 +144,51 @@ + + + 1.7.36 + 8 + 8 + + io.split.client pluggable-storage - 1.0.0 + 2.1.0 compile com.google.guava guava - 30.0-jre + 32.0.1-jre org.slf4j 
slf4j-api + ${slf4j.api.version} org.apache.httpcomponents.client5 httpclient5 - 5.0.3 + 5.5 com.google.code.gson gson - 2.6.2 + 2.13.1 org.yaml snakeyaml - 1.26 + 2.0 org.apache.commons commons-lang3 - 3.4 + 3.18.0 test @@ -183,7 +211,7 @@ org.slf4j slf4j-log4j12 - 1.7.21 + ${slf4j.api.version} test @@ -216,5 +244,17 @@ 4.0.3 test + + org.powermock + powermock-module-junit4 + 1.7.4 + test + + + org.powermock + powermock-api-mockito + 1.7.4 + test + diff --git a/client/src/main/java/io/split/Spec.java b/client/src/main/java/io/split/Spec.java new file mode 100644 index 000000000..b2c7de4b3 --- /dev/null +++ b/client/src/main/java/io/split/Spec.java @@ -0,0 +1,12 @@ +package io.split; + +public final class Spec { + + private Spec() { + // restrict instantiation + } + + public static final String SPEC_1_3 = "1.3"; + public static final String SPEC_1_1 = "1.1"; +} + diff --git a/client/src/main/java/io/split/client/AbstractLocalhostSplitFile.java b/client/src/main/java/io/split/client/AbstractLocalhostSplitFile.java deleted file mode 100644 index 2027c6b0f..000000000 --- a/client/src/main/java/io/split/client/AbstractLocalhostSplitFile.java +++ /dev/null @@ -1,104 +0,0 @@ -package io.split.client; - -import com.google.common.base.Preconditions; -import com.sun.nio.file.SensitivityWatchEventModifier; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; -import java.nio.file.FileSystems; -import java.nio.file.Path; -import java.nio.file.StandardWatchEventKinds; -import java.nio.file.WatchEvent; -import java.nio.file.WatchKey; -import java.nio.file.WatchService; -import java.util.Map; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; - -public abstract class AbstractLocalhostSplitFile extends Thread { - private static final Logger _log = LoggerFactory.getLogger(AbstractLocalhostSplitFile.class); - - protected final LocalhostSplitFactory _splitFactory; - protected final File 
_file; - protected final WatchService _watcher; - protected final AtomicBoolean _stop; - - public AbstractLocalhostSplitFile(LocalhostSplitFactory splitFactory, String directory, String fileName) throws IOException { - Preconditions.checkNotNull(directory); - Preconditions.checkNotNull(fileName); - - _splitFactory = Preconditions.checkNotNull(splitFactory); - - // If no directory is set, instantiate the file without parent, otherwise the path separator is inserted - // before the filename in the java.io.File.File(java.lang.String, java.lang.String) class (see line 319). - _file = (directory.length() > 0) ? - new File(directory, fileName) : - new File(fileName); - - _watcher = FileSystems.getDefault().newWatchService(); - _stop = new AtomicBoolean(false); - } - - public boolean isStopped() { - return _stop.get(); - } - - public void stopThread() { - _stop.set(true); - } - - public void registerWatcher() throws IOException { - Path path = _file.toPath().toAbsolutePath().getParent(); - path.register(_watcher, new WatchEvent.Kind[]{StandardWatchEventKinds.ENTRY_MODIFY}, SensitivityWatchEventModifier.HIGH); - } - - @Override - public void run() { - try { - while (!isStopped()) { - WatchKey key; - try { - key = _watcher.poll(250, TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - stopThread(); - return; - } - if (key == null) { - Thread.yield(); - continue; - } - - for (WatchEvent event : key.pollEvents()) { - WatchEvent.Kind kind = event.kind(); - - @SuppressWarnings("unchecked") - WatchEvent ev = (WatchEvent) event; - Path filename = ev.context(); - - if (kind == StandardWatchEventKinds.OVERFLOW) { - Thread.yield(); - continue; - } else if (kind == StandardWatchEventKinds.ENTRY_MODIFY - && filename.toString().equals(_file.getName())) { - Map featureToSplitMap = readOnSplits(); - _splitFactory.updateFeatureToTreatmentMap(featureToSplitMap); - _log.info("Detected change in Local Splits file - Splits Reloaded! 
file={}", _file.getPath()); - } - boolean valid = key.reset(); - if (!valid) { - break; - } - } - Thread.yield(); - } - } catch (IOException e) { - _log.error("Error reading file: path={}", _file.getPath(), e); - stopThread(); - } - } - - public abstract Map readOnSplits() throws IOException; - -} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/ApiKeyCounter.java b/client/src/main/java/io/split/client/ApiKeyCounter.java index fe65325c8..4ac8c16c5 100644 --- a/client/src/main/java/io/split/client/ApiKeyCounter.java +++ b/client/src/main/java/io/split/client/ApiKeyCounter.java @@ -26,13 +26,13 @@ private static class ApyKeyCounterHolder private static final ApiKeyCounter INSTANCE = new ApiKeyCounter(); } - public void add(String apiKey) { + public void add(String sdkKey) { String message; - if (USED_API_KEYS.contains(apiKey)) { - message = String.format("factory instantiation: You already have %s with this API Key. " + + if (USED_API_KEYS.contains(sdkKey)) { + message = String.format("factory instantiation: You already have %s with this SDK Key. " + "We recommend keeping only one instance of the factory at all times (Singleton pattern) and reusing " + "it throughout your application.", - USED_API_KEYS.count(apiKey) == 1 ? "1 factory" : String.format("%s factories", USED_API_KEYS.count(apiKey))); + USED_API_KEYS.count(sdkKey) == 1 ? "1 factory" : String.format("%s factories", USED_API_KEYS.count(sdkKey))); _log.warn(message); } else if (!USED_API_KEYS.isEmpty()) { message = "factory instantiation: You already have an instance of the Split factory. 
" + @@ -40,31 +40,31 @@ public void add(String apiKey) { "the factory at all times (Singleton pattern) and reusing it throughout your application.“"; _log.warn(message); } - USED_API_KEYS.add(apiKey); + USED_API_KEYS.add(sdkKey); } - public void remove(String apiKey) { - USED_API_KEYS.remove(apiKey); + public void remove(String sdkKey) { + USED_API_KEYS.remove(sdkKey); } /** * Just for test - * @param apiKey + * @param sdkKey * @return */ @VisibleForTesting - boolean isApiKeyPresent(String apiKey) { - return USED_API_KEYS.contains(apiKey); + boolean isApiKeyPresent(String sdkKey) { + return USED_API_KEYS.contains(sdkKey); } /** * Just for test - * @param apiKey + * @param sdkKey * @return */ @VisibleForTesting - int getCount(String apiKey) { - return USED_API_KEYS.count(apiKey); + int getCount(String sdkKey) { + return USED_API_KEYS.count(sdkKey); } public Map getFactoryInstances() { @@ -79,4 +79,4 @@ public Map getFactoryInstances() { public void clearApiKeys() { USED_API_KEYS.clear(); } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/CacheUpdaterService.java b/client/src/main/java/io/split/client/CacheUpdaterService.java index 516ae2389..63b426634 100644 --- a/client/src/main/java/io/split/client/CacheUpdaterService.java +++ b/client/src/main/java/io/split/client/CacheUpdaterService.java @@ -11,10 +11,10 @@ import io.split.engine.matchers.CombiningMatcher; import io.split.engine.matchers.strings.WhitelistMatcher; import io.split.grammar.Treatments; -import io.split.storages.SplitCacheConsumer; import io.split.storages.SplitCacheProducer; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; @@ -51,7 +51,7 @@ public void updateCache(Map map) { String treatment = conditions.size() > 0 ? 
Treatments.CONTROL : localhostSplit.treatment; configurations.put(localhostSplit.treatment, localhostSplit.config); - split = new ParsedSplit(splitName, 0, false, treatment,conditions, LOCALHOST, 0, 100, 0, 0, configurations); + split = new ParsedSplit(splitName, 0, false, treatment,conditions, LOCALHOST, 0, 100, 0, 0, configurations, new HashSet<>(), true, null); parsedSplits.removeIf(parsedSplit -> parsedSplit.feature().equals(splitName)); parsedSplits.add(split); } diff --git a/client/src/main/java/io/split/client/CustomHeaderDecorator.java b/client/src/main/java/io/split/client/CustomHeaderDecorator.java new file mode 100644 index 000000000..934c43681 --- /dev/null +++ b/client/src/main/java/io/split/client/CustomHeaderDecorator.java @@ -0,0 +1,15 @@ +package io.split.client; + +import io.split.client.dtos.RequestContext; + +import java.util.Map; +import java.util.List; + +public interface CustomHeaderDecorator +{ + /** + * Get the additional headers needed for all http operations + * @return HashMap of addition headers + */ + Map> getHeaderOverrides(RequestContext context); +} diff --git a/client/src/main/java/io/split/client/HttpClientDynamicCredentials.java b/client/src/main/java/io/split/client/HttpClientDynamicCredentials.java new file mode 100644 index 000000000..ebcbe6676 --- /dev/null +++ b/client/src/main/java/io/split/client/HttpClientDynamicCredentials.java @@ -0,0 +1,26 @@ +package io.split.client; + +import io.split.client.dtos.BearerCredentialsProvider; +import org.apache.hc.client5.http.auth.AuthScope; +import org.apache.hc.client5.http.auth.BearerToken; +import org.apache.hc.client5.http.auth.Credentials; +import org.apache.hc.core5.http.protocol.HttpContext; + +class HttpClientDynamicCredentials implements org.apache.hc.client5.http.auth.CredentialsProvider { + + private final BearerCredentialsProvider _bearerCredentialsProvider; + + public HttpClientDynamicCredentials (BearerCredentialsProvider bearerCredentialsProvider) { + 
_bearerCredentialsProvider = bearerCredentialsProvider; + } + + @Override + public Credentials getCredentials(AuthScope authScope, HttpContext context) { + + // This Provider is invoked every time a request is made. + // This should invoke a user-custom provider responsible for: + return new BearerToken(_bearerCredentialsProvider.getToken()); + } + +} + diff --git a/client/src/main/java/io/split/client/HttpSegmentChangeFetcher.java b/client/src/main/java/io/split/client/HttpSegmentChangeFetcher.java index a9475320f..bd365590c 100644 --- a/client/src/main/java/io/split/client/HttpSegmentChangeFetcher.java +++ b/client/src/main/java/io/split/client/HttpSegmentChangeFetcher.java @@ -2,30 +2,23 @@ import com.google.common.annotations.VisibleForTesting; import io.split.client.dtos.SegmentChange; +import io.split.client.dtos.SplitHttpResponse; import io.split.client.utils.Json; import io.split.client.utils.Utils; import io.split.engine.common.FetchOptions; -import io.split.engine.metrics.Metrics; import io.split.engine.segments.SegmentChangeFetcher; +import io.split.service.SplitHttpClient; import io.split.telemetry.domain.enums.HTTPLatenciesEnum; import io.split.telemetry.domain.enums.LastSynchronizationRecordsEnum; import io.split.telemetry.domain.enums.ResourceEnum; import io.split.telemetry.storage.TelemetryRuntimeProducer; -import org.apache.hc.client5.http.classic.methods.HttpGet; -import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; -import org.apache.hc.client5.http.impl.classic.CloseableHttpResponse; import org.apache.hc.core5.http.HttpStatus; -import org.apache.hc.core5.http.Header; -import org.apache.hc.core5.http.io.entity.EntityUtils; import org.apache.hc.core5.net.URIBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.URI; import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.util.Arrays; -import java.util.stream.Collectors; import static 
com.google.common.base.Preconditions.checkNotNull; @@ -37,22 +30,17 @@ public final class HttpSegmentChangeFetcher implements SegmentChangeFetcher { private static final String SINCE = "since"; private static final String TILL = "till"; - private static final String PREFIX = "segmentChangeFetcher"; - private static final String CACHE_CONTROL_HEADER_NAME = "Cache-Control"; - private static final String CACHE_CONTROL_HEADER_VALUE = "no-cache"; - private static final String HEADER_FASTLY_DEBUG_NAME = "Fastly-Debug"; - private static final String HEADER_FASTLY_DEBUG_VALUE = "1"; - - private final CloseableHttpClient _client; + private final SplitHttpClient _client; private final URI _target; private final TelemetryRuntimeProducer _telemetryRuntimeProducer; - public static HttpSegmentChangeFetcher create(CloseableHttpClient client, URI root, TelemetryRuntimeProducer telemetryRuntimeProducer) throws URISyntaxException { + public static HttpSegmentChangeFetcher create(SplitHttpClient client, URI root, TelemetryRuntimeProducer telemetryRuntimeProducer) + throws URISyntaxException { return new HttpSegmentChangeFetcher(client, Utils.appendPath(root, "api/segmentChanges"), telemetryRuntimeProducer); } - private HttpSegmentChangeFetcher(CloseableHttpClient client, URI uri, TelemetryRuntimeProducer telemetryRuntimeProducer) { + private HttpSegmentChangeFetcher(SplitHttpClient client, URI uri, TelemetryRuntimeProducer telemetryRuntimeProducer) { _client = client; _target = uri; checkNotNull(_target); @@ -63,8 +51,6 @@ private HttpSegmentChangeFetcher(CloseableHttpClient client, URI uri, TelemetryR public SegmentChange fetch(String segmentName, long since, FetchOptions options) { long start = System.currentTimeMillis(); - CloseableHttpResponse response = null; - try { String path = _target.getPath() + "/" + segmentName; URIBuilder uriBuilder = new URIBuilder(_target) @@ -75,52 +61,31 @@ public SegmentChange fetch(String segmentName, long since, FetchOptions options) } URI uri = 
uriBuilder.build(); - HttpGet request = new HttpGet(uri); - - if(options.cacheControlHeadersEnabled()) { - request.setHeader(CACHE_CONTROL_HEADER_NAME, CACHE_CONTROL_HEADER_VALUE); - } - - if (options.fastlyDebugHeaderEnabled()) { - request.addHeader(HEADER_FASTLY_DEBUG_NAME, HEADER_FASTLY_DEBUG_VALUE); - } - response = _client.execute(request); - options.handleResponseHeaders(Arrays.stream(response.getHeaders()) - .collect(Collectors.toMap(Header::getName, Header::getValue))); + SplitHttpResponse response = _client.get(uri, options, null); - int statusCode = response.getCode(); - - if (statusCode < HttpStatus.SC_OK || statusCode >= HttpStatus.SC_MULTIPLE_CHOICES) { - _telemetryRuntimeProducer.recordSyncError(ResourceEnum.SEGMENT_SYNC, statusCode); - _log.error("Response status was: " + statusCode); - if (statusCode == HttpStatus.SC_FORBIDDEN) { - _log.error("factory instantiation: you passed a browser type api_key, " + - "please grab an api key from the Split console that is of type sdk"); + if (response.statusCode() < HttpStatus.SC_OK || response.statusCode() >= HttpStatus.SC_MULTIPLE_CHOICES) { + _telemetryRuntimeProducer.recordSyncError(ResourceEnum.SEGMENT_SYNC, response.statusCode()); + if (response.statusCode() == HttpStatus.SC_FORBIDDEN) { + _log.error("factory instantiation: you passed a client side type sdkKey, " + + "please grab an sdk key from the Split user interface that is of type server side"); } - throw new IllegalStateException("Could not retrieve segment changes for " + segmentName + "; http return code " + statusCode); + throw new IllegalStateException(String.format("Could not retrieve segment changes for %s, since %s; http return code %s", + segmentName, since, response.statusCode())); } - _telemetryRuntimeProducer.recordSuccessfulSync(LastSynchronizationRecordsEnum.SEGMENTS, System.currentTimeMillis()); - String json = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); - if (_log.isDebugEnabled()) { - _log.debug("Received 
json: " + json); - } - - return Json.fromJson(json, SegmentChange.class); - } catch (Throwable t) { - throw new IllegalStateException("Problem fetching segmentChanges: " + t.getMessage(), t); + return Json.fromJson(response.body(), SegmentChange.class); + } catch (Exception e) { + throw new IllegalStateException(String.format("Error occurred when trying to sync segment: %s, since: %s. Details: %s", + segmentName, since, e), e); } finally { _telemetryRuntimeProducer.recordSyncLatency(HTTPLatenciesEnum.SEGMENTS, System.currentTimeMillis()-start); - Utils.forceClose(response); } - - } @VisibleForTesting URI getTarget() { return _target; } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/HttpSplitChangeFetcher.java b/client/src/main/java/io/split/client/HttpSplitChangeFetcher.java index 9d77654e5..49eb66a99 100644 --- a/client/src/main/java/io/split/client/HttpSplitChangeFetcher.java +++ b/client/src/main/java/io/split/client/HttpSplitChangeFetcher.java @@ -1,33 +1,31 @@ package io.split.client; import com.google.common.annotations.VisibleForTesting; + +import io.split.Spec; import io.split.client.dtos.SplitChange; +import io.split.client.dtos.SplitHttpResponse; +import io.split.client.dtos.SplitChangesOldPayloadDto; +import io.split.client.exceptions.UriTooLongException; import io.split.client.utils.Json; import io.split.client.utils.Utils; import io.split.engine.common.FetchOptions; import io.split.engine.experiments.SplitChangeFetcher; -import io.split.engine.metrics.Metrics; +import io.split.service.SplitHttpClient; import io.split.telemetry.domain.enums.HTTPLatenciesEnum; -import io.split.telemetry.domain.enums.LastSynchronizationRecordsEnum; import io.split.telemetry.domain.enums.ResourceEnum; import io.split.telemetry.storage.TelemetryRuntimeProducer; -import org.apache.hc.client5.http.classic.methods.HttpGet; -import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; -import 
org.apache.hc.client5.http.impl.classic.CloseableHttpResponse; import org.apache.hc.core5.http.HttpStatus; -import org.apache.hc.core5.http.Header; -import org.apache.hc.core5.http.io.entity.EntityUtils; import org.apache.hc.core5.net.URIBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.URI; import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.util.Arrays; -import java.util.stream.Collectors; import static com.google.common.base.Preconditions.checkNotNull; +import static io.split.Spec.SPEC_1_3; +import static io.split.Spec.SPEC_1_1; /** * Created by adilaijaz on 5/30/15. @@ -36,28 +34,30 @@ public final class HttpSplitChangeFetcher implements SplitChangeFetcher { private static final Logger _log = LoggerFactory.getLogger(HttpSplitChangeFetcher.class); private static final String SINCE = "since"; + private static final String RB_SINCE = "rbSince"; private static final String TILL = "till"; - private static final String PREFIX = "splitChangeFetcher"; - - private static final String HEADER_CACHE_CONTROL_NAME = "Cache-Control"; - private static final String HEADER_CACHE_CONTROL_VALUE = "no-cache"; - - private static final String HEADER_FASTLY_DEBUG_NAME = "Fastly-Debug"; - private static final String HEADER_FASTLY_DEBUG_VALUE = "1"; - - private final CloseableHttpClient _client; + private static final String SETS = "sets"; + private static final String SPEC = "s"; + private String specVersion = SPEC_1_3; + private int PROXY_CHECK_INTERVAL_MILLISECONDS_SS = 24 * 60 * 60 * 1000; + private Long _lastProxyCheckTimestamp = 0L; + private final SplitHttpClient _client; private final URI _target; private final TelemetryRuntimeProducer _telemetryRuntimeProducer; + private final boolean _rootURIOverriden; - public static HttpSplitChangeFetcher create(CloseableHttpClient client, URI root, TelemetryRuntimeProducer telemetryRuntimeProducer) throws URISyntaxException { - return new HttpSplitChangeFetcher(client, 
Utils.appendPath(root, "api/splitChanges"), telemetryRuntimeProducer); + public static HttpSplitChangeFetcher create(SplitHttpClient client, URI root, TelemetryRuntimeProducer telemetryRuntimeProducer, + boolean rootURIOverriden) + throws URISyntaxException { + return new HttpSplitChangeFetcher(client, Utils.appendPath(root, "api/splitChanges"), telemetryRuntimeProducer, rootURIOverriden); } - private HttpSplitChangeFetcher(CloseableHttpClient client, URI uri, TelemetryRuntimeProducer telemetryRuntimeProducer) { + private HttpSplitChangeFetcher(SplitHttpClient client, URI uri, TelemetryRuntimeProducer telemetryRuntimeProducer, boolean rootURIOverriden) { _client = client; _target = uri; checkNotNull(_target); _telemetryRuntimeProducer = checkNotNull(telemetryRuntimeProducer); + _rootURIOverriden = rootURIOverriden; } long makeRandomTill() { @@ -66,56 +66,70 @@ long makeRandomTill() { } @Override - public SplitChange fetch(long since, FetchOptions options) { - + public SplitChange fetch(long since, long sinceRBS, FetchOptions options) { long start = System.currentTimeMillis(); - - CloseableHttpResponse response = null; - try { - URIBuilder uriBuilder = new URIBuilder(_target).addParameter(SINCE, "" + since); - if (options.hasCustomCN()) { - uriBuilder.addParameter(TILL, "" + options.targetCN()); - } - URI uri = uriBuilder.build(); - - HttpGet request = new HttpGet(uri); - if(options.cacheControlHeadersEnabled()) { - request.setHeader(HEADER_CACHE_CONTROL_NAME, HEADER_CACHE_CONTROL_VALUE); + URI uri = buildURL(options, since, sinceRBS); + if (specVersion.equals(SPEC_1_1) && (System.currentTimeMillis() - _lastProxyCheckTimestamp >= PROXY_CHECK_INTERVAL_MILLISECONDS_SS)) { + _log.info("Switching to new Feature flag spec ({}) and fetching.", SPEC_1_3); + specVersion = SPEC_1_3; + uri = buildURL(options, -1,-1); } - if (options.fastlyDebugHeaderEnabled()) { - request.addHeader(HEADER_FASTLY_DEBUG_NAME, HEADER_FASTLY_DEBUG_VALUE); + SplitHttpResponse response = 
_client.get(uri, options, null); + if (response.statusCode() < HttpStatus.SC_OK || response.statusCode() >= HttpStatus.SC_MULTIPLE_CHOICES) { + if (response.statusCode() == HttpStatus.SC_REQUEST_URI_TOO_LONG) { + _log.error("The amount of flag sets provided are big causing uri length error."); + throw new UriTooLongException(String.format("Status code: %s. Message: %s", response.statusCode(), response.statusMessage())); + } + + if (response.statusCode() == HttpStatus.SC_BAD_REQUEST && specVersion.equals(Spec.SPEC_1_3) && _rootURIOverriden) { + specVersion = Spec.SPEC_1_1; + _log.warn("Detected proxy without support for Feature flags spec {} version, will switch to spec version {}", + SPEC_1_3, SPEC_1_1); + _lastProxyCheckTimestamp = System.currentTimeMillis(); + return fetch(since, sinceRBS, options); + } + + _telemetryRuntimeProducer.recordSyncError(ResourceEnum.SPLIT_SYNC, response.statusCode()); + throw new IllegalStateException( + String.format("Could not retrieve splitChanges since %s; http return code %s", since, response.statusCode()) + ); } - response = _client.execute(request); - options.handleResponseHeaders(Arrays.stream(response.getHeaders()) - .collect(Collectors.toMap(Header::getName, Header::getValue))); - - int statusCode = response.getCode(); - - if (statusCode < HttpStatus.SC_OK || statusCode >= HttpStatus.SC_MULTIPLE_CHOICES) { - _telemetryRuntimeProducer.recordSyncError(ResourceEnum.SPLIT_SYNC, statusCode); - throw new IllegalStateException("Could not retrieve splitChanges; http return code " + statusCode); + if (specVersion.equals(Spec.SPEC_1_1)) { + return Json.fromJson(response.body(), SplitChangesOldPayloadDto.class).toSplitChange(); } + SplitChange splitChange = Json.fromJson(response.body(), SplitChange.class); + splitChange.clearCache = _lastProxyCheckTimestamp != 0; + _lastProxyCheckTimestamp = 0L; + return splitChange; + } catch (Exception e) { + throw new IllegalStateException(String.format("Problem fetching splitChanges since %s: %s", 
since, e), e); + } finally { + _telemetryRuntimeProducer.recordSyncLatency(HTTPLatenciesEnum.SPLITS, System.currentTimeMillis() - start); + } + } - String json = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); - if (_log.isDebugEnabled()) { - _log.debug("Received json: " + json); - } - return Json.fromJson(json, SplitChange.class); - } catch (Throwable t) { - throw new IllegalStateException("Problem fetching splitChanges: " + t.getMessage(), t); - } finally { - _telemetryRuntimeProducer.recordSyncLatency(HTTPLatenciesEnum.SPLITS, System.currentTimeMillis()-start); - Utils.forceClose(response); + private URI buildURL(FetchOptions options, long since, long sinceRBS) throws URISyntaxException { + URIBuilder uriBuilder = new URIBuilder(_target).addParameter(SPEC, "" + specVersion); + uriBuilder.addParameter(SINCE, "" + since); + if (specVersion.equals(SPEC_1_3)) { + uriBuilder.addParameter(RB_SINCE, "" + sinceRBS); + } + if (!options.flagSetsFilter().isEmpty()) { + uriBuilder.addParameter(SETS, "" + options.flagSetsFilter()); + } + if (options.hasCustomCN()) { + uriBuilder.addParameter(TILL, "" + options.targetCN()); } + return uriBuilder.build(); } @VisibleForTesting URI getTarget() { return _target; } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/JsonLocalhostSplitChangeFetcher.java b/client/src/main/java/io/split/client/JsonLocalhostSplitChangeFetcher.java new file mode 100644 index 000000000..03530d099 --- /dev/null +++ b/client/src/main/java/io/split/client/JsonLocalhostSplitChangeFetcher.java @@ -0,0 +1,90 @@ +package io.split.client; + +import com.google.gson.JsonObject; +import com.google.gson.stream.JsonReader; +import io.split.client.dtos.SplitChange; +import io.split.client.dtos.SplitChangesOldPayloadDto; +import io.split.client.utils.InputStreamProvider; +import io.split.client.utils.Json; +import io.split.client.utils.LocalhostSanitizer; +import io.split.engine.common.FetchOptions; +import 
io.split.engine.experiments.SplitChangeFetcher; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.BufferedReader; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.Arrays; + +import static io.split.client.utils.Utils.checkExitConditions; + +public class JsonLocalhostSplitChangeFetcher implements SplitChangeFetcher { + + private static final Logger _log = LoggerFactory.getLogger(JsonLocalhostSplitChangeFetcher.class); + private final InputStreamProvider _inputStreamProvider; + private byte [] lastHashFeatureFlags; + private byte [] lastHashRuleBasedSegments; + + public JsonLocalhostSplitChangeFetcher(InputStreamProvider inputStreamProvider) { + _inputStreamProvider = inputStreamProvider; + lastHashFeatureFlags = new byte[0]; + lastHashRuleBasedSegments = new byte[0]; + } + + @Override + public SplitChange fetch(long since, long sinceRBS, FetchOptions options) { + try { + JsonReader jsonReader = new JsonReader(new BufferedReader(new InputStreamReader(_inputStreamProvider.get(), StandardCharsets.UTF_8))); + if (checkOldSpec(new JsonReader(new BufferedReader(new InputStreamReader(_inputStreamProvider.get(), StandardCharsets.UTF_8))))) { + return Json.fromJson(jsonReader, SplitChangesOldPayloadDto.class).toSplitChange(); + } + SplitChange splitChange = Json.fromJson(jsonReader, SplitChange.class); + return processSplitChange(splitChange, since, sinceRBS); + } catch (Exception e) { + throw new IllegalStateException("Problem fetching splitChanges: " + e.getMessage(), e); + } + } + + private boolean checkOldSpec(JsonReader jsonReader) { + return Json.fromJson(jsonReader, JsonObject.class).has("splits"); + } + + private SplitChange processSplitChange(SplitChange splitChange, long changeNumber, long changeNumberRBS) throws NoSuchAlgorithmException { + SplitChange splitChangeToProcess = 
LocalhostSanitizer.sanitization(splitChange); + // if the till is less than storage CN and different from the default till ignore the change + if (checkExitConditions(splitChangeToProcess.featureFlags, changeNumber) || + checkExitConditions(splitChangeToProcess.ruleBasedSegments, changeNumberRBS)) { + _log.warn("The till is lower than the change number or different to -1"); + return null; + } + + byte [] currHashFeatureFlags = getStringDigest(splitChange.featureFlags.d.toString()); + byte [] currHashRuleBasedSegments = getStringDigest(splitChange.ruleBasedSegments.d.toString()); + //if sha exist and is equal to before sha, or if till is equal to default till returns the same segmentChange with till equals to storage CN + if (Arrays.equals(lastHashFeatureFlags, currHashFeatureFlags) || splitChangeToProcess.featureFlags.t == -1) { + splitChangeToProcess.featureFlags.t = changeNumber; + } + if (Arrays.equals(lastHashRuleBasedSegments, currHashRuleBasedSegments) || splitChangeToProcess.ruleBasedSegments.t == -1) { + splitChangeToProcess.ruleBasedSegments.t = changeNumberRBS; + } + + lastHashFeatureFlags = currHashFeatureFlags; + lastHashRuleBasedSegments = currHashRuleBasedSegments; + splitChangeToProcess.featureFlags.s = changeNumber; + splitChangeToProcess.ruleBasedSegments.s = changeNumberRBS; + + return splitChangeToProcess; + } + + private byte[] getStringDigest(String json) throws NoSuchAlgorithmException { + MessageDigest digest = MessageDigest.getInstance("SHA-1"); + digest.reset(); + digest.update(json.getBytes()); + // calculate the json sha + return digest.digest(); + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/LegacyLocalhostSplitChangeFetcher.java b/client/src/main/java/io/split/client/LegacyLocalhostSplitChangeFetcher.java new file mode 100644 index 000000000..c67055ec8 --- /dev/null +++ b/client/src/main/java/io/split/client/LegacyLocalhostSplitChangeFetcher.java @@ -0,0 +1,111 @@ +package io.split.client; + +import 
io.split.client.dtos.Condition; +import io.split.client.dtos.ConditionType; +import io.split.client.dtos.Split; +import io.split.client.dtos.SplitChange; +import io.split.client.dtos.Status; +import io.split.client.dtos.ChangeDto; +import io.split.client.utils.LocalhostConstants; +import io.split.client.utils.LocalhostSanitizer; +import io.split.engine.common.FetchOptions; +import io.split.engine.experiments.SplitChangeFetcher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Optional; + +public class LegacyLocalhostSplitChangeFetcher implements SplitChangeFetcher { + + private static final Logger _log = LoggerFactory.getLogger(YamlLocalhostSplitChangeFetcher.class); + static final String FILENAME = ".split"; + private final File _splitFile; + + public LegacyLocalhostSplitChangeFetcher(String directory) { + if (directory == null || directory.isEmpty()){ + directory = System.getProperty("user.home"); + } + _splitFile = new File(directory, FILENAME); + } + + @Override + public SplitChange fetch(long since, long sinceRBS, FetchOptions options) { + + try (BufferedReader reader = new BufferedReader(new FileReader(_splitFile))) { + SplitChange splitChange = new SplitChange(); + splitChange.featureFlags = new ChangeDto<>(); + splitChange.featureFlags.d = new ArrayList<>(); + for (String line = reader.readLine(); line != null; line = reader.readLine()) { + String lineTrim = line.trim(); + if (lineTrim.isEmpty() || lineTrim.startsWith("#")) { + continue; + } + + String[] featureTreatment = lineTrim.split("\\s+"); + + if (featureTreatment.length < 2 || featureTreatment.length > 3) { + _log.info("Ignoring line since it does not have 2 or 3 columns: " + lineTrim); + continue; + } + Optional splitOptional = splitChange.featureFlags.d.stream(). 
+ filter(split -> split.name.equals(featureTreatment[0])).findFirst(); + Split split = splitOptional.orElse(null); + if(split == null) { + split = new Split(); + split.name = featureTreatment[0]; + split.configurations = new HashMap<>(); + split.conditions = new ArrayList<>(); + } else { + splitChange.featureFlags.d.remove(split); + } + split.status = Status.ACTIVE; + split.defaultTreatment = featureTreatment[1]; + split.trafficTypeName = LocalhostConstants.USER; + split.trafficAllocation = LocalhostConstants.SIZE_100; + split.trafficAllocationSeed = LocalhostConstants.SIZE_1; + + Condition condition = checkCondition(featureTreatment); + if(condition.conditionType != ConditionType.ROLLOUT){ + split.conditions.add(0, condition); + } else { + split.conditions.add(condition); + } + splitChange.featureFlags.d.add(split); + } + splitChange.featureFlags.t = since; + splitChange.featureFlags.s = since; + splitChange.ruleBasedSegments = new ChangeDto<>(); + splitChange.ruleBasedSegments.s = -1; + splitChange.ruleBasedSegments.t = -1; + splitChange.ruleBasedSegments.d = new ArrayList<>(); + return splitChange; + } catch (FileNotFoundException f) { + _log.warn("There was no file named " + _splitFile.getPath() + " found. " + + "We created a split client that returns default treatments for all feature flags for all of your users. " + + "If you wish to return a specific treatment for a feature flag, enter the name of that feature flag name and " + + "treatment name separated by whitespace in " + _splitFile.getPath() + + "; one pair per line. 
Empty lines or lines starting with '#' are considered comments", f); + throw new IllegalStateException("Problem fetching splitChanges: " + f.getMessage(), f); + } catch (Exception e) { + _log.warn(String.format("Problem to fetch split change using the file %s", + _splitFile.getPath()), e); + throw new IllegalStateException("Problem fetching splitChanges: " + e.getMessage(), e); + } + } + + private Condition checkCondition(String[] featureTreatment) { + Condition condition; + if (featureTreatment.length == 2) { + condition = LocalhostSanitizer.createCondition(null, featureTreatment[1]); + } else { + condition = LocalhostSanitizer.createCondition(featureTreatment[2], featureTreatment[1]); + } + return condition; + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/LegacyLocalhostSplitFile.java b/client/src/main/java/io/split/client/LegacyLocalhostSplitFile.java deleted file mode 100644 index 720064188..000000000 --- a/client/src/main/java/io/split/client/LegacyLocalhostSplitFile.java +++ /dev/null @@ -1,57 +0,0 @@ -package io.split.client; - -import com.google.common.collect.Maps; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.io.IOException; -import java.util.Map; - -public class LegacyLocalhostSplitFile extends AbstractLocalhostSplitFile { - private static final Logger _log = LoggerFactory.getLogger(LegacyLocalhostSplitFile.class); - - public LegacyLocalhostSplitFile(LocalhostSplitFactory splitFactory, String directory, String fileName) throws IOException { - super(splitFactory, directory, fileName); - } - - - public Map readOnSplits() throws IOException { - Map onSplits = Maps.newHashMap(); - - try (BufferedReader reader = new BufferedReader(new FileReader(_file))) { - for (String line = reader.readLine(); line != null; line = reader.readLine()) { - line = line.trim(); - if (line.isEmpty() || 
line.startsWith("#")) { - continue; - } - - String[] feature_treatment = line.split("\\s+"); - - if (feature_treatment.length < 2 || feature_treatment.length > 3) { - _log.info("Ignoring line since it does not have 2 or 3 columns: " + line); - continue; - } - - SplitAndKey splitAndKey = null; - if (feature_treatment.length == 2) { - splitAndKey = SplitAndKey.of(feature_treatment[0]); - } else { - splitAndKey = SplitAndKey.of(feature_treatment[0], feature_treatment[2]); - } - - onSplits.put(splitAndKey, new LocalhostSplit(feature_treatment[1], null)); - } - } catch (FileNotFoundException e) { - _log.warn("There was no file named " + _file.getPath() + " found. " + - "We created a split client that returns default treatments for all features for all of your users. " + - "If you wish to return a specific treatment for a feature, enter the name of that feature name and " + - "treatment name separated by whitespace in " + _file.getPath() + - "; one pair per line. Empty lines or lines starting with '#' are considered comments", e); - } - - return onSplits; - } -} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/LocalhostSegmentChangeFetcher.java b/client/src/main/java/io/split/client/LocalhostSegmentChangeFetcher.java new file mode 100644 index 000000000..d3c57506a --- /dev/null +++ b/client/src/main/java/io/split/client/LocalhostSegmentChangeFetcher.java @@ -0,0 +1,73 @@ +package io.split.client; + +import com.google.gson.stream.JsonReader; +import io.split.client.dtos.SegmentChange; +import io.split.client.utils.Json; +import io.split.client.utils.LocalhostSanitizer; +import io.split.engine.common.FetchOptions; +import io.split.engine.segments.SegmentChangeFetcher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.Arrays; +import 
java.util.HashMap; +import java.util.Map; + +public class LocalhostSegmentChangeFetcher implements SegmentChangeFetcher { + + private static final Logger _log = LoggerFactory.getLogger(LocalhostSegmentChangeFetcher.class); + private final File _file; + + private Map lastHash; + + public LocalhostSegmentChangeFetcher(String filePath){ + _file = new File(filePath); + lastHash = new HashMap(); + } + + @Override + public SegmentChange fetch(String segmentName, long changesSinceThisChangeNumber, FetchOptions options) { + try { + JsonReader jsonReader = new JsonReader(new FileReader(String.format("%s/%s.json", _file, segmentName))); + SegmentChange segmentChange = Json.fromJson(jsonReader, SegmentChange.class); + return processSegmentChange(segmentName, changesSinceThisChangeNumber, segmentChange); + } catch (FileNotFoundException f){ + _log.warn(String.format("There was no file named %s/%s found.", _file.getPath(), segmentName), f); + throw new IllegalStateException(String.format("Problem fetching segment %s: %s", segmentName, f.getMessage()), f); + } catch (Exception e) { + _log.warn(String.format("Problem to fetch segment change for the segment %s in the directory %s.", segmentName, _file.getPath()), e); + throw new IllegalStateException(String.format("Problem fetching segment %s: %s", segmentName, e.getMessage()), e); + } + } + + private SegmentChange processSegmentChange(String segmentName, long changeNumber, SegmentChange segmentChange) throws NoSuchAlgorithmException { + SegmentChange segmentChangeToProcess = LocalhostSanitizer.sanitization(segmentChange); + if (segmentChangeToProcess == null){ + return null; + } + // if the till is less than storage CN and different from the default till ignore the change + if (segmentChangeToProcess.till < changeNumber && segmentChangeToProcess.till != -1){ + _log.warn("The segmentChange till is lower than the change number or different to -1"); + return null; + } + String toHash = segmentChangeToProcess.added.toString() + 
segmentChangeToProcess.removed.toString(); + MessageDigest digest = MessageDigest.getInstance("SHA-1"); + digest.reset(); + digest.update(toHash.getBytes()); + // calculate the json sha + byte [] currHash = digest.digest(); + //if sha exist and is equal to before sha, or if till is equal to default till returns the same segmentChange with till equals to storage CN + if ((lastHash.containsKey(segmentName) && Arrays.equals((byte[]) lastHash.get(segmentName), currHash)) || + segmentChangeToProcess.till == -1) { + segmentChangeToProcess.till = changeNumber; + } + lastHash.put(segmentName, currHash); + segmentChangeToProcess.since = changeNumber; + return segmentChangeToProcess; + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/LocalhostSegmentFetcherNoop.java b/client/src/main/java/io/split/client/LocalhostSegmentFetcherNoop.java new file mode 100644 index 000000000..03285c49f --- /dev/null +++ b/client/src/main/java/io/split/client/LocalhostSegmentFetcherNoop.java @@ -0,0 +1,13 @@ +package io.split.client; + +import io.split.client.dtos.SegmentChange; +import io.split.engine.common.FetchOptions; +import io.split.engine.segments.SegmentChangeFetcher; + +public class LocalhostSegmentFetcherNoop implements SegmentChangeFetcher { + + @Override + public SegmentChange fetch(String segmentName, long changesSinceThisChangeNumber, FetchOptions options) { + return new SegmentChange(); + } +} diff --git a/client/src/main/java/io/split/client/LocalhostSplitFactory.java b/client/src/main/java/io/split/client/LocalhostSplitFactory.java deleted file mode 100644 index 48142a0ff..000000000 --- a/client/src/main/java/io/split/client/LocalhostSplitFactory.java +++ /dev/null @@ -1,98 +0,0 @@ -package io.split.client; - -import io.split.storages.SegmentCacheConsumer; -import io.split.storages.memory.InMemoryCacheImp; -import io.split.storages.SplitCache; -import io.split.client.events.NoopEventsStorageImp; -import 
io.split.client.impressions.ImpressionsManager; -import io.split.engine.SDKReadinessGates; -import io.split.engine.evaluator.EvaluatorImp; -import io.split.storages.memory.SegmentCacheInMemoryImpl; -import io.split.telemetry.storage.NoopTelemetryStorage; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.Map; - -/** - * An implementation of SplitClient that considers all partitions - * passed in the constructor to be 100% on for all users, and - * any other split to be 100% off for all users. This implementation - * is useful for using Split in localhost environment. - * - * The startup order is as follows: - * - Split will use config.splitFile (full path) if set and will look for a yaml (new) format. - * - otherwise Split will look for $user.home/.split file if it exists (for backward compatibility with older versions) - * - */ -public final class LocalhostSplitFactory implements SplitFactory { - private static final Logger _log = LoggerFactory.getLogger(LocalhostSplitFactory.class); - - static final String FILENAME = ".split"; - static final String LOCALHOST = "localhost"; - - private final SplitClient _client; - private final LocalhostSplitManager _manager; - private final AbstractLocalhostSplitFile _splitFile; - private final CacheUpdaterService _cacheUpdaterService; - - public static LocalhostSplitFactory createLocalhostSplitFactory(SplitClientConfig config) throws IOException { - String directory = System.getProperty("user.home"); - return new LocalhostSplitFactory(directory, config.splitFile()); - } - - public LocalhostSplitFactory(String directory, String file) throws IOException { - - if (file != null && !file.isEmpty() && (file.endsWith(".yaml") || file.endsWith(".yml"))) { - _splitFile = new YamlLocalhostSplitFile(this, "", file); - _log.info("Starting Split in localhost mode with file at " + _splitFile._file.getAbsolutePath()); - } else { - _splitFile = new LegacyLocalhostSplitFile(this, 
directory, FILENAME); - _log.warn("(Deprecated) Starting Split in localhost mode using legacy file located at " + _splitFile._file.getAbsolutePath() - + "\nPlease set SplitClientConfig.builder().splitFile(...) to point to the new split.yaml location."); - } - - Map splitAndKeyToTreatment = _splitFile.readOnSplits(); - SplitCache splitCache = new InMemoryCacheImp(); - SegmentCacheConsumer segmentCache = new SegmentCacheInMemoryImpl(); - SDKReadinessGates sdkReadinessGates = new SDKReadinessGates(); - - _cacheUpdaterService = new CacheUpdaterService(splitCache); - _cacheUpdaterService.updateCache(splitAndKeyToTreatment); - sdkReadinessGates.sdkInternalReady(); - _client = new SplitClientImpl(this, splitCache, - new ImpressionsManager.NoOpImpressionsManager(), new NoopEventsStorageImp(), - SplitClientConfig.builder().setBlockUntilReadyTimeout(1).build(), sdkReadinessGates, new EvaluatorImp(splitCache, segmentCache), new NoopTelemetryStorage(), new NoopTelemetryStorage()); - _manager = LocalhostSplitManager.of(splitAndKeyToTreatment); - - _splitFile.registerWatcher(); - _splitFile.setDaemon(true); - _splitFile.start(); - } - - @Override - public SplitClient client() { - return _client; - } - - @Override - public SplitManager manager() { - return _manager; - } - - @Override - public void destroy() { - _splitFile.stopThread(); - } - - @Override - public boolean isDestroyed() { - return _splitFile.isStopped(); - } - - public void updateFeatureToTreatmentMap(Map featureToTreatmentMap) { - _cacheUpdaterService.updateCache(featureToTreatmentMap); - _manager.updateFeatureToTreatmentMap(featureToTreatmentMap); - } -} diff --git a/client/src/main/java/io/split/client/LocalhostSplitManager.java b/client/src/main/java/io/split/client/LocalhostSplitManager.java index dc72ea4d1..f117338d5 100644 --- a/client/src/main/java/io/split/client/LocalhostSplitManager.java +++ b/client/src/main/java/io/split/client/LocalhostSplitManager.java @@ -76,12 +76,12 @@ public void blockUntilReady() 
throws TimeoutException, InterruptedException { } @Override - public SplitView split(String featureName) { - if (!_splitToTreatmentsMap.containsKey(featureName)) { + public SplitView split(String featureFlagName) { + if (!_splitToTreatmentsMap.containsKey(featureFlagName)) { return null; } - return toSplitView(featureName, _splitToTreatmentsMap.get(featureName)); + return toSplitView(featureFlagName, _splitToTreatmentsMap.get(featureFlagName)); } void updateFeatureToTreatmentMap(Map featureToTreatmentMap) { @@ -91,9 +91,9 @@ void updateFeatureToTreatmentMap(Map featureToTreat } - private SplitView toSplitView(String featureName, Set treatments) { + private SplitView toSplitView(String featureFlagName, Set treatments) { SplitView view = new SplitView(); - view.name = featureName; + view.name = featureFlagName; view.killed = false; view.trafficType = null; view.changeNumber = 0; diff --git a/client/src/main/java/io/split/client/NoOpHeaderDecorator.java b/client/src/main/java/io/split/client/NoOpHeaderDecorator.java new file mode 100644 index 000000000..8ce04fdbc --- /dev/null +++ b/client/src/main/java/io/split/client/NoOpHeaderDecorator.java @@ -0,0 +1,18 @@ +package io.split.client; + +import io.split.client.CustomHeaderDecorator; +import io.split.client.dtos.RequestContext; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +class NoOpHeaderDecorator implements CustomHeaderDecorator { + public NoOpHeaderDecorator() { + } + + @Override + public Map> getHeaderOverrides(RequestContext context) { + return new HashMap<>(); + } +} diff --git a/client/src/main/java/io/split/client/RequestDecorator.java b/client/src/main/java/io/split/client/RequestDecorator.java new file mode 100644 index 000000000..33059e617 --- /dev/null +++ b/client/src/main/java/io/split/client/RequestDecorator.java @@ -0,0 +1,47 @@ +package io.split.client; + +import io.split.client.dtos.RequestContext; + + +import java.util.HashSet; +import java.util.Map; +import 
java.util.Arrays; +import java.util.Set; +import java.util.stream.Collectors; + +public final class RequestDecorator { + CustomHeaderDecorator _headerDecorator; + + private static final Set forbiddenHeaders = new HashSet<>(Arrays.asList( + "splitsdkversion", + "splitmachineip", + "splitmachinename", + "splitimpressionsmode", + "host", + "referrer", + "content-type", + "content-length", + "content-encoding", + "accept", + "keep-alive", + "x-fastly-debug")); + + public RequestDecorator(CustomHeaderDecorator headerDecorator) { + _headerDecorator = (headerDecorator == null) + ? new NoOpHeaderDecorator() + : headerDecorator; + } + + public RequestContext decorateHeaders(RequestContext request) { + try { + return new RequestContext(_headerDecorator.getHeaderOverrides(request) + .entrySet() + .stream() + .filter(e -> !forbiddenHeaders.contains(e.getKey().toLowerCase())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + } catch (Exception e) { + throw new IllegalArgumentException( + String.format("Problem adding custom headers to request decorator: %s", e), e); + } + } +} diff --git a/client/src/main/java/io/split/client/SplitClient.java b/client/src/main/java/io/split/client/SplitClient.java index da3debb63..3a21ca1b0 100644 --- a/client/src/main/java/io/split/client/SplitClient.java +++ b/client/src/main/java/io/split/client/SplitClient.java @@ -2,6 +2,7 @@ import io.split.client.api.Key; import io.split.client.api.SplitResult; +import io.split.client.dtos.EvaluationOptions; import java.util.List; import java.util.Map; @@ -13,36 +14,36 @@ public interface SplitClient { /** - * Returns the treatment to show this key for this feature. The set of treatments - * for a feature can be configured on the Split web console. + * Returns the treatment to show this key for this feature flag. The set of treatments + * for a feature flag can be configured on the Split user interface. *

*

* This method returns the string 'control' if: *

    *
  1. Any of the parameters were null
  2. *
  3. There was an exception in evaluating the treatment
  4. - *
  5. The SDK does not know of the existence of this feature
  6. - *
  7. The feature was deleted through the web console.
  8. + *
  9. The SDK does not know of the existence of this feature flag
  10. + *
  11. The feature flag was deleted through the Split user interface.
  12. *
* 'control' is a reserved treatment (you cannot create a treatment with the * same name) to highlight these exceptional circumstances. *

*

- * The sdk returns the default treatment of this feature if: + * The sdk returns the default treatment of this feature flag if: *

    - *
  1. The feature was killed
  2. - *
  3. The key did not match any of the conditions in the feature roll-out plan
  4. + *
  5. The feature flag was killed
  6. + *
  7. The key did not match any of the conditions in the feature flag roll-out plan
  8. *
- * The default treatment of a feature is set on the Split web console. + * The default treatment of a feature flag is set on the Split user interface. *

*

* This method does not throw any exceptions. It also never returns null. * - * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. - * @param split the feature we want to evaluate. MUST NOT be null. - * @return the evaluated treatment, the default treatment of this feature, or 'control'. + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. + * @param featureFlagName the name of the feature flag we want to evaluate. MUST NOT be null. + * @return the evaluated treatment, the default treatment of this feature flag, or 'control'. */ - String getTreatment(String key, String split); + String getTreatment(String key, String featureFlagName); /** * This method is useful when you want to determine the treatment to show @@ -54,18 +55,18 @@ public interface SplitClient { * vs. premium plan. Another example is to show a different treatment * to users created after a certain date. * - * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. - * @param split the feature we want to evaluate. MUST NOT be null. + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. + * @param featureFlagName the name of the feature flag we want to evaluate. MUST NOT be null. * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. - * @return the evaluated treatment, the default treatment of this feature, or 'control'. + * @return the evaluated treatment, the default treatment of this feature flag, or 'control'. 
*/ - String getTreatment(String key, String split, Map attributes); + String getTreatment(String key, String featureFlagName, Map attributes); /** - * To understand why this method is useful, consider the following simple Split as an example: + * To understand why this method is useful, consider the following simple Feature Flag as an example: * - * if user is in segment employees then split 100%:on - * else if user is in segment all then split 20%:on,80%:off + * if user is in segment employees then feature flag 100%:on + * else if user is in segment all then feature flag 20%:on,80%:off * * There are two concepts here: matching and bucketing. Matching * refers to ‘user is in segment employees’ or ‘user is in segment @@ -87,90 +88,90 @@ public interface SplitClient { * * * @param key the matching and bucketing keys. MUST NOT be null. - * @param split the feature we want to evaluate. MUST NOT be null. + * @param featureFlagName the name of the feature flag we want to evaluate. MUST NOT be null. * @param attributes of the entity (user, account etc.) to use in evaluation. Can be null or empty. * - * @return the evaluated treatment, the default treatment of this feature, or 'control'. + * @return the evaluated treatment, the default treatment of this feature flag, or 'control'. */ - String getTreatment(Key key, String split, Map attributes); + String getTreatment(Key key, String featureFlagName, Map attributes); /** * Same as {@link #getTreatment(String, String)} but it returns the configuration associated to the - * matching treatment if any. Otherwise {@link SplitResult.configurations()} will be null. + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. *

*

* Examples include showing a different treatment to users on trial plan * vs. premium plan. Another example is to show a different treatment * to users created after a certain date. * - * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. - * @param split the feature we want to evaluate. MUST NOT be null. - * @return SplitResult containing the evaluated treatment (the default treatment of this feature, or 'control') and + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. + * @param featureFlagName the name of the feature flag we want to evaluate. MUST NOT be null. + * @return SplitResult containing the evaluated treatment (the default treatment of this feature flag, or 'control') and * a configuration associated to this treatment if set. */ - SplitResult getTreatmentWithConfig(String key, String split); + SplitResult getTreatmentWithConfig(String key, String featureFlagName); /** * Same as {@link #getTreatment(String, String, Map)} but it returns the configuration associated to the - * matching treatment if any. Otherwise {@link SplitResult.configurations()} will be null. + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. *

*

* Examples include showing a different treatment to users on trial plan * vs. premium plan. Another example is to show a different treatment * to users created after a certain date. * - * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. - * @param split the feature we want to evaluate. MUST NOT be null. + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. + * @param featureFlagName the name of the feature flag we want to evaluate. MUST NOT be null. * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. - * @return SplitResult containing the evaluated treatment (the default treatment of this feature, or 'control') and + * @return SplitResult containing the evaluated treatment (the default treatment of this feature flag, or 'control') and * a configuration associated to this treatment if set. */ - SplitResult getTreatmentWithConfig(String key, String split, Map attributes); + SplitResult getTreatmentWithConfig(String key, String featureFlagName, Map attributes); /** * Same as {@link #getTreatment(Key, String, Map)} but it returns the configuration associated to the - * matching treatment if any. Otherwise {@link SplitResult.configurations()} will be null. + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. * * @param key the matching and bucketing keys. MUST NOT be null. - * @param split the feature we want to evaluate. MUST NOT be null. + * @param featureFlagName the name of the feature flag we want to evaluate. MUST NOT be null. * @param attributes of the entity (user, account etc.) to use in evaluation. Can be null or empty. 
* - * @return SplitResult containing the evaluated treatment (the default treatment of this feature, or 'control') and + * @return SplitResult containing the evaluated treatment (the default treatment of this feature flag, or 'control') and * a configuration associated to this treatment if set. */ - SplitResult getTreatmentWithConfig(Key key, String split, Map attributes); + SplitResult getTreatmentWithConfig(Key key, String featureFlagName, Map attributes); /** - * Returns a map of feature name and treatments to show this key for these features. The set of treatments - * for a feature can be configured on the Split web console. + * Returns a map of feature flag name and treatments to show this key for these feature flags. The set of treatments + * for a feature flag can be configured on the Split user interface. *

*

- * This method returns for each feature the string 'control' if: + * This method returns for each feature flag the string 'control' if: *

    *
  1. Any of the parameters were null
  2. *
  3. There was an exception in evaluating the treatment
  4. - *
  5. The SDK does not know of the existence of this feature
  6. - *
  7. The feature was deleted through the web console.
  8. + *
  9. The SDK does not know of the existence of this feature flag
  10. + *
  11. The feature flag was deleted through the Split user interface.
  12. *
* 'control' is a reserved treatment (you cannot create a treatment with the * same name) to highlight these exceptional circumstances. *

*

- * The sdk returns for each feature the default treatment of this feature if: + * The sdk returns for each feature flag the default treatment of this feature flag if: *

    - *
  1. The feature was killed
  2. - *
  3. The key did not match any of the conditions in the feature roll-out plan
  4. + *
  5. The feature flag was killed
  6. + *
  7. The key did not match any of the conditions in the feature flag roll-out plan
  8. *
- * The default treatment of a feature is set on the Split web console. + * The default treatment of a feature flag is set on the Split user interface. *

*

* This method does not throw any exceptions. It also never returns null. * - * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. - * @param splits the features we want to evaluate. MUST NOT be null. - * @return for each feature the evaluated treatment, the default treatment for each feature, or 'control'. + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. + * @param featureFlagNames the names of the feature flags we want to evaluate. MUST NOT be null. + * @return for each feature flag the evaluated treatment, the default treatment for each feature flag, or 'control'. */ - Map getTreatments(String key, List splits); + Map getTreatments(String key, List featureFlagNames); /** * This method is useful when you want to determine the treatments to show @@ -182,18 +183,18 @@ public interface SplitClient { * vs. premium plan. Another example is to show different treatments * to users created after a certain date. * - * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. - * @param splits the features we want to evaluate. MUST NOT be null. + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. + * @param featureFlagNames the names of the feature flags we want to evaluate. MUST NOT be null. * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. - * @return the evaluated treatment, the default treatment of this feature, or 'control'. + * @return the evaluated treatment, the default treatment of this feature flag, or 'control'. 
*/ - Map getTreatments(String key, List splits, Map attributes); + Map getTreatments(String key, List featureFlagNames, Map attributes); /** - * To understand why this method is useful, consider the following simple Split as an example: + * To understand why this method is useful, consider the following simple Feature Flag as an example: * - * if user is in segment employees then split 100%:on - * else if user is in segment all then split 20%:on,80%:off + * if user is in segment employees then feature flag 100%:on + * else if user is in segment all then feature flag 20%:on,80%:off * * There are two concepts here: matching and bucketing. Matching * refers to ‘user is in segment employees’ or ‘user is in segment @@ -215,58 +216,734 @@ public interface SplitClient { * * * @param key the matching and bucketing keys. MUST NOT be null. - * @param splits the features we want to evaluate. MUST NOT be null. + * @param featureFlagNames the names of the feature flags we want to evaluate. MUST NOT be null. * @param attributes of the entity (user, account etc.) to use in evaluation. Can be null or empty. * - * @return for each feature the evaluated treatment, the default treatment of the feature, or 'control'. + * @return for each feature flag the evaluated treatment, the default treatment of the feature flag, or 'control'. */ - Map getTreatments(Key key, List splits, Map attributes); + Map getTreatments(Key key, List featureFlagNames, Map attributes); /** * Same as {@link #getTreatments(String, List)} but it returns the configuration associated to the - * matching treatments if any. Otherwise {@link SplitResult.configurations()} will be null. + * matching treatments if any. Otherwise {@link SplitResult.config()} will be null. *

*

* Examples include showing a different treatment to users on trial plan * vs. premium plan. Another example is to show a different treatment * to users created after a certain date. * - * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. - * @param splits the features we want to evaluate. MUST NOT be null. - * @return Map containing for each feature the evaluated treatment (the default treatment of this feature, or 'control') and + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. + * @param featureFlagNames the names of the feature flags we want to evaluate. MUST NOT be null. + * @return Map containing for each feature flag the evaluated treatment (the default treatment of + * this feature flag, or 'control') and a configuration associated to this treatment if set. + */ + Map getTreatmentsWithConfig(String key, List featureFlagNames); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. + * @param featureFlagNames the names of the feature flags we want to evaluate. MUST NOT be null. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @return for each feature flag a SplitResult containing the evaluated treatment (the default treatment of this feature flag, or 'control') and + * a configuration associated to this treatment if set. + */ + Map getTreatmentsWithConfig(String key, List featureFlagNames, Map attributes); + + /** + * Same as {@link #getTreatments(Key, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + * + * @param key the matching and bucketing keys. MUST NOT be null. + * @param featureFlagNames the names of the feature flags we want to evaluate. MUST NOT be null. + * @param attributes of the entity (user, account etc.) to use in evaluation. Can be null or empty. + * + * @return for each feature flag a SplitResult containing the evaluated treatment (the default treatment of this feature flag, or 'control') and + * a configuration associated to this treatment if set. + */ + Map getTreatmentsWithConfig(Key key, List featureFlagNames, Map attributes); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null or empty. + * @param flagSet the Flag Set name that you want to evaluate. MUST not be null or empty. + * @return for each feature flag the evaluated treatment, the default treatment of this feature flag, or 'control'. + */ + Map getTreatmentsByFlagSet(String key, String flagSet); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null or empty. + * @param flagSet the Flag Set name that you want to evaluate. MUST not be null or empty. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @return for each feature flag the evaluated treatment, the default treatment of this feature flag, or 'control'. + */ + Map getTreatmentsByFlagSet(String key, String flagSet, Map attributes); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key the matching and bucketing keys. MUST not be null or empty. + * @param flagSet the Flag Set name that you want to evaluate. MUST not be null or empty. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @return for each feature flag the evaluated treatment, the default treatment of this feature flag, or 'control'. + */ + Map getTreatmentsByFlagSet(Key key, String flagSet, Map attributes); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null or empty. + * @param flagSets the names of Flag Sets that you want to evaluate. MUST not be null or empty. + * @return for each feature flag the evaluated treatment, the default treatment of this feature flag, or 'control'. + */ + Map getTreatmentsByFlagSets(String key, List flagSets); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null or empty. + * @param flagSets the names of Flag Sets that you want to evaluate. MUST not be null or empty. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @return for each feature flag the evaluated treatment, the default treatment of this feature flag, or 'control'. + */ + Map getTreatmentsByFlagSets(String key, List flagSets, Map attributes); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key the matching and bucketing keys. MUST not be null or empty. + * @param flagSets the names of Flag Sets that you want to evaluate. MUST not be null or empty. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @return for each feature flag the evaluated treatment, the default treatment of this feature flag, or 'control'. + */ + Map getTreatmentsByFlagSets(Key key, List flagSets, Map attributes); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null or empty. + * @param flagSet the Flag Set name that you want to evaluate. MUST not be null or empty. + * @return for each feature flag the evaluated treatment (the default treatment of this feature flag, or 'control') and a configuration + * associated to this treatment if set. + */ + Map getTreatmentsWithConfigByFlagSet(String key, String flagSet); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null or empty. + * @param flagSet the Flag Set name that you want to evaluate. MUST not be null or empty. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @return for each feature flag the evaluated treatment (the default treatment of this feature flag, or 'control') and a configuration + * associated to this treatment if set. + */ + Map getTreatmentsWithConfigByFlagSet(String key, String flagSet, Map attributes); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key the matching and bucketing keys. MUST not be null or empty. + * @param flagSet the Flag Set name that you want to evaluate. MUST not be null or empty. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @return for each feature flag the evaluated treatment (the default treatment of this feature flag, or 'control') and a configuration + * associated to this treatment if set. + */ + Map getTreatmentsWithConfigByFlagSet(Key key, String flagSet, Map attributes); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null or empty. + * @param flagSets the names of Flag Sets that you want to evaluate. MUST not be null or empty. + * @return for each feature flag the evaluated treatment (the default treatment of this feature flag, or 'control') and a configuration + * associated to this treatment if set. + */ + Map getTreatmentsWithConfigByFlagSets(String key, List flagSets); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null or empty. + * @param flagSets the names of Flag Sets that you want to evaluate. MUST not be null or empty. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @return for each feature flag the evaluated treatment (the default treatment of this feature flag, or 'control') and a configuration + * associated to this treatment if set. + */ + Map getTreatmentsWithConfigByFlagSets(String key, List flagSets, Map attributes); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key the matching and bucketing keys. MUST not be null or empty. + * @param flagSets the names of Flag Sets that you want to evaluate. MUST not be null or empty. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @return for each feature flag the evaluated treatment (the default treatment of this feature flag, or 'control') and a configuration + * associated to this treatment if set. + */ + Map getTreatmentsWithConfigByFlagSets(Key key, List flagSets, Map attributes); + + /** + * Returns the treatment to show this key for this feature flag. The set of treatments + * for a feature flag can be configured on the Split user interface. + *

+ *

+ * This method returns the string 'control' if: + *

    + *
  1. Any of the parameters were null
  2. + *
  3. There was an exception in evaluating the treatment
  4. + *
  5. The SDK does not know of the existence of this feature flag
  6. + *
  7. The feature flag was deleted through the Split user interface.
  8. + *
+ * 'control' is a reserved treatment (you cannot create a treatment with the + * same name) to highlight these exceptional circumstances. + *

+ *

+ * The sdk returns the default treatment of this feature flag if: + *

    + *
  1. The feature flag was killed
  2. + *
  3. The key did not match any of the conditions in the feature flag roll-out plan
  4. + *
+ * The default treatment of a feature flag is set on the Split user interface. + *

+ *

+ * This method does not throw any exceptions. It also never returns null. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. + * @param featureFlagName the name of the feature flag we want to evaluate. MUST NOT be null. + * @param evaluationOptions additional data for evaluation. + * @return the evaluated treatment, the default treatment of this feature flag, or 'control'. + */ + String getTreatment(String key, String featureFlagName, EvaluationOptions evaluationOptions); + + /** + * This method is useful when you want to determine the treatment to show + * to an customer (user, account etc.) based on an attribute of that customer + * instead of it's key. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. + * @param featureFlagName the name of the feature flag we want to evaluate. MUST NOT be null. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @param evaluationOptions additional data for evaluation + * @return the evaluated treatment, the default treatment of this feature flag, or 'control'. + */ + String getTreatment(String key, String featureFlagName, Map attributes, EvaluationOptions evaluationOptions); + + /** + * To understand why this method is useful, consider the following simple Feature Flag as an example: + * + * if user is in segment employees then feature flag 100%:on + * else if user is in segment all then feature flag 20%:on,80%:off + * + * There are two concepts here: matching and bucketing. Matching + * refers to ‘user is in segment employees’ or ‘user is in segment + * all’ whereas bucketing refers to ‘100%:on’ or ‘20%:on,80%:off’. + * + * By default, the same customer key is used for both matching and + * bucketing. However, for some advanced use cases, you may want + * to use different keys. For such cases, use this method. + * + * As an example, suppose you want to rollout to percentages of + * users in specific accounts. You can achieve that by matching + * via account id, but bucketing by user id. + * + * Another example is when you want to ensure that a user continues to get + * the same treatment after they sign up for your product that they used + * to get when they were simply a visitor to your site. In that case, + * before they sign up, you can use their visitor id for both matching and bucketing, but + * post log-in you can use their user id for matching and visitor id for bucketing. 
+ * + * + * @param key the matching and bucketing keys. MUST NOT be null. + * @param featureFlagName the name of the feature flag we want to evaluate. MUST NOT be null. + * @param attributes of the entity (user, account etc.) to use in evaluation. Can be null or empty. + * @param evaluationOptions additional data for evaluation + * + * @return the evaluated treatment, the default treatment of this feature flag, or 'control'. + */ + String getTreatment(Key key, String featureFlagName, Map attributes, EvaluationOptions evaluationOptions); + + /** + * Returns a map of feature flag name and treatments to show this key for these feature flags. The set of treatments + * for a feature flag can be configured on the Split user interface. + *

+ *

+ * This method returns for each feature flag the string 'control' if: + *

    + *
  1. Any of the parameters were null
  2. + *
  3. There was an exception in evaluating the treatment
  4. + *
  5. The SDK does not know of the existence of this feature flag
  6. + *
  7. The feature flag was deleted through the Split user interface.
  8. + *
+ * 'control' is a reserved treatment (you cannot create a treatment with the + * same name) to highlight these exceptional circumstances. + *

+ *

+ * The sdk returns for each feature flag the default treatment of this feature flag if: + *

    + *
  1. The feature flag was killed
  2. + *
  3. The key did not match any of the conditions in the feature flag roll-out plan
  4. + *
+ * The default treatment of a feature flag is set on the Split user interface. + *

+ *

+ * This method does not throw any exceptions. It also never returns null. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. + * @param featureFlagNames the names of the feature flags we want to evaluate. MUST NOT be null. + * @param evaluationOptions additional data for evaluation + * @return for each feature flag the evaluated treatment, the default treatment for each feature flag, or 'control'. + */ + Map getTreatments(String key, List featureFlagNames, EvaluationOptions evaluationOptions); + + /** + * This method is useful when you want to determine the treatments to show + * to a customer (user, account etc.) based on an attribute of that customer + * instead of their key. + *

+ *

+ * Examples include showing different treatments to users on trial plan + * vs. premium plan. Another example is to show different treatments + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. + * @param featureFlagNames the names of the feature flags we want to evaluate. MUST NOT be null. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @param evaluationOptions additional data for evaluation + * @return the evaluated treatment, the default treatment of this feature flag, or 'control'. + */ + Map getTreatments(String key, List featureFlagNames, Map attributes, EvaluationOptions evaluationOptions); + + /** + * To understand why this method is useful, consider the following simple Feature Flag as an example: + * + * if user is in segment employees then feature flag 100%:on + * else if user is in segment all then feature flag 20%:on,80%:off + * + * There are two concepts here: matching and bucketing. Matching + * refers to ‘user is in segment employees’ or ‘user is in segment + * all’ whereas bucketing refers to ‘100%:on’ or ‘20%:on,80%:off’. + * + * By default, the same customer key is used for both matching and + * bucketing. However, for some advanced use cases, you may want + * to use different keys. For such cases, use this method. + * + * As an example, suppose you want to rollout to percentages of + * users in specific accounts. You can achieve that by matching + * via account id, but bucketing by user id. + * + * Another example is when you want to ensure that a user continues to get + * the same treatment after they sign up for your product that they used + * to get when they were simply a visitor to your site. In that case, + * before they sign up, you can use their visitor id for both matching and bucketing, but + * post log-in you can use their user id for matching and visitor id for bucketing. 
+ * + * + * @param key the matching and bucketing keys. MUST NOT be null. + * @param featureFlagNames the names of the feature flags we want to evaluate. MUST NOT be null. + * @param attributes of the entity (user, account etc.) to use in evaluation. Can be null or empty. + * @param evaluationOptions additional data for evaluation + * + * @return for each feature flag the evaluated treatment, the default treatment of the feature flag, or 'control'. + */ + Map getTreatments(Key key, List featureFlagNames, Map attributes, EvaluationOptions evaluationOptions); + + /** + * Same as {@link #getTreatment(String, String)} but it returns the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. + * @param featureFlagName the name of the feature flag we want to evaluate. MUST NOT be null. + * @param evaluationOptions additional data for evaluation + * @return SplitResult containing the evaluated treatment (the default treatment of this feature flag, or 'control') and + * a configuration associated to this treatment if set. + */ + SplitResult getTreatmentWithConfig(String key, String featureFlagName, EvaluationOptions evaluationOptions); + + /** + * Same as {@link #getTreatment(Key, String, Map)} but it returns the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + * + * @param key the matching and bucketing keys. MUST NOT be null. + * @param featureFlagName the name of the feature flag we want to evaluate. MUST NOT be null. + * @param attributes of the entity (user, account etc.) to use in evaluation. Can be null or empty. + * @param evaluationOptions additional data for evaluation + * + * @return SplitResult containing the evaluated treatment (the default treatment of this feature flag, or 'control') and * a configuration associated to this treatment if set. */ - Map getTreatmentsWithConfig(String key, List splits); + SplitResult getTreatmentWithConfig(Key key, String featureFlagName, Map attributes, EvaluationOptions evaluationOptions); /** - * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature the configuration associated to the - * matching treatment if any. Otherwise {@link SplitResult.configurations()} will be null. + * Same as {@link #getTreatment(String, String, Map)} but it returns the configuration associated to the + * matching treatment if any. 
Otherwise {@link SplitResult.config()} will be null. *

*

* Examples include showing a different treatment to users on trial plan * vs. premium plan. Another example is to show a different treatment * to users created after a certain date. * - * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. - * @param splits the features we want to evaluate. MUST NOT be null. + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. + * @param featureFlagName the name of the feature flag we want to evaluate. MUST NOT be null. * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. - * @return for each feature a SplitResult containing the evaluated treatment (the default treatment of this feature, or 'control') and + * @param evaluationOptions additional data for evaluation + * @return SplitResult containing the evaluated treatment (the default treatment of this feature flag, or 'control') and * a configuration associated to this treatment if set. */ - Map getTreatmentsWithConfig(String key, List splits, Map attributes); + SplitResult getTreatmentWithConfig(String key, String featureFlagName, Map attributes, EvaluationOptions evaluationOptions); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. + * @param featureFlagNames the names of the feature flags we want to evaluate. MUST NOT be null. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @param evaluationOptions additional data for evaluation + * @return for each feature flag a SplitResult containing the evaluated treatment (the default treatment of this feature flag, or 'control') and + * a configuration associated to this treatment if set. + */ + Map getTreatmentsWithConfig(String key, List featureFlagNames, Map attributes, + EvaluationOptions evaluationOptions); + + /** + * Same as {@link #getTreatments(String, List)} but it returns the configuration associated to the + * matching treatments if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null. + * @param featureFlagNames the names of the feature flags we want to evaluate. MUST NOT be null. + * @param evaluationOptions additional data for evaluation + * @return Map containing for each feature flag the evaluated treatment (the default treatment of + * this feature flag, or 'control') and a configuration associated to this treatment if set. + */ + Map getTreatmentsWithConfig(String key, List featureFlagNames, EvaluationOptions evaluationOptions); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null or empty. + * @param flagSet the Flag Set name that you want to evaluate. MUST not be null or empty. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @param evaluationOptions additional data for evaluation + * @return for each feature flag the evaluated treatment, the default treatment of this feature flag, or 'control'. + */ + Map getTreatmentsByFlagSet(String key, String flagSet, Map attributes, EvaluationOptions evaluationOptions); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null or empty. + * @param flagSets the names of Flag Sets that you want to evaluate. MUST not be null or empty. + * @param evaluationOptions additional data for evaluation + * @return for each feature flag the evaluated treatment, the default treatment of this feature flag, or 'control'. + */ + Map getTreatmentsByFlagSets(String key, List flagSets, EvaluationOptions evaluationOptions); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null or empty. + * @param flagSets the names of Flag Sets that you want to evaluate. MUST not be null or empty. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @param evaluationOptions additional data for evaluation + * @return for each feature flag the evaluated treatment, the default treatment of this feature flag, or 'control'. + */ + Map getTreatmentsByFlagSets(String key, List flagSets, Map attributes, + EvaluationOptions evaluationOptions); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null or empty. + * @param flagSet the Flag Set name that you want to evaluate. MUST not be null or empty. + * @param evaluationOptions additional data for evaluation + * @return for each feature flag the evaluated treatment (the default treatment of this feature flag, or 'control') and a configuration + * associated to this treatment if set. + */ + Map getTreatmentsWithConfigByFlagSet(String key, String flagSet, EvaluationOptions evaluationOptions); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null or empty. + * @param flagSets the names of Flag Sets that you want to evaluate. MUST not be null or empty. + * @param evaluationOptions additional data for evaluation + * @return for each feature flag the evaluated treatment (the default treatment of this feature flag, or 'control') and a configuration + * associated to this treatment if set. + */ + Map getTreatmentsWithConfigByFlagSets(String key, List flagSets, EvaluationOptions evaluationOptions); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null or empty. + * @param flagSets the names of Flag Sets that you want to evaluate. MUST not be null or empty. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @param evaluationOptions additional data for evaluation + * @return for each feature flag the evaluated treatment (the default treatment of this feature flag, or 'control') and a configuration + * associated to this treatment if set. + */ + Map getTreatmentsWithConfigByFlagSets(String key, List flagSets, Map attributes, + EvaluationOptions evaluationOptions); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null or empty. + * @param flagSet the Flag Set name that you want to evaluate. MUST not be null or empty. + * @param evaluationOptions additional data for evaluation + * @return for each feature flag the evaluated treatment, the default treatment of this feature flag, or 'control'. + */ + Map getTreatmentsByFlagSet(String key, String flagSet, EvaluationOptions evaluationOptions); /** - * Same as {@link #getTreatments(Key, List, Map)} but it returns for each feature the configuration associated to the - * matching treatment if any. Otherwise {@link SplitResult.configurations()} will be null. + * Same as {@link #getTreatments(Key, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. * * @param key the matching and bucketing keys. MUST NOT be null. - * @param splits the features we want to evaluate. MUST NOT be null. + * @param featureFlagNames the names of the feature flags we want to evaluate. MUST NOT be null. * @param attributes of the entity (user, account etc.) to use in evaluation. Can be null or empty. + * @param evaluationOptions additional data for evaluation * - * @return for each feature a SplitResult containing the evaluated treatment (the default treatment of this feature, or 'control') and + * @return for each feature flag a SplitResult containing the evaluated treatment (the default treatment of this feature flag, or 'control') and * a configuration associated to this treatment if set. 
*/ - Map getTreatmentsWithConfig(Key key, List splits, Map attributes); + Map getTreatmentsWithConfig(Key key, List featureFlagNames, Map attributes, + EvaluationOptions evaluationOptions); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key a unique key of your customer (e.g. user_id, user_email, account_id, etc.) MUST not be null or empty. + * @param flagSet the Flag Set name that you want to evaluate. MUST not be null or empty. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @param evaluationOptions additional data for evaluation + * @return for each feature flag the evaluated treatment (the default treatment of this feature flag, or 'control') and a configuration + * associated to this treatment if set. + */ + Map getTreatmentsWithConfigByFlagSet(String key, String flagSet, Map attributes, + EvaluationOptions evaluationOptions); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key the matching and bucketing keys. MUST not be null or empty. + * @param flagSet the Flag Set name that you want to evaluate. MUST not be null or empty. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @param evaluationOptions additional data for evaluation + * @return for each feature flag the evaluated treatment, the default treatment of this feature flag, or 'control'. + */ + Map getTreatmentsByFlagSet(Key key, String flagSet, Map attributes, EvaluationOptions evaluationOptions); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key the matching and bucketing keys. MUST not be null or empty. + * @param flagSets the names of Flag Sets that you want to evaluate. MUST not be null or empty. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @param evaluationOptions additional data for evaluation + * @return for each feature flag the evaluated treatment, the default treatment of this feature flag, or 'control'. + */ + Map getTreatmentsByFlagSets(Key key, List flagSets, Map attributes, + EvaluationOptions evaluationOptions); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key the matching and bucketing keys. MUST not be null or empty. + * @param flagSet the Flag Set name that you want to evaluate. MUST not be null or empty. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @param evaluationOptions additional data for evaluation + * @return for each feature flag the evaluated treatment (the default treatment of this feature flag, or 'control') and a configuration + * associated to this treatment if set. + */ + Map getTreatmentsWithConfigByFlagSet(Key key, String flagSet, Map attributes, + EvaluationOptions evaluationOptions); + + /** + * Same as {@link #getTreatments(String, List, Map)} but it returns for each feature flag the configuration associated to the + * matching treatment if any. Otherwise {@link SplitResult.config()} will be null. + *

+ *

+ * Examples include showing a different treatment to users on trial plan + * vs. premium plan. Another example is to show a different treatment + * to users created after a certain date. + * + * @param key the matching and bucketing keys. MUST not be null or empty. + * @param flagSets the names of Flag Sets that you want to evaluate. MUST not be null or empty. + * @param attributes of the customer (user, account etc.) to use in evaluation. Can be null or empty. + * @param evaluationOptions additional data for evaluation + * @return for each feature flag the evaluated treatment (the default treatment of this feature flag, or 'control') and a configuration + * associated to this treatment if set. + */ + Map getTreatmentsWithConfigByFlagSets(Key key, List flagSets, Map attributes, + EvaluationOptions evaluationOptions); /** * Destroys the background processes and clears the cache, releasing the resources used by @@ -312,7 +989,6 @@ public interface SplitClient { * @param key the identifier of the entity * @param trafficType the type of the event * @param eventType the type of the event - * @param value the value of the event * * @return true if the track was successful, false otherwise */ @@ -346,4 +1022,4 @@ public interface SplitClient { *

*/ void blockUntilReady() throws TimeoutException, InterruptedException; -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/SplitClientConfig.java b/client/src/main/java/io/split/client/SplitClientConfig.java index d19311eee..3a38fccdb 100644 --- a/client/src/main/java/io/split/client/SplitClientConfig.java +++ b/client/src/main/java/io/split/client/SplitClientConfig.java @@ -1,16 +1,31 @@ package io.split.client; - +import io.split.client.dtos.FallbackTreatment; +import io.split.client.dtos.FallbackTreatmentsConfiguration; +import io.split.client.dtos.ProxyConfiguration; import io.split.client.impressions.ImpressionListener; import io.split.client.impressions.ImpressionsManager; +import io.split.client.utils.FileTypeEnum; import io.split.integrations.IntegrationsConfig; +import io.split.service.CustomHttpModule; import io.split.storages.enums.OperationMode; import io.split.storages.enums.StorageMode; import org.apache.hc.core5.http.HttpHost; +import org.slf4j.LoggerFactory; import pluggable.CustomStorageWrapper; import java.io.IOException; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; import java.util.Properties; +import java.util.concurrent.ThreadFactory; +import java.io.InputStream; + +import static io.split.inputValidation.FallbackTreatmentValidator.isValidByFlagTreatment; +import static io.split.inputValidation.FallbackTreatmentValidator.isValidTreatment; +import static io.split.inputValidation.FlagSetsValidator.cleanup; /** * Configurations for the SplitClient. 
@@ -19,6 +34,7 @@ */ public class SplitClientConfig { + private static final org.slf4j.Logger _log = LoggerFactory.getLogger(SplitClientConfig.class); public static final String LOCALHOST_DEFAULT_FILE = "split.yaml"; public static final String SDK_ENDPOINT = "https://sdk.split.io"; public static final String EVENTS_ENDPOINT = "https://events.split.io"; @@ -26,6 +42,14 @@ public class SplitClientConfig { public static final String STREAMING_ENDPOINT = "https://streaming.split.io/sse"; public static final String TELEMETRY_ENDPOINT = "https://telemetry.split.io/api/v1"; + public static class HttpScheme { + private HttpScheme() { + throw new IllegalStateException("Utility class"); + } + public static final String HTTP = "http"; + public static final String HTTPS = "https"; + } + private final String _endpoint; private final String _eventsEndpoint; @@ -41,13 +65,17 @@ public class SplitClientConfig { private final boolean _debugEnabled; private final boolean _labelsEnabled; private final boolean _ipAddressEnabled; + private final boolean _localhostRefreshEnabled; private final int _ready; private final int _waitBeforeShutdown; private final int _eventsQueueSize; - private final long _eventFlushIntervalInMillis; + private final long _eventSendIntervalInMillis; private final int _maxStringLength; private final boolean _destroyOnShutDown; private final String _splitFile; + private final FileTypeEnum _fileType; + private final InputStream _inputStream; + private final String _segmentDirectory; private final IntegrationsConfig _integrationsConfig; private final boolean _streamingEnabled; private final int _authRetryBackoffBase; @@ -59,20 +87,30 @@ public class SplitClientConfig { private final int _onDemandFetchRetryDelayMs; private final int _onDemandFetchMaxRetries; private final int 
_failedAttemptsBeforeLogging; - private final boolean _cdnDebugLogging; + private final int _uniqueKeysRefreshRateInMemory; + private final int _uniqueKeysRefreshRateRedis; + private static int _filterUniqueKeysRefreshRate; private final OperationMode _operationMode; private long _validateAfterInactivityInMillis; + private final long _startingSyncCallBackoffBaseMs; private final CustomStorageWrapper _customStorageWrapper; private final StorageMode _storageMode; + private final ThreadFactory _threadFactory; + private final FallbackTreatmentsConfiguration _fallbackTreatments; // Proxy configs + private final ProxyConfiguration _proxyConfiguration; private final HttpHost _proxy; private final String _proxyUsername; private final String _proxyPassword; // To be set during startup public static String splitSdkVersion; - + private final long _lastSeenCacheSize; + private final HashSet _flagSetsFilter; + private final int _invalidSets; + private final CustomHeaderDecorator _customHeaderDecorator; + private final CustomHttpModule _alternativeHTTPModule; public static Builder builder() { return new Builder(); @@ -93,15 +131,20 @@ private SplitClientConfig(String endpoint, boolean debugEnabled, boolean labelsEnabled, boolean ipAddressEnabled, + boolean localhostRefreshEnabled, int waitBeforeShutdown, HttpHost proxy, String proxyUsername, String proxyPassword, + ProxyConfiguration proxyConfiguration, int eventsQueueSize, - long eventFlushIntervalInMillis, + long eventSendIntervalInMillis, int maxStringLength, boolean destroyOnShutDown, String splitFile, + FileTypeEnum fileType, + InputStream inputStream, + String segmentDirectory, IntegrationsConfig integrationsConfig, boolean streamingEnabled, int authRetryBackoffBase, @@ -113,11 +156,21 @@ private SplitClientConfig(String endpoint, int onDemandFetchRetryDelayMs, int onDemandFetchMaxRetries, int failedAttemptsBeforeLogging, - boolean cdnDebugLogging, OperationMode operationMode, long validateAfterInactivityInMillis, + long 
startingSyncCallBackoffBaseMs, CustomStorageWrapper customStorageWrapper, - StorageMode storageMode) { + StorageMode storageMode, + int uniqueKeysRefreshRateInMemory, + int uniqueKeysRefreshRateRedis, + int filterUniqueKeysRefreshRate, + long lastSeenCacheSize, + ThreadFactory threadFactory, + HashSet flagSetsFilter, + int invalidSets, + CustomHeaderDecorator customHeaderDecorator, + CustomHttpModule alternativeHTTPModule, + FallbackTreatmentsConfiguration fallbackTreatments) { _endpoint = endpoint; _eventsEndpoint = eventsEndpoint; _featuresRefreshRate = pollForFeatureChangesEveryNSeconds; @@ -133,15 +186,20 @@ private SplitClientConfig(String endpoint, _debugEnabled = debugEnabled; _labelsEnabled = labelsEnabled; _ipAddressEnabled = ipAddressEnabled; + _localhostRefreshEnabled = localhostRefreshEnabled; _waitBeforeShutdown = waitBeforeShutdown; _proxy = proxy; _proxyUsername = proxyUsername; _proxyPassword = proxyPassword; + _proxyConfiguration = proxyConfiguration; _eventsQueueSize = eventsQueueSize; - _eventFlushIntervalInMillis = eventFlushIntervalInMillis; + _eventSendIntervalInMillis = eventSendIntervalInMillis; _maxStringLength = maxStringLength; _destroyOnShutDown = destroyOnShutDown; _splitFile = splitFile; + _fileType = fileType; + _inputStream = inputStream; + _segmentDirectory = segmentDirectory; _integrationsConfig = integrationsConfig; _streamingEnabled = streamingEnabled; _authRetryBackoffBase = authRetryBackoffBase; @@ -150,14 +208,24 @@ private SplitClientConfig(String endpoint, _streamingServiceURL = streamingServiceURL; _telemetryURL = telemetryURL; _telemetryRefreshRate = telemetryRefreshRate; + _uniqueKeysRefreshRateInMemory = uniqueKeysRefreshRateInMemory; + _uniqueKeysRefreshRateRedis = uniqueKeysRefreshRateRedis; + _filterUniqueKeysRefreshRate = filterUniqueKeysRefreshRate; _onDemandFetchRetryDelayMs = onDemandFetchRetryDelayMs; _onDemandFetchMaxRetries = onDemandFetchMaxRetries; _failedAttemptsBeforeLogging = failedAttemptsBeforeLogging; - 
_cdnDebugLogging = cdnDebugLogging; _operationMode = operationMode; _storageMode = storageMode; _validateAfterInactivityInMillis = validateAfterInactivityInMillis; + _startingSyncCallBackoffBaseMs = startingSyncCallBackoffBaseMs; _customStorageWrapper = customStorageWrapper; + _lastSeenCacheSize = lastSeenCacheSize; + _threadFactory = threadFactory; + _flagSetsFilter = flagSetsFilter; + _invalidSets = invalidSets; + _customHeaderDecorator = customHeaderDecorator; + _alternativeHTTPModule = alternativeHTTPModule; + _fallbackTreatments = fallbackTreatments; Properties props = new Properties(); try { @@ -196,6 +264,16 @@ public int impressionsRefreshRate() { return _impressionsRefreshRate; } + public int uniqueKeysRefreshRateInMemory() { + return _uniqueKeysRefreshRateInMemory; + } + public int uniqueKeysRefreshRateRedis() { + return _uniqueKeysRefreshRateRedis; + } + public static int filterUniqueKeysRefreshRate() { + return _filterUniqueKeysRefreshRate; + } + public int impressionsQueueSize() { return _impressionsQueueSize; } @@ -222,6 +300,10 @@ public boolean debugEnabled() { public boolean ipAddressEnabled() { return _ipAddressEnabled; } + public boolean localhostRefreshEnabled() { + return _localhostRefreshEnabled; + } + public int blockUntilReady() { return _ready; } @@ -242,8 +324,12 @@ public String proxyPassword() { return _proxyPassword; } - public long eventFlushIntervalInMillis() { - return _eventFlushIntervalInMillis; + public ProxyConfiguration proxyConfiguration() { + return _proxyConfiguration; + } + + public long eventSendIntervalInMillis() { + return _eventSendIntervalInMillis; } public int eventsQueueSize() { @@ -262,6 +348,18 @@ public String splitFile() { return _splitFile; } + public FileTypeEnum fileType() { + return _fileType; + } + + public InputStream inputStream(){ + return _inputStream; + } + + public String segmentDirectory() { + return _segmentDirectory; + } + public IntegrationsConfig integrationsConfig() { return _integrationsConfig; } 
@@ -290,22 +388,29 @@ public String telemetryURL() { return _telemetryURL; } + /** + * @deprecated As of release 4.X.X, replaced by {@link #getTelemetryRefreshRate()} } //todo update version + **/ + @Deprecated public int get_telemetryRefreshRate() { return _telemetryRefreshRate; } + + public int getTelemetryRefreshRate() { + return _telemetryRefreshRate; + } public int streamingRetryDelay() {return _onDemandFetchRetryDelayMs;} public int streamingFetchMaxRetries() {return _onDemandFetchMaxRetries;} public int failedAttemptsBeforeLogging() {return _failedAttemptsBeforeLogging;} - public boolean cdnDebugLogging() { return _cdnDebugLogging; } - public OperationMode operationMode() { return _operationMode;} public long validateAfterInactivityInMillis() { return _validateAfterInactivityInMillis; } + public long startingSyncCallBackoffBaseMs(){ return _startingSyncCallBackoffBaseMs;} public CustomStorageWrapper customStorageWrapper() { return _customStorageWrapper; @@ -313,8 +418,35 @@ public CustomStorageWrapper customStorageWrapper() { public StorageMode storageMode() { return _storageMode;} - public static final class Builder { + public long getLastSeenCacheSize() { + return _lastSeenCacheSize; + } + + public ThreadFactory getThreadFactory() { + return _threadFactory; + } + + public HashSet getSetsFilter() { + return _flagSetsFilter; + } + + public int getInvalidSets() { + return _invalidSets; + } + + public CustomHeaderDecorator customHeaderDecorator() { + return _customHeaderDecorator; + } + public boolean isSdkEndpointOverridden() { + return !_endpoint.equals(SDK_ENDPOINT); + } + + public CustomHttpModule alternativeHTTPModule() { return _alternativeHTTPModule; } + + public FallbackTreatmentsConfiguration fallbackTreatments() { return _fallbackTreatments; } + + public static final class Builder { private String _endpoint = SDK_ENDPOINT; private boolean _endpointSet = false; private String _eventsEndpoint = EVENTS_ENDPOINT; @@ -331,17 +463,22 @@ public static final 
class Builder { private int _ready = -1; // -1 means no blocking private int _metricsRefreshRate = 60; private boolean _labelsEnabled = true; - private boolean _ipAddressEnabled = true; + private boolean _ipAddressEnabled = true; + private boolean _localhostRefreshEnable = false; private int _waitBeforeShutdown = 5000; private String _proxyHost = "localhost"; private int _proxyPort = -1; private String _proxyUsername; private String _proxyPassword; + private ProxyConfiguration _proxyConfiguration; private int _eventsQueueSize = 500; - private long _eventFlushIntervalInMillis = 30 * 1000; + private long _eventSendIntervalInMillis = 30 * (long)1000; private int _maxStringLength = 250; private boolean _destroyOnShutDown = true; private String _splitFile = null; + private FileTypeEnum _fileType = null; + private InputStream _inputStream = null; + private String _segmentDirectory = null; private IntegrationsConfig _integrationsConfig = null; private boolean _streamingEnabled = true; private int _authRetryBackoffBase = 1; @@ -349,15 +486,25 @@ public static final class Builder { private String _authServiceURL = AUTH_ENDPOINT; private String _streamingServiceURL = STREAMING_ENDPOINT; private String _telemetryURl = TELEMETRY_ENDPOINT; - private int _telemetryRefreshRate = 3600; + private int _telemetryRefreshRate = 600; + private final int _uniqueKeysRefreshRateInMemory = 900; + private final int _uniqueKeysRefreshRateRedis = 300; + private final int _filterUniqueKeysRefreshRate = 86400; private int _onDemandFetchRetryDelayMs = 50; private final int _onDemandFetchMaxRetries = 10; private final int _failedAttemptsBeforeLogging = 10; - private final boolean _cdnDebugLogging = true; private OperationMode _operationMode = OperationMode.STANDALONE; private long _validateAfterInactivityInMillis = 1000; + private static final long STARTING_SYNC_CALL_BACKOFF_BASE_MS = 1000; //backoff base starting at 1 seconds private CustomStorageWrapper _customStorageWrapper; private StorageMode 
_storageMode = StorageMode.MEMORY; + private final long _lastSeenCacheSize = 500000; + private ThreadFactory _threadFactory; + private HashSet _flagSetsFilter = new HashSet<>(); + private int _invalidSetsCount = 0; + private CustomHeaderDecorator _customHeaderDecorator = null; + private CustomHttpModule _alternativeHTTPModule = null; + private FallbackTreatmentsConfiguration _fallbackTreatments; public Builder() { } @@ -397,7 +544,7 @@ public Builder eventsQueueSize(int eventsQueueSize) { * @return this builder */ public Builder eventFlushIntervalInMillis(long eventFlushIntervalInMillis) { - _eventFlushIntervalInMillis = eventFlushIntervalInMillis; + _eventSendIntervalInMillis = eventFlushIntervalInMillis; return this; } @@ -617,10 +764,14 @@ public Builder waitBeforeShutdown(int waitTime) { /** * The host location of the proxy. Default is localhost. + * @deprecated + * This method is deprecated. + *

Use {@link ProxyConfiguration)} instead. * * @param proxyHost location of the proxy * @return this builder */ + @Deprecated public Builder proxyHost(String proxyHost) { _proxyHost = proxyHost; return this; @@ -628,10 +779,14 @@ public Builder proxyHost(String proxyHost) { /** * The port of the proxy. Default is -1. + * @deprecated + * This method is deprecated. + *

Use {@link ProxyConfiguration)} instead. * * @param proxyPort port for the proxy * @return this builder */ + @Deprecated public Builder proxyPort(int proxyPort) { _proxyPort = proxyPort; return this; @@ -639,10 +794,14 @@ public Builder proxyPort(int proxyPort) { /** * Set the username for authentication against the proxy (if proxy settings are enabled). (Optional). + * @deprecated + * This method is deprecated. + *

Use {@link ProxyConfiguration)} instead. * * @param proxyUsername * @return this builder */ + @Deprecated public Builder proxyUsername(String proxyUsername) { _proxyUsername = proxyUsername; return this; @@ -659,6 +818,17 @@ public Builder proxyPassword(String proxyPassword) { return this; } + /** + * Set the mtls authentication against the proxy (if proxy settings are enabled). (Optional). + * + * @param proxyConfiguration + * @return this builder + */ + public Builder proxyConfiguration(ProxyConfiguration proxyConfiguration) { + _proxyConfiguration = proxyConfiguration; + return this; + } + /** * Disables running destroy() on shutdown by default. * @@ -689,6 +859,24 @@ public Builder splitFile(String splitFile) { return this; } + public Builder splitFile(InputStream inputStream, FileTypeEnum fileType) { + _fileType = fileType; + _inputStream = inputStream; + return this; + } + + /** + * Set the location of the directory where are the segment json files for localhost mode. + * This setting is optional. + * + * @param sementDirectory location + * @return this builder + */ + public Builder segmentDirectory(String sementDirectory){ + _segmentDirectory = sementDirectory; + return this; + } + /** * Sets up integrations for the Split SDK (Currently Impressions outgoing integrations supported only). * @param config @@ -709,6 +897,16 @@ public Builder streamingEnabled(boolean streamingEnabled) { return this; } + /** + * Set if refresh is enabled or not for localhost mode. Default is false. + * @param localhostRefreshEnable + * @return + */ + public Builder localhostRefreshEnable(boolean localhostRefreshEnable) { + _localhostRefreshEnable = localhostRefreshEnable; + return this; + } + /** * Set how many seconds to wait before re attempting to authenticate for push notifications. Default 1 second. Minimum 1 second. 
* @param authRetryBackoffBase @@ -782,7 +980,7 @@ public Builder operationMode(OperationMode mode) { /** * - * @param storage mode + * @param mode * @return this builder */ public Builder storageMode(StorageMode mode) { @@ -801,7 +999,63 @@ public Builder customStorageWrapper(CustomStorageWrapper customStorageWrapper) { return this; } - public SplitClientConfig build() { + /** + * Flag Sets Filter + * + * @param flagSetsFilter + * @return this builder + */ + public Builder flagSetsFilter(List flagSetsFilter) { + _flagSetsFilter = new LinkedHashSet<>(cleanup(flagSetsFilter)); + _invalidSetsCount = flagSetsFilter.size() - _flagSetsFilter.size(); + return this; + } + + /** + * User Custom Header Decorator + * + * @param customHeaderDecorator + * @return this builder + */ + public Builder customHeaderDecorator(CustomHeaderDecorator customHeaderDecorator) { + _customHeaderDecorator = customHeaderDecorator; + return this; + } + + /** + * Alternative Http Client + * + * @param alternativeHTTPModule + * @return this builder + */ + public Builder alternativeHTTPModule(CustomHttpModule alternativeHTTPModule) { + _alternativeHTTPModule = alternativeHTTPModule; + return this; + } + + /** + * Fallback Treatments + * + * @param fallbackTreatments + * @return this builder + */ + public Builder fallbackTreatments(FallbackTreatmentsConfiguration fallbackTreatments) { + _fallbackTreatments = fallbackTreatments; + return this; + } + + /** + * Thread Factory + * + * @param threadFactory + * @return this builder + */ + public Builder threadFactory(ThreadFactory threadFactory) { + _threadFactory = threadFactory; + return this; + } + + private void verifyRates() { if (_featuresRefreshRate < 5 ) { throw new IllegalArgumentException("featuresRefreshRate must be >= 5: " + _featuresRefreshRate); } @@ -810,35 +1064,20 @@ public SplitClientConfig build() { throw new IllegalArgumentException("segmentsRefreshRate must be >= 30: " + _segmentsRefreshRate); } - switch (_impressionsMode) { - case 
OPTIMIZED: - _impressionsRefreshRate = (_impressionsRefreshRate <= 0) ? 300 : Math.max(60, _impressionsRefreshRate); - break; - case DEBUG: - _impressionsRefreshRate = (_impressionsRefreshRate <= 0) ? 60 : _impressionsRefreshRate; - break; - } - - if (_eventFlushIntervalInMillis < 1000) { - throw new IllegalArgumentException("_eventFlushIntervalInMillis must be >= 1000: " + _eventFlushIntervalInMillis); + if (_eventSendIntervalInMillis < 1000) { + throw new IllegalArgumentException("_eventSendIntervalInMillis must be >= 1000: " + _eventSendIntervalInMillis); } if (_metricsRefreshRate < 30) { throw new IllegalArgumentException("metricsRefreshRate must be >= 30: " + _metricsRefreshRate); } - if (_impressionsQueueSize <=0 ) { - throw new IllegalArgumentException("impressionsQueueSize must be > 0: " + _impressionsQueueSize); - } - - if (_connectionTimeout <= 0) { - throw new IllegalArgumentException("connectionTimeOutInMs must be > 0: " + _connectionTimeout); - } - - if (_readTimeout <= 0) { - throw new IllegalArgumentException("readTimeout must be > 0: " + _readTimeout); + if(_telemetryRefreshRate < 60) { + throw new IllegalStateException("_telemetryRefreshRate must be >= 60"); } + } + private void verifyEndPoints() { if (_endpoint == null) { throw new IllegalArgumentException("endpoint must not be null"); } @@ -851,18 +1090,6 @@ public SplitClientConfig build() { throw new IllegalArgumentException("If endpoint is set, you must also set the events endpoint"); } - if (_numThreadsForSegmentFetch <= 0) { - throw new IllegalArgumentException("Number of threads for fetching segments MUST be greater than zero"); - } - - if (_authRetryBackoffBase <= 0) { - throw new IllegalArgumentException("authRetryBackoffBase: must be >= 1"); - } - - if (_streamingReconnectBackoffBase <= 0) { - throw new IllegalArgumentException("streamingReconnectBackoffBase: must be >= 1"); - } - if (_authServiceURL == null) { throw new IllegalArgumentException("authServiceURL must not be null"); } @@ 
-874,22 +1101,26 @@ public SplitClientConfig build() { if (_telemetryURl == null) { throw new IllegalArgumentException("telemetryURl must not be null"); } + } - if (_onDemandFetchRetryDelayMs <= 0) { - throw new IllegalStateException("streamingRetryDelay must be > 0"); + private void verifyAllModes() { + switch (_impressionsMode) { + case OPTIMIZED: + _impressionsRefreshRate = (_impressionsRefreshRate <= 0) ? 300 : Math.max(60, _impressionsRefreshRate); + break; + case DEBUG: + _impressionsRefreshRate = (_impressionsRefreshRate <= 0) ? 60 : _impressionsRefreshRate; + break; + case NONE: + break; } - if(_onDemandFetchMaxRetries <= 0) { - throw new IllegalStateException("_onDemandFetchMaxRetries must be > 0"); + if (_impressionsQueueSize <=0 ) { + throw new IllegalArgumentException("impressionsQueueSize must be > 0: " + _impressionsQueueSize); } - if(_storageMode == null) { _storageMode = StorageMode.MEMORY; } - - if(_telemetryRefreshRate < 60) { - throw new IllegalStateException("_telemetryRefreshRate must be >= 60"); - } if(OperationMode.CONSUMER.equals(_operationMode)){ if(_customStorageWrapper == null) { @@ -897,8 +1128,99 @@ public SplitClientConfig build() { } _storageMode = StorageMode.PLUGGABLE; } + } + + private void verifyNetworkParams() { + if (_connectionTimeout <= 0) { + throw new IllegalArgumentException("connectionTimeOutInMs must be > 0: " + _connectionTimeout); + } + + if (_readTimeout <= 0) { + throw new IllegalArgumentException("readTimeout must be > 0: " + _readTimeout); + } + if (_authRetryBackoffBase <= 0) { + throw new IllegalArgumentException("authRetryBackoffBase: must be >= 1"); + } + + if (_streamingReconnectBackoffBase <= 0) { + throw new IllegalArgumentException("streamingReconnectBackoffBase: must be >= 1"); + } + + if (_onDemandFetchRetryDelayMs <= 0) { + throw new IllegalStateException("streamingRetryDelay must be > 0"); + } + + if(_onDemandFetchMaxRetries <= 0) { + throw new IllegalStateException("_onDemandFetchMaxRetries must be > 
0"); + } + } + + private void verifyAlternativeClient() { + if (_alternativeHTTPModule != null && _streamingEnabled) { + throw new IllegalArgumentException("Streaming feature is not supported with Alternative HTTP Client"); + } + } + + private void verifyProxy() { + if (_proxyConfiguration == null) + return; + + if (_proxyPort != -1) { + _log.warn("Both the deprecated proxy configuration methods (`proxyHost`, `proxyPort`, `proxyUsername`, or `proxyPassword`) " + + "and the new `ProxyConfiguration` builder are being used. `ProxyConfiguration` will take precedence."); + } + + if (!(_proxyConfiguration.getHost().getSchemeName().equals(HttpScheme.HTTP) || + _proxyConfiguration.getHost().getSchemeName().equals(HttpScheme.HTTPS))) { + throw new IllegalArgumentException("Proxy scheme must be either http or https."); + } + + if ((_proxyConfiguration.getP12File() != null && _proxyConfiguration.getPassKey() == null) || + (_proxyConfiguration.getP12File() == null && _proxyConfiguration.getPassKey() != null)) { + throw new IllegalArgumentException("Proxy mTLS must have p12 file path and name, and pass phrase."); + } + } + + private void verifyFallbackTreatments() { + if (_fallbackTreatments == null) + return; + + FallbackTreatment processedGlobalFallbackTreatment = _fallbackTreatments.getGlobalFallbackTreatment(); + Map processedByFlagFallbackTreatment = _fallbackTreatments.getByFlagFallbackTreatment(); + + if (_fallbackTreatments.getGlobalFallbackTreatment() != null) { + processedGlobalFallbackTreatment = new FallbackTreatment( + isValidTreatment(_fallbackTreatments.getGlobalFallbackTreatment().getTreatment(), "Fallback treatments"), + _fallbackTreatments.getGlobalFallbackTreatment().getConfig()); + } + + if (_fallbackTreatments.getByFlagFallbackTreatment() != null) { + processedByFlagFallbackTreatment = isValidByFlagTreatment(_fallbackTreatments.getByFlagFallbackTreatment(), "config"); + } + _fallbackTreatments = new 
FallbackTreatmentsConfiguration(processedGlobalFallbackTreatment, processedByFlagFallbackTreatment); + } + + public SplitClientConfig build() { + + verifyRates(); + + verifyAllModes(); + + verifyEndPoints(); + + verifyNetworkParams(); + + verifyAlternativeClient(); + + verifyProxy(); + + verifyFallbackTreatments(); + + if (_numThreadsForSegmentFetch <= 0) { + throw new IllegalArgumentException("Number of threads for fetching segments MUST be greater than zero"); + } - return new SplitClientConfig( + return new SplitClientConfig( _endpoint, _eventsEndpoint, _featuresRefreshRate, @@ -914,15 +1236,20 @@ public SplitClientConfig build() { _debugEnabled, _labelsEnabled, _ipAddressEnabled, + _localhostRefreshEnable, _waitBeforeShutdown, proxy(), _proxyUsername, _proxyPassword, + _proxyConfiguration, _eventsQueueSize, - _eventFlushIntervalInMillis, + _eventSendIntervalInMillis, _maxStringLength, _destroyOnShutDown, _splitFile, + _fileType, + _inputStream, + _segmentDirectory, _integrationsConfig, _streamingEnabled, _authRetryBackoffBase, @@ -934,11 +1261,21 @@ public SplitClientConfig build() { _onDemandFetchRetryDelayMs, _onDemandFetchMaxRetries, _failedAttemptsBeforeLogging, - _cdnDebugLogging, _operationMode, _validateAfterInactivityInMillis, + STARTING_SYNC_CALL_BACKOFF_BASE_MS, _customStorageWrapper, - _storageMode); + _storageMode, + _uniqueKeysRefreshRateInMemory, + _uniqueKeysRefreshRateRedis, + _filterUniqueKeysRefreshRate, + _lastSeenCacheSize, + _threadFactory, + _flagSetsFilter, + _invalidSetsCount, + _customHeaderDecorator, + _alternativeHTTPModule, + _fallbackTreatments); } } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/SplitClientImpl.java b/client/src/main/java/io/split/client/SplitClientImpl.java index 3a455be2c..38ab5e6ba 100644 --- a/client/src/main/java/io/split/client/SplitClientImpl.java +++ b/client/src/main/java/io/split/client/SplitClientImpl.java @@ -1,20 +1,22 @@ package io.split.client; +import 
com.google.gson.GsonBuilder; import io.split.client.api.Key; import io.split.client.api.SplitResult; -import io.split.client.dtos.Event; +import io.split.client.dtos.*; import io.split.client.events.EventsStorageProducer; import io.split.client.impressions.Impression; import io.split.client.impressions.ImpressionsManager; +import io.split.client.interceptors.FlagSetsFilter; import io.split.engine.SDKReadinessGates; import io.split.engine.evaluator.Evaluator; import io.split.engine.evaluator.EvaluatorImp; import io.split.engine.evaluator.Labels; -import io.split.grammar.Treatments; import io.split.inputValidation.EventsValidator; import io.split.inputValidation.KeyValidator; import io.split.inputValidation.SplitNameValidator; import io.split.inputValidation.TrafficTypeValidator; +import io.split.inputValidation.ImpressionPropertiesValidator; import io.split.storages.SplitCacheConsumer; import io.split.telemetry.domain.enums.MethodEnum; import io.split.telemetry.storage.TelemetryConfigProducer; @@ -23,16 +25,20 @@ import org.slf4j.LoggerFactory; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; import java.util.concurrent.TimeoutException; import java.util.stream.Collectors; import java.util.stream.Stream; import static com.google.common.base.Preconditions.checkNotNull; +import static io.split.inputValidation.FlagSetsValidator.cleanup; /** * A basic implementation of SplitClient. 
@@ -40,7 +46,9 @@ * @author adil */ public final class SplitClientImpl implements SplitClient { - public static final SplitResult SPLIT_RESULT_CONTROL = new SplitResult(Treatments.CONTROL, null); + private static final String CLIENT_DESTROY = "Client has already been destroyed - no calls possible"; + private static final String CATCHALL_EXCEPTION = "CatchAll Exception"; + private static final String MATCHING_KEY = "matchingKey"; private static final Logger _log = LoggerFactory.getLogger(SplitClientImpl.class); @@ -53,6 +61,8 @@ public final class SplitClientImpl implements SplitClient { private final Evaluator _evaluator; private final TelemetryEvaluationProducer _telemetryEvaluationProducer; private final TelemetryConfigProducer _telemetryConfigProducer; + private final FlagSetsFilter _flagSetsFilter; + private final FallbackTreatmentCalculator _fallbackTreatmentCalculator; public SplitClientImpl(SplitFactory container, SplitCacheConsumer splitCacheConsumer, @@ -62,7 +72,9 @@ public SplitClientImpl(SplitFactory container, SDKReadinessGates gates, Evaluator evaluator, TelemetryEvaluationProducer telemetryEvaluationProducer, - TelemetryConfigProducer telemetryConfigProducer) { + TelemetryConfigProducer telemetryConfigProducer, + FlagSetsFilter flagSetsFilter, + FallbackTreatmentCalculator fallbackTreatmentCalculator) { _container = container; _splitCacheConsumer = checkNotNull(splitCacheConsumer); _impressionManager = checkNotNull(impressionManager); @@ -72,70 +84,321 @@ public SplitClientImpl(SplitFactory container, _evaluator = checkNotNull(evaluator); _telemetryEvaluationProducer = checkNotNull(telemetryEvaluationProducer); _telemetryConfigProducer = checkNotNull(telemetryConfigProducer); + _flagSetsFilter = flagSetsFilter; + _fallbackTreatmentCalculator = fallbackTreatmentCalculator; } @Override - public String getTreatment(String key, String split) { - return getTreatment(key, split, Collections.emptyMap()); + public String getTreatment(String key, String 
featureFlagName) { + return getTreatment(key, featureFlagName, Collections.emptyMap()); } @Override - public String getTreatment(String key, String split, Map attributes) { - return getTreatmentWithConfigInternal(key, null, split, attributes, MethodEnum.TREATMENT).treatment(); + public String getTreatment(String key, String featureFlagName, Map attributes) { + return getTreatmentWithConfigInternal(key, null, featureFlagName, attributes, new EvaluationOptions(null), MethodEnum.TREATMENT).treatment(); } @Override - public String getTreatment(Key key, String split, Map attributes) { - return getTreatmentWithConfigInternal(key.matchingKey(), key.bucketingKey(), split, attributes, MethodEnum.TREATMENT).treatment(); + public String getTreatment(Key key, String featureFlagName, Map attributes) { + return getTreatmentWithConfigInternal(key.matchingKey(), key.bucketingKey(), featureFlagName, attributes, new EvaluationOptions(null), + MethodEnum.TREATMENT).treatment(); } @Override - public SplitResult getTreatmentWithConfig(String key, String split) { - return getTreatmentWithConfigInternal(key, null, split, Collections.emptyMap(), MethodEnum.TREATMENT_WITH_CONFIG); + public SplitResult getTreatmentWithConfig(String key, String featureFlagName) { + return getTreatmentWithConfigInternal(key, null, featureFlagName, Collections.emptyMap(), new EvaluationOptions(null), + MethodEnum.TREATMENT_WITH_CONFIG); } @Override - public SplitResult getTreatmentWithConfig(String key, String split, Map attributes) { - return getTreatmentWithConfigInternal(key, null, split, attributes, MethodEnum.TREATMENT_WITH_CONFIG); + public SplitResult getTreatmentWithConfig(String key, String featureFlagName, Map attributes) { + return getTreatmentWithConfigInternal(key, null, featureFlagName, attributes, new EvaluationOptions(null), MethodEnum.TREATMENT_WITH_CONFIG); } @Override - public SplitResult getTreatmentWithConfig(Key key, String split, Map attributes) { - return 
getTreatmentWithConfigInternal(key.matchingKey(), key.bucketingKey(), split, attributes, MethodEnum.TREATMENT_WITH_CONFIG); + public SplitResult getTreatmentWithConfig(Key key, String featureFlagName, Map attributes) { + return getTreatmentWithConfigInternal(key.matchingKey(), key.bucketingKey(), featureFlagName, attributes, new EvaluationOptions(null), + MethodEnum.TREATMENT_WITH_CONFIG); } @Override - public Map getTreatments(String key, List splits) { - return getTreatments(key, splits, Collections.emptyMap()); + public Map getTreatments(String key, List featureFlagNames) { + return getTreatments(key, featureFlagNames, Collections.emptyMap()); } @Override - public Map getTreatments(String key, List splits, Map attributes) { - return getTreatmentsWithConfigInternal(key, null, splits, attributes, MethodEnum.TREATMENTS) + public Map getTreatments(String key, List featureFlagNames, Map attributes) { + return getTreatmentsWithConfigInternal(key, null, featureFlagNames, attributes, new EvaluationOptions(null), MethodEnum.TREATMENTS) .entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().treatment())); } @Override - public Map getTreatments(Key key, List splits, Map attributes) { - return getTreatmentsWithConfigInternal(key.matchingKey(), key.bucketingKey(), splits, attributes, MethodEnum.TREATMENTS) + public Map getTreatments(Key key, List featureFlagNames, Map attributes) { + return getTreatmentsWithConfigInternal(key.matchingKey(), key.bucketingKey(), featureFlagNames, attributes, + new EvaluationOptions(null), MethodEnum.TREATMENTS) .entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().treatment())); } @Override - public Map getTreatmentsWithConfig(String key, List splits) { - return getTreatmentsWithConfigInternal(key, null, splits, Collections.emptyMap(), MethodEnum.TREATMENTS_WITH_CONFIG); + public Map getTreatmentsWithConfig(String key, List featureFlagNames) { + return 
getTreatmentsWithConfigInternal(key, null, featureFlagNames, Collections.emptyMap(), new EvaluationOptions(null), + MethodEnum.TREATMENTS_WITH_CONFIG); } @Override - public Map getTreatmentsWithConfig(String key, List splits, Map attributes) { - return getTreatmentsWithConfigInternal(key, null, splits, attributes, MethodEnum.TREATMENTS_WITH_CONFIG); + public Map getTreatmentsWithConfig(String key, List featureFlagNames, Map attributes) { + return getTreatmentsWithConfigInternal(key, null, featureFlagNames, attributes, + new EvaluationOptions(null), MethodEnum.TREATMENTS_WITH_CONFIG); } @Override - public Map getTreatmentsWithConfig(Key key, List splits, Map attributes) { - return getTreatmentsWithConfigInternal(key.matchingKey(), key.bucketingKey(), splits, attributes, MethodEnum.TREATMENTS_WITH_CONFIG); + public Map getTreatmentsWithConfig(Key key, List featureFlagNames, Map attributes) { + return getTreatmentsWithConfigInternal(key.matchingKey(), key.bucketingKey(), featureFlagNames, attributes, new EvaluationOptions(null), + MethodEnum.TREATMENTS_WITH_CONFIG); + } + + @Override + public Map getTreatmentsByFlagSet(String key, String flagSet) { + return getTreatmentsBySetsWithConfigInternal(key, null, new ArrayList<>(Arrays.asList(flagSet)), + null, new EvaluationOptions(null), MethodEnum.TREATMENTS_BY_FLAG_SET).entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().treatment())); + } + + @Override + public Map getTreatmentsByFlagSet(String key, String flagSet, Map attributes) { + return getTreatmentsBySetsWithConfigInternal(key, null, new ArrayList<>(Arrays.asList(flagSet)), + attributes, new EvaluationOptions(null), MethodEnum.TREATMENTS_BY_FLAG_SET).entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().treatment())); + } + + @Override + public Map getTreatmentsByFlagSet(Key key, String flagSet, Map attributes) { + return getTreatmentsBySetsWithConfigInternal(key.matchingKey(), key.bucketingKey(), new 
ArrayList<>(Arrays.asList(flagSet)), + attributes, new EvaluationOptions(null), MethodEnum.TREATMENTS_BY_FLAG_SET).entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().treatment())); + } + + @Override + public Map getTreatmentsByFlagSets(String key, List flagSets) { + return getTreatmentsBySetsWithConfigInternal(key, null, flagSets, + null, new EvaluationOptions(null), MethodEnum.TREATMENTS_BY_FLAG_SETS).entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().treatment())); + } + + @Override + public Map getTreatmentsByFlagSets(String key, List flagSets, Map attributes) { + return getTreatmentsBySetsWithConfigInternal(key, null, flagSets, + attributes, new EvaluationOptions(null), MethodEnum.TREATMENTS_BY_FLAG_SETS).entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().treatment())); + } + + @Override + public Map getTreatmentsByFlagSets(Key key, List flagSets, Map attributes) { + return getTreatmentsBySetsWithConfigInternal(key.matchingKey(), key.bucketingKey(), flagSets, + attributes, new EvaluationOptions(null), MethodEnum.TREATMENTS_BY_FLAG_SETS).entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().treatment())); + } + + @Override + public Map getTreatmentsWithConfigByFlagSet(String key, String flagSet) { + return getTreatmentsBySetsWithConfigInternal(key, null, new ArrayList<>(Arrays.asList(flagSet)), + null, new EvaluationOptions(null), MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SET); + } + + @Override + public Map getTreatmentsWithConfigByFlagSet(String key, String flagSet, Map attributes) { + return getTreatmentsBySetsWithConfigInternal(key, null, new ArrayList<>(Arrays.asList(flagSet)), + attributes, new EvaluationOptions(null), MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SET); + } + + @Override + public Map getTreatmentsWithConfigByFlagSet(Key key, String flagSet, Map attributes) { + return 
getTreatmentsBySetsWithConfigInternal(key.matchingKey(), key.bucketingKey(), new ArrayList<>(Arrays.asList(flagSet)), + attributes, new EvaluationOptions(null), MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SET); + } + + @Override + public Map getTreatmentsWithConfigByFlagSets(String key, List flagSets) { + return getTreatmentsBySetsWithConfigInternal(key, null, flagSets, + null, new EvaluationOptions(null), MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SETS); + } + + @Override + public Map getTreatmentsWithConfigByFlagSets(String key, List flagSets, Map attributes) { + return getTreatmentsBySetsWithConfigInternal(key, null, flagSets, + attributes, new EvaluationOptions(null), MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SETS); + } + + @Override + public Map getTreatmentsWithConfigByFlagSets(Key key, List flagSets, Map attributes) { + return getTreatmentsBySetsWithConfigInternal(key.matchingKey(), key.bucketingKey(), flagSets, + attributes, new EvaluationOptions(null), MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SETS); + } + + @Override + public String getTreatment(String key, String featureFlagName, EvaluationOptions evaluationOptions) { + return getTreatment(key, featureFlagName, Collections.emptyMap(), evaluationOptions); + } + + @Override + public String getTreatment(String key, String featureFlagName, Map attributes, EvaluationOptions evaluationOptions) { + return getTreatmentWithConfigInternal(key, null, featureFlagName, attributes, evaluationOptions, MethodEnum.TREATMENT).treatment(); + } + + @Override + public String getTreatment(Key key, String featureFlagName, Map attributes, EvaluationOptions evaluationOptions) { + return getTreatmentWithConfigInternal(key.matchingKey(), key.bucketingKey(), featureFlagName, attributes, evaluationOptions, + MethodEnum.TREATMENT).treatment(); + } + + @Override + public Map getTreatments(String key, List featureFlagNames, + EvaluationOptions evaluationOptions) { + return getTreatments(key, featureFlagNames, Collections.emptyMap(), 
evaluationOptions); + } + + @Override + public Map getTreatments(String key, List featureFlagNames, Map attributes, + EvaluationOptions evaluationOptions) { + return getTreatmentsWithConfigInternal(key, null, featureFlagNames, attributes, evaluationOptions, MethodEnum.TREATMENTS) + .entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().treatment())); + } + + @Override + public Map getTreatments(Key key, List featureFlagNames, Map attributes, + EvaluationOptions evaluationOptions) { + return getTreatmentsWithConfigInternal(key.matchingKey(), key.bucketingKey(), featureFlagNames, attributes, evaluationOptions, + MethodEnum.TREATMENTS) + .entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().treatment())); + } + + @Override + public SplitResult getTreatmentWithConfig(String key, String featureFlagName, EvaluationOptions evaluationOptions) { + return getTreatmentWithConfigInternal(key, null, featureFlagName, Collections.emptyMap(), evaluationOptions, + MethodEnum.TREATMENT_WITH_CONFIG); + } + + @Override + public SplitResult getTreatmentWithConfig(Key key, String featureFlagName, Map attributes, + EvaluationOptions evaluationOptions) { + return getTreatmentWithConfigInternal(key.matchingKey(), key.bucketingKey(), featureFlagName, attributes, evaluationOptions, + MethodEnum.TREATMENT_WITH_CONFIG); + } + + @Override + public SplitResult getTreatmentWithConfig(String key, String featureFlagName, Map attributes, + EvaluationOptions evaluationOptions) { + return getTreatmentWithConfigInternal(key, null, featureFlagName, attributes, evaluationOptions, + MethodEnum.TREATMENT_WITH_CONFIG); + } + + @Override + public Map getTreatmentsWithConfig(String key, List featureFlagNames, Map attributes, + EvaluationOptions evaluationOptions) { + return getTreatmentsWithConfigInternal(key, null, featureFlagNames, attributes, evaluationOptions, + MethodEnum.TREATMENTS_WITH_CONFIG); + } + + @Override + public Map 
getTreatmentsWithConfig(String key, List featureFlagNames, EvaluationOptions evaluationOptions) { + return getTreatmentsWithConfigInternal(key, null, featureFlagNames, null, evaluationOptions, + MethodEnum.TREATMENTS_WITH_CONFIG); + } + + @Override + public Map getTreatmentsByFlagSet(String key, String flagSet, Map attributes, + EvaluationOptions evaluationOptions) { + return getTreatmentsBySetsWithConfigInternal(key, null, new ArrayList<>(Arrays.asList(flagSet)), + attributes, evaluationOptions, MethodEnum.TREATMENTS_BY_FLAG_SET).entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().treatment())); + } + + @Override + public Map getTreatmentsByFlagSets(String key, List flagSets, EvaluationOptions evaluationOptions) { + return getTreatmentsBySetsWithConfigInternal(key, null, flagSets, + null, evaluationOptions, MethodEnum.TREATMENTS_BY_FLAG_SETS).entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().treatment())); + } + + @Override + public Map getTreatmentsByFlagSets(String key, List flagSets, Map attributes, + EvaluationOptions evaluationOptions) { + return getTreatmentsBySetsWithConfigInternal(key, null, flagSets, + attributes, evaluationOptions, MethodEnum.TREATMENTS_BY_FLAG_SETS).entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().treatment())); + } + + @Override + public Map getTreatmentsWithConfigByFlagSet(String key, String flagSet, EvaluationOptions evaluationOptions) { + return getTreatmentsBySetsWithConfigInternal(key, null, new ArrayList<>(Arrays.asList(flagSet)), + null, evaluationOptions, MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SET); + } + + @Override + public Map getTreatmentsWithConfigByFlagSet(String key, String flagSet, Map attributes, + EvaluationOptions evaluationOptions) { + return getTreatmentsBySetsWithConfigInternal(key, null, new ArrayList<>(Arrays.asList(flagSet)), + attributes, evaluationOptions, MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SET); 
+ } + + @Override + public Map getTreatmentsWithConfigByFlagSets(String key, List flagSets, EvaluationOptions evaluationOptions) { + return getTreatmentsBySetsWithConfigInternal(key, null, flagSets, + null, evaluationOptions, MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SETS); + } + + @Override + public Map getTreatmentsWithConfigByFlagSets(String key, List flagSets, Map attributes, + EvaluationOptions evaluationOptions) { + return getTreatmentsBySetsWithConfigInternal(key, null, flagSets, + attributes, evaluationOptions, MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SETS); + } + + @Override + public Map getTreatmentsByFlagSet(String key, String flagSet, EvaluationOptions evaluationOptions) { + return getTreatmentsBySetsWithConfigInternal(key, null, new ArrayList<>(Arrays.asList(flagSet)), + null, evaluationOptions, MethodEnum.TREATMENTS_BY_FLAG_SET).entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().treatment())); + } + + @Override + public Map getTreatmentsWithConfig(Key key, List featureFlagNames, Map attributes, + EvaluationOptions evaluationOptions) { + return getTreatmentsWithConfigInternal(key.matchingKey(), key.bucketingKey(), featureFlagNames, attributes, evaluationOptions, + MethodEnum.TREATMENTS_WITH_CONFIG); + } + + @Override + public Map getTreatmentsByFlagSet(Key key, String flagSet, Map attributes, EvaluationOptions evaluationOptions) { + return getTreatmentsBySetsWithConfigInternal(key.matchingKey(), key.bucketingKey(), new ArrayList<>(Arrays.asList(flagSet)), + attributes, evaluationOptions, MethodEnum.TREATMENTS_BY_FLAG_SET).entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().treatment())); + } + + @Override + public Map getTreatmentsByFlagSets(Key key, List flagSets, Map attributes, + EvaluationOptions evaluationOptions) { + return getTreatmentsBySetsWithConfigInternal(key.matchingKey(), key.bucketingKey(), flagSets, + attributes, evaluationOptions, 
MethodEnum.TREATMENTS_BY_FLAG_SETS).entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().treatment())); + } + + @Override + public Map getTreatmentsWithConfigByFlagSet(Key key, String flagSet, Map attributes, + EvaluationOptions evaluationOptions) { + return getTreatmentsBySetsWithConfigInternal(key.matchingKey(), key.bucketingKey(), new ArrayList<>(Arrays.asList(flagSet)), + attributes, evaluationOptions, MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SET); + } + + @Override + public Map getTreatmentsWithConfigByFlagSets(Key key, List flagSets, Map attributes, + EvaluationOptions evaluationOptions) { + return getTreatmentsBySetsWithConfigInternal(key.matchingKey(), key.bucketingKey(), flagSets, + attributes, evaluationOptions, MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SETS); } @Override @@ -174,7 +437,7 @@ public void blockUntilReady() throws TimeoutException, InterruptedException { throw new IllegalArgumentException("setBlockUntilReadyTimeout must be positive but in config was: " + _config.blockUntilReady()); } if (!_gates.waitUntilInternalReady(_config.blockUntilReady())) { - throw new TimeoutException("SDK was not ready in " + _config.blockUntilReady()+ " milliseconds"); + throw new TimeoutException("SDK was not ready in " + _config.blockUntilReady() + " milliseconds"); } _log.debug(String.format("Split SDK ready in %d ms", (System.currentTimeMillis() - startTime))); } @@ -187,7 +450,7 @@ public void destroy() { private boolean track(Event event) { long initTime = System.currentTimeMillis(); if (_container.isDestroyed()) { - _log.error("Client has already been destroyed - no calls possible"); + _log.error(CLIENT_DESTROY); return false; } @@ -220,127 +483,237 @@ private boolean track(Event event) { return _eventsStorageProducer.track(event, propertiesResult.getEventSize()); } - private SplitResult getTreatmentWithConfigInternal(String matchingKey, String bucketingKey, String split, Map attributes, MethodEnum methodEnum) { + private 
SplitResult getTreatmentWithConfigInternal(String matchingKey, String bucketingKey, String featureFlag, Map attributes, EvaluationOptions evaluationOptions, MethodEnum methodEnum) { long initTime = System.currentTimeMillis(); try { - checkSDKReady(methodEnum); + checkSDKReady(methodEnum, Arrays.asList(featureFlag)); if (_container.isDestroyed()) { - _log.error("Client has already been destroyed - no calls possible"); - return SPLIT_RESULT_CONTROL; + _log.error(CLIENT_DESTROY); + return checkFallbackTreatment(featureFlag); } - if (!KeyValidator.isValid(matchingKey, "matchingKey", _config.maxStringLength(), methodEnum.getMethod())) { - return SPLIT_RESULT_CONTROL; + if (!KeyValidator.isValid(matchingKey, MATCHING_KEY, _config.maxStringLength(), methodEnum.getMethod())) { + return checkFallbackTreatment(featureFlag); } if (!KeyValidator.bucketingKeyIsValid(bucketingKey, _config.maxStringLength(), methodEnum.getMethod())) { - return SPLIT_RESULT_CONTROL; + return checkFallbackTreatment(featureFlag); } - Optional splitNameResult = SplitNameValidator.isValid(split, methodEnum.getMethod()); + Optional splitNameResult = SplitNameValidator.isValid(featureFlag, methodEnum.getMethod()); if (!splitNameResult.isPresent()) { - return SPLIT_RESULT_CONTROL; + return checkFallbackTreatment(featureFlag); } - split = splitNameResult.get(); - + featureFlag = splitNameResult.get(); long start = System.currentTimeMillis(); - EvaluatorImp.TreatmentLabelAndChangeNumber result = _evaluator.evaluateFeature(matchingKey, bucketingKey, split, attributes); - - if (result.treatment.equals(Treatments.CONTROL) && result.label.equals(Labels.DEFINITION_NOT_FOUND) && _gates.isSDKReady()) { - _log.warn( - "%s: you passed \"" + split + "\" that does not exist in this environment, " + - "please double check what Splits exist in the web console.", methodEnum.getMethod()); - return SPLIT_RESULT_CONTROL; + EvaluatorImp.TreatmentLabelAndChangeNumber result = _evaluator.evaluateFeature(matchingKey, 
bucketingKey, featureFlag, attributes); + String label = result.label; + if (result.label != null && result.label.contains(Labels.DEFINITION_NOT_FOUND)) { + if (_gates.isSDKReady()) { + _log.warn(String.format( + "%s: you passed \"%s\" that does not exist in this environment, " + + "please double check what feature flags exist in the Split user interface.", methodEnum.getMethod(), featureFlag)); + return checkFallbackTreatment(featureFlag); + } + label = result.label.replace(Labels.DEFINITION_NOT_FOUND, Labels.NOT_READY); } recordStats( matchingKey, bucketingKey, - split, + featureFlag, start, result.treatment, String.format("sdk.%s", methodEnum.getMethod()), - _config.labelsEnabled() ? result.label : null, + _config.labelsEnabled() ? label : null, result.changeNumber, - attributes + attributes, + result.track, + validateProperties(evaluationOptions.getProperties()) ); - _telemetryEvaluationProducer.recordLatency(methodEnum, System.currentTimeMillis()-initTime); + _telemetryEvaluationProducer.recordLatency(methodEnum, System.currentTimeMillis() - initTime); return new SplitResult(result.treatment, result.configurations); } catch (Exception e) { try { _telemetryEvaluationProducer.recordException(methodEnum); - _log.error("CatchAll Exception", e); + _log.error(CATCHALL_EXCEPTION, e); } catch (Exception e1) { // ignore } - return SPLIT_RESULT_CONTROL; + return checkFallbackTreatment(featureFlag); } } - private Map getTreatmentsWithConfigInternal(String matchingKey, String bucketingKey, List splits, Map attributes, MethodEnum methodEnum) { + private SplitResult checkFallbackTreatment(String featureName) { + FallbackTreatment fallbackTreatment = _fallbackTreatmentCalculator.resolve(featureName, ""); + String config = null; + if (fallbackTreatment.getConfig() != null) { + config = fallbackTreatment.getConfig(); + } + return new SplitResult(fallbackTreatment.getTreatment(), config); + } + + private String validateProperties(Map properties) { + if (properties == null){ + 
return null; + } + + ImpressionPropertiesValidator.ImpressionPropertiesValidatorResult iPValidatorResult = ImpressionPropertiesValidator.propertiesAreValid( + properties); + return new GsonBuilder().create().toJson(iPValidatorResult.getValue()); + } + + private Map getTreatmentsWithConfigInternal(String matchingKey, String bucketingKey, List featureFlagNames, + Map attributes, + EvaluationOptions evaluationOptions, MethodEnum methodEnum) { long initTime = System.currentTimeMillis(); - if(splits == null) { - _log.error("%s: split_names must be a non-empty array", methodEnum.getMethod()); + if (featureFlagNames == null) { + _log.error(String.format("%s: featureFlagNames must be a non-empty array", methodEnum.getMethod())); return new HashMap<>(); } - try{ - checkSDKReady(methodEnum); - if (_container.isDestroyed()) { - _log.error("Client has already been destroyed - no calls possible"); - return createMapControl(splits); - } - if (!KeyValidator.isValid(matchingKey, "matchingKey", _config.maxStringLength(), methodEnum.getMethod())) { - return createMapControl(splits); - } - - if (!KeyValidator.bucketingKeyIsValid(bucketingKey, _config.maxStringLength(), methodEnum.getMethod())) { - return createMapControl(splits); - } - else if(splits.isEmpty()) { - _log.error("%s: split_names must be a non-empty array", methodEnum.getMethod()); - return new HashMap<>(); + try { + checkSDKReady(methodEnum, featureFlagNames); + Map result = validateBeforeEvaluate(featureFlagNames, matchingKey, methodEnum, bucketingKey); + if(result != null) { + return result; } - splits = SplitNameValidator.areValid(splits, methodEnum.getMethod()); - Map evaluatorResult = _evaluator.evaluateFeatures(matchingKey, bucketingKey, splits, attributes); - List impressions = new ArrayList<>(); - Map result = new HashMap<>(); - evaluatorResult.keySet().forEach(t -> { - if (evaluatorResult.get(t).treatment.equals(Treatments.CONTROL) && evaluatorResult.get(t).label.equals(Labels.DEFINITION_NOT_FOUND) && 
_gates.isSDKReady()) { - _log.warn( - "%s: you passed \"" + t + "\" that does not exist in this environment, " + - "please double check what Splits exist in the web console.", methodEnum.getMethod()); - result.put(t, SPLIT_RESULT_CONTROL); - } - else { - result.put(t,new SplitResult(evaluatorResult.get(t).treatment, evaluatorResult.get(t).configurations)); - impressions.add(new Impression(matchingKey, bucketingKey, t, evaluatorResult.get(t).treatment, System.currentTimeMillis(), evaluatorResult.get(t).label, evaluatorResult.get(t).changeNumber, attributes)); - } - }); + featureFlagNames = SplitNameValidator.areValid(featureFlagNames, methodEnum.getMethod()); + Map evaluatorResult = _evaluator.evaluateFeatures(matchingKey, + bucketingKey, featureFlagNames, attributes); - _telemetryEvaluationProducer.recordLatency(methodEnum, System.currentTimeMillis()-initTime); - //Track of impressions - if(impressions.size() > 0) { - _impressionManager.track(impressions); - } - return result; + return processEvaluatorResult(evaluatorResult, methodEnum, matchingKey, bucketingKey, attributes, initTime, + validateProperties(evaluationOptions.getProperties())); } catch (Exception e) { try { _telemetryEvaluationProducer.recordException(methodEnum); - _log.error("CatchAll Exception", e); + _log.error(CATCHALL_EXCEPTION, e); } catch (Exception e1) { // ignore } - return createMapControl(splits); + return createMapControl(featureFlagNames); + } + } + + private Map getTreatmentsBySetsWithConfigInternal(String matchingKey, String bucketingKey, + List sets, Map attributes, + EvaluationOptions evaluationOptions, + MethodEnum methodEnum) { + + long initTime = System.currentTimeMillis(); + if (sets == null || sets.isEmpty()) { + _log.warn(String.format("%s: sets must be a non-empty array", methodEnum.getMethod())); + return new HashMap<>(); + } + Set cleanFlagSets = cleanup(sets); + cleanFlagSets = filterSetsAreInConfig(cleanFlagSets, methodEnum); + if (cleanFlagSets.isEmpty()) { + return new 
HashMap<>(); + } + checkSDKReady(methodEnum); + Map result = validateBeforeEvaluateByFlagSets(matchingKey, methodEnum,bucketingKey); + if(result != null) { + return result; + } + Map evaluatorResult = _evaluator.evaluateFeaturesByFlagSets(matchingKey, + bucketingKey, new ArrayList<>(cleanFlagSets), attributes); + + evaluatorResult.entrySet().forEach(flag -> { + if (flag.getValue().label != null && + flag.getValue().label.contains(io.split.engine.evaluator.Labels.EXCEPTION)) { + _telemetryEvaluationProducer.recordException(methodEnum); + } + }); + return processEvaluatorResult(evaluatorResult, methodEnum, matchingKey, bucketingKey, attributes, initTime, + validateProperties(evaluationOptions.getProperties())); + } + + private Map processEvaluatorResult(Map evaluatorResult, + MethodEnum methodEnum, String matchingKey, String bucketingKey, Map attributes, long initTime, String properties){ + List decoratedImpressions = new ArrayList<>(); + Map result = new HashMap<>(); + evaluatorResult.keySet().forEach(flag -> { + String label = evaluatorResult.get(flag).label; + if (evaluatorResult.get(flag).label != null && + evaluatorResult.get(flag).label.contains(Labels.DEFINITION_NOT_FOUND)) { + if (_gates.isSDKReady()) { + _log.warn(String.format("%s: you passed \"%s\" that does not exist in this environment please double check " + + "what feature flags exist in the Split user interface.", methodEnum.getMethod(), flag)); + result.put(flag, checkFallbackTreatment(flag)); + return; + } + label = evaluatorResult.get(flag).label.replace(Labels.DEFINITION_NOT_FOUND, Labels.NOT_READY); + } + result.put(flag, new SplitResult(evaluatorResult.get(flag).treatment, evaluatorResult.get(flag).configurations)); + decoratedImpressions.add( + new DecoratedImpression( + new Impression(matchingKey, bucketingKey, flag, evaluatorResult.get(flag).treatment, System.currentTimeMillis(), + label, evaluatorResult.get(flag).changeNumber, attributes, properties), + evaluatorResult.get(flag).track)); + 
}); + _telemetryEvaluationProducer.recordLatency(methodEnum, System.currentTimeMillis() - initTime); + if (!decoratedImpressions.isEmpty()) { + _impressionManager.track(decoratedImpressions); } + return result; + } + + private Map validateBeforeEvaluateByFlagSets(String matchingKey, MethodEnum methodEnum, + String bucketingKey) { + if (_container.isDestroyed()) { + _log.error(CLIENT_DESTROY); + return new HashMap<>(); + } + if (!KeyValidator.isValid(matchingKey, MATCHING_KEY, _config.maxStringLength(), methodEnum.getMethod())) { + return new HashMap<>(); + } + if (!KeyValidator.bucketingKeyIsValid(bucketingKey, _config.maxStringLength(), methodEnum.getMethod())) { + return new HashMap<>(); + } + return null; } - private void recordStats(String matchingKey, String bucketingKey, String split, long start, String result, - String operation, String label, Long changeNumber, Map attributes) { + private Map validateBeforeEvaluate(List featureFlagNames, String matchingKey, MethodEnum methodEnum, + String bucketingKey) { + if (_container.isDestroyed()) { + _log.error(CLIENT_DESTROY); + return createMapControl(featureFlagNames); + } + if (!KeyValidator.isValid(matchingKey, MATCHING_KEY, _config.maxStringLength(), methodEnum.getMethod())) { + return createMapControl(featureFlagNames); + } + if (!KeyValidator.bucketingKeyIsValid(bucketingKey, _config.maxStringLength(), methodEnum.getMethod())) { + return createMapControl(featureFlagNames); + } else if (featureFlagNames.isEmpty()) { + _log.error(String.format("%s: featureFlagNames must be a non-empty array", methodEnum.getMethod())); + return new HashMap<>(); + } + return null; + } + private Set filterSetsAreInConfig(Set sets, MethodEnum methodEnum) { + Set setsToReturn = new HashSet<>(); + for (String set : sets) { + if (!_flagSetsFilter.intersect(set)) { + _log.warn(String.format("%s: you passed %s which is not part of the configured FlagSetsFilter, " + + "ignoring Flag Set.", methodEnum, set)); + continue; + } + 
setsToReturn.add(set); + } + return setsToReturn; + } + private void recordStats(String matchingKey, String bucketingKey, String featureFlagName, long start, String result, + String operation, String label, Long changeNumber, Map attributes, boolean track, String properties) { try { - _impressionManager.track(Stream.of(new Impression(matchingKey, bucketingKey, split, result, System.currentTimeMillis(), label, changeNumber, attributes)).collect(Collectors.toList())); + _impressionManager.track(Stream.of( + new DecoratedImpression( + new Impression(matchingKey, bucketingKey, featureFlagName, result, System.currentTimeMillis(), + label, changeNumber, attributes, properties), + track)).collect(Collectors.toList())); } catch (Throwable t) { _log.error("Exception", t); } @@ -355,16 +728,26 @@ private Event createEvent(String key, String trafficType, String eventType) { return event; } + private void checkSDKReady(MethodEnum methodEnum, List featureFlagNames) { + String toPrint = featureFlagNames.size() == 1 ? featureFlagNames.get(0): String.join(",", featureFlagNames); + if (!_gates.isSDKReady()) { + _log.warn(String.format("%s: the SDK is not ready, results may be incorrect for feature flag %s. Make sure to wait for " + + "SDK readiness before using this method", methodEnum.getMethod(), toPrint)); + _telemetryConfigProducer.recordNonReadyUsage(); + } + } + private void checkSDKReady(MethodEnum methodEnum) { - if(!_gates.isSDKReady()){ - _log.warn(methodEnum.getMethod() + ": the SDK is not ready, results may be incorrect. Make sure to wait for SDK readiness before using this method"); + if (!_gates.isSDKReady()) { + _log.warn(String.format("%s: the SDK is not ready, results may be incorrect. 
Make sure to wait for " + + "SDK readiness before using this method", methodEnum.getMethod())); _telemetryConfigProducer.recordNonReadyUsage(); } } - private Map createMapControl(List splits) { + private Map createMapControl(List featureFlags) { Map result = new HashMap<>(); - splits.forEach(s -> result.put(s, SPLIT_RESULT_CONTROL)); + featureFlags.forEach(s -> result.put(s, checkFallbackTreatment(s))); return result; } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/SplitFactoryBuilder.java b/client/src/main/java/io/split/client/SplitFactoryBuilder.java index dfaba9b69..c2271ec4f 100644 --- a/client/src/main/java/io/split/client/SplitFactoryBuilder.java +++ b/client/src/main/java/io/split/client/SplitFactoryBuilder.java @@ -16,6 +16,8 @@ */ public class SplitFactoryBuilder { private static final Logger _log = LoggerFactory.getLogger(SplitFactoryBuilder.class); + static final String LOCALHOST = "localhost"; + /** * Instantiates a SplitFactory with default config @@ -38,8 +40,8 @@ public static SplitFactory build(String apiToken) throws IOException, URISyntaxE */ public static synchronized SplitFactory build(String apiToken, SplitClientConfig config) throws IOException, URISyntaxException { ApiKeyValidator.validate(apiToken); - if (LocalhostSplitFactory.LOCALHOST.equals(apiToken)) { - return LocalhostSplitFactory.createLocalhostSplitFactory(config); + if (LOCALHOST.equals(apiToken)) { + return new SplitFactoryImpl(config); } if (StorageMode.PLUGGABLE.equals(config.storageMode()) || StorageMode.REDIS.equals(config.storageMode())){ return new SplitFactoryImpl(apiToken, config, config.customStorageWrapper()); @@ -47,25 +49,6 @@ public static synchronized SplitFactory build(String apiToken, SplitClientConfig return new SplitFactoryImpl(apiToken, config); } - /** - * Instantiates a local Off-The-Grid SplitFactory - * - * @throws IOException if there were problems reading the override file from disk. 
- */ - public static SplitFactory local() throws IOException, URISyntaxException { - return LocalhostSplitFactory.createLocalhostSplitFactory(SplitClientConfig.builder().build()); - } - - /** - * Instantiates a local Off-The-Grid SplitFactory - * - * @return config Split config file - * @throws IOException if there were problems reading the override file from disk. - */ - public static SplitFactory local(SplitClientConfig config) throws IOException, URISyntaxException { - return LocalhostSplitFactory.createLocalhostSplitFactory(config); - } - public static void main(String... args) throws IOException, URISyntaxException { if (args.length != 1) { System.out.println("Usage: "); @@ -98,4 +81,4 @@ public static void main(String... args) throws IOException, URISyntaxException { _log.error(io.getMessage(), io); } } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/SplitFactoryImpl.java b/client/src/main/java/io/split/client/SplitFactoryImpl.java index a80ee88db..9ad38ef1b 100644 --- a/client/src/main/java/io/split/client/SplitFactoryImpl.java +++ b/client/src/main/java/io/split/client/SplitFactoryImpl.java @@ -1,26 +1,54 @@ package io.split.client; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import com.google.common.io.Files; +import io.split.client.dtos.BearerCredentialsProvider; +import io.split.client.dtos.FallbackTreatmentCalculatorImp; import io.split.client.dtos.Metadata; +import io.split.client.events.EventsSender; import io.split.client.events.EventsStorage; import io.split.client.events.EventsTask; import io.split.client.events.InMemoryEventsStorage; +import io.split.client.events.NoopEventsStorageImp; import io.split.client.impressions.AsynchronousImpressionListener; +import io.split.client.impressions.HttpImpressionsSender; +import io.split.client.impressions.ImpressionCounter; import io.split.client.impressions.ImpressionListener; +import io.split.client.impressions.ImpressionObserver; +import 
io.split.client.impressions.ImpressionsManager; import io.split.client.impressions.ImpressionsManagerImpl; +import io.split.client.impressions.ImpressionsSender; import io.split.client.impressions.ImpressionsStorage; import io.split.client.impressions.ImpressionsStorageConsumer; import io.split.client.impressions.ImpressionsStorageProducer; import io.split.client.impressions.InMemoryImpressionsStorage; -import io.split.client.interceptors.AuthorizationInterceptorFilter; +import io.split.client.impressions.PluggableImpressionSender; +import io.split.client.impressions.UniqueKeysTracker; +import io.split.client.impressions.UniqueKeysTrackerImp; +import io.split.client.impressions.strategy.ProcessImpressionDebug; +import io.split.client.impressions.strategy.ProcessImpressionNone; +import io.split.client.impressions.strategy.ProcessImpressionOptimized; +import io.split.client.impressions.strategy.ProcessImpressionStrategy; import io.split.client.interceptors.ClientKeyInterceptorFilter; +import io.split.client.interceptors.FlagSetsFilter; +import io.split.client.interceptors.FlagSetsFilterImpl; import io.split.client.interceptors.GzipDecoderResponseInterceptor; import io.split.client.interceptors.GzipEncoderRequestInterceptor; import io.split.client.interceptors.SdkMetadataInterceptorFilter; +import io.split.client.utils.FileInputStreamProvider; +import io.split.client.utils.FileTypeEnum; +import io.split.client.utils.InputStreamProvider; import io.split.client.utils.SDKMetadata; +import io.split.client.utils.StaticContentInputStreamProvider; import io.split.engine.SDKReadinessGates; +import io.split.engine.common.ConsumerSyncManager; +import io.split.engine.common.ConsumerSynchronizer; +import io.split.engine.common.LocalhostSyncManager; +import io.split.engine.common.LocalhostSynchronizer; +import io.split.engine.common.SplitAPI; +import io.split.engine.common.SplitTasks; import io.split.engine.common.SyncManager; import io.split.engine.common.SyncManagerImp; +import 
io.split.engine.common.Synchronizer; import io.split.engine.evaluator.Evaluator; import io.split.engine.evaluator.EvaluatorImp; import io.split.engine.experiments.SplitChangeFetcher; @@ -28,32 +56,43 @@ import io.split.engine.experiments.SplitFetcherImp; import io.split.engine.experiments.SplitParser; import io.split.engine.experiments.SplitSynchronizationTask; +import io.split.engine.experiments.RuleBasedSegmentParser; import io.split.engine.segments.SegmentChangeFetcher; import io.split.engine.segments.SegmentSynchronizationTaskImp; import io.split.integrations.IntegrationsConfig; +import io.split.service.SplitHttpClientImpl; +import io.split.service.SplitHttpClient; + import io.split.storages.SegmentCache; import io.split.storages.SegmentCacheConsumer; import io.split.storages.SegmentCacheProducer; import io.split.storages.SplitCache; import io.split.storages.SplitCacheConsumer; import io.split.storages.SplitCacheProducer; +import io.split.storages.RuleBasedSegmentCache; +import io.split.storages.RuleBasedSegmentCacheProducer; +import io.split.storages.RuleBasedSegmentCacheConsumer; import io.split.storages.enums.OperationMode; import io.split.storages.memory.InMemoryCacheImp; import io.split.storages.memory.SegmentCacheInMemoryImpl; +import io.split.storages.memory.RuleBasedSegmentCacheInMemoryImp; import io.split.storages.pluggable.adapters.UserCustomEventAdapterProducer; import io.split.storages.pluggable.adapters.UserCustomImpressionAdapterConsumer; import io.split.storages.pluggable.adapters.UserCustomImpressionAdapterProducer; import io.split.storages.pluggable.adapters.UserCustomSegmentAdapterConsumer; import io.split.storages.pluggable.adapters.UserCustomSplitAdapterConsumer; import io.split.storages.pluggable.adapters.UserCustomTelemetryAdapterProducer; -import io.split.storages.pluggable.domain.SafeUserStorageWrapper; +import io.split.storages.pluggable.adapters.UserCustomRuleBasedSegmentAdapterConsumer; +import 
io.split.storages.pluggable.domain.UserStorageWrapper; import io.split.storages.pluggable.synchronizer.TelemetryConsumerSubmitter; import io.split.telemetry.storage.InMemoryTelemetryStorage; +import io.split.telemetry.storage.NoopTelemetryStorage; import io.split.telemetry.storage.TelemetryStorage; import io.split.telemetry.storage.TelemetryStorageProducer; import io.split.telemetry.synchronizer.TelemetryInMemorySubmitter; import io.split.telemetry.synchronizer.TelemetrySyncTask; import io.split.telemetry.synchronizer.TelemetrySynchronizer; + import org.apache.hc.client5.http.auth.AuthScope; import org.apache.hc.client5.http.auth.Credentials; import org.apache.hc.client5.http.auth.UsernamePasswordCredentials; @@ -68,35 +107,40 @@ import org.apache.hc.client5.http.impl.routing.DefaultProxyRoutePlanner; import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactory; import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactoryBuilder; +import org.apache.hc.core5.http.HttpHost; import org.apache.hc.core5.http.io.SocketConfig; import org.apache.hc.core5.http.ssl.TLS; import org.apache.hc.core5.ssl.SSLContexts; import org.apache.hc.core5.util.TimeValue; import org.apache.hc.core5.util.Timeout; -import org.slf4j.Logger; import org.slf4j.LoggerFactory; import pluggable.CustomStorageWrapper; +import javax.net.ssl.SSLContext; import java.io.IOException; +import java.io.InputStream; import java.net.InetAddress; import java.net.URI; import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.List; -import java.util.Random; +import java.security.KeyStore; import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; import java.util.stream.Collectors; +import java.util.HashSet; +import java.util.List; +import java.util.ArrayList; + +import static io.split.client.utils.SplitExecutorFactory.buildExecutorService; public class SplitFactoryImpl implements SplitFactory { - private static final Logger _log = 
LoggerFactory.getLogger(SplitFactory.class); + private static final org.slf4j.Logger _log = LoggerFactory.getLogger(SplitFactoryImpl.class); + private static final String LEGACY_LOG_MESSAGE = "The sdk initialize in localhost mode using Legacy file. The splitFile or " + + + "inputStream are not added to the config."; private final static long SSE_CONNECT_TIMEOUT = 30000; private final static long SSE_SOCKET_TIMEOUT = 70000; - private static Random RANDOM = new Random(); - private final SDKReadinessGates _gates; - private final ImpressionsManagerImpl _impressionsManager; + private final ImpressionsManager _impressionsManager; private final Evaluator _evaluator; private final String _apiToken; @@ -104,7 +148,7 @@ public class SplitFactoryImpl implements SplitFactory { private final SplitClient _client; private final SplitManager _manager; - //Cache + // Cache private final SplitCacheConsumer _splitCache; private final SegmentCacheConsumer _segmentCache; @@ -112,26 +156,28 @@ public class SplitFactoryImpl implements SplitFactory { private final ApiKeyCounter _apiKeyCounter; private final TelemetryStorageProducer _telemetryStorageProducer; private final TelemetrySynchronizer _telemetrySynchronizer; - private final long _startTime; + private long _startTime; private final SDKMetadata _sdkMetadata; - private final OperationMode _operationMode; + private OperationMode _operationMode; - //Depending on mode are not mandatory + // Depending on mode are not mandatory private final TelemetrySyncTask _telemetrySyncTask; private final SegmentSynchronizationTaskImp _segmentSynchronizationTaskImp; private final SplitFetcher _splitFetcher; private final SplitSynchronizationTask _splitSynchronizationTask; private final EventsTask _eventsTask; private final SyncManager _syncManager; - private final CloseableHttpClient _httpclient; - private final SafeUserStorageWrapper _safeUserStorageWrapper; + private SplitHttpClient _splitHttpClient; + private final UserStorageWrapper 
_userStorageWrapper; + private final ImpressionsSender _impressionsSender; private final URI _rootTarget; private final URI _eventsRootTarget; + private final UniqueKeysTracker _uniqueKeysTracker; + private RequestDecorator _requestDecorator; - - //Constructor for standalone mode - public SplitFactoryImpl(String apiToken, SplitClientConfig config) throws URISyntaxException { - _safeUserStorageWrapper = null; + // Constructor for standalone mode + public SplitFactoryImpl(String apiToken, SplitClientConfig config) throws URISyntaxException, IOException { + _userStorageWrapper = null; _operationMode = config.operationMode(); _startTime = System.currentTimeMillis(); _apiToken = apiToken; @@ -143,16 +189,23 @@ public SplitFactoryImpl(String apiToken, SplitClientConfig config) throws URISyn _telemetryStorageProducer = telemetryStorage; if (config.blockUntilReady() == -1) { - //BlockUntilReady not been set - _log.warn("no setBlockUntilReadyTimeout parameter has been set - incorrect control treatments could be logged” " + - "if no ready config has been set when building factory"); + // BlockUntilReady not been set + _log.warn( + "no setBlockUntilReadyTimeout parameter has been set - incorrect control treatments could be logged” " + + + "if no ready config has been set when building factory"); } // SDKReadinessGates _gates = new SDKReadinessGates(); + _requestDecorator = new RequestDecorator(config.customHeaderDecorator()); // HttpClient - _httpclient = buildHttpClient(apiToken, config, _sdkMetadata); + if (config.alternativeHTTPModule() == null) { + _splitHttpClient = buildSplitHttpClient(apiToken, config, _sdkMetadata, _requestDecorator); + } else { + _splitHttpClient = config.alternativeHTTPModule().createClient(apiToken, _sdkMetadata, _requestDecorator); + } // Roots _rootTarget = URI.create(config.endpoint()); @@ -160,39 +213,53 @@ public SplitFactoryImpl(String apiToken, SplitClientConfig config) throws URISyn // Cache Initialisations SegmentCache segmentCache = new 
SegmentCacheInMemoryImpl(); - SplitCache splitCache = new InMemoryCacheImp(); + RuleBasedSegmentCache ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); + FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(config.getSetsFilter()); + SplitCache splitCache = new InMemoryCacheImp(flagSetsFilter); ImpressionsStorage impressionsStorage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); _splitCache = splitCache; _segmentCache = segmentCache; - _telemetrySynchronizer = new TelemetryInMemorySubmitter(_httpclient, URI.create(config.telemetryURL()), telemetryStorage, splitCache, segmentCache, telemetryStorage, _startTime); + _telemetrySynchronizer = new TelemetryInMemorySubmitter(_splitHttpClient, URI.create(config.telemetryURL()), + telemetryStorage, + splitCache, _segmentCache, telemetryStorage, _startTime); // Segments - _segmentSynchronizationTaskImp = buildSegments(config, segmentCache, splitCache); + _segmentSynchronizationTaskImp = buildSegments(config, segmentCache, splitCache, ruleBasedSegmentCache); + SplitParser splitParser = new SplitParser(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); // SplitFetcher - _splitFetcher = buildSplitFetcher(splitCache, splitCache); + _splitFetcher = buildSplitFetcher(splitCache, splitParser, flagSetsFilter, + ruleBasedSegmentParser, ruleBasedSegmentCache, config.isSdkEndpointOverridden()); // SplitSynchronizationTask _splitSynchronizationTask = new SplitSynchronizationTask(_splitFetcher, splitCache, - findPollingPeriod(RANDOM, config.featuresRefreshRate())); + config.featuresRefreshRate(), + config.getThreadFactory()); + + // ImpressionSender + _impressionsSender = HttpImpressionsSender.create(_splitHttpClient, URI.create(config.eventsEndpoint()), + config.impressionsMode(), + _telemetryStorageProducer); + + // UniqueKeysTracker + _uniqueKeysTracker = createUniqueKeysTracker(config); // Impressions _impressionsManager = buildImpressionsManager(config, 
impressionsStorage, impressionsStorage); // EventClient EventsStorage eventsStorage = new InMemoryEventsStorage(config.eventsQueueSize(), _telemetryStorageProducer); - _eventsTask = EventsTask.create(_httpclient, - _eventsRootTarget, - config.eventsQueueSize(), - config.eventFlushIntervalInMillis(), - config.waitBeforeShutdown(), - _telemetryStorageProducer, eventsStorage, eventsStorage); - - _telemetrySyncTask = new TelemetrySyncTask(config.get_telemetryRefreshRate(), _telemetrySynchronizer); + EventsSender eventsSender = EventsSender.create(_splitHttpClient, _eventsRootTarget, _telemetryStorageProducer); + _eventsTask = EventsTask.create(config.eventSendIntervalInMillis(), eventsStorage, eventsSender, + config.getThreadFactory()); + _telemetrySyncTask = new TelemetrySyncTask(config.getTelemetryRefreshRate(), _telemetrySynchronizer, + config.getThreadFactory()); + FallbackTreatmentCalculatorImp fallbackTreatmentCalculatorImp = new FallbackTreatmentCalculatorImp(config.fallbackTreatments()); // Evaluator - _evaluator = new EvaluatorImp(splitCache, segmentCache); + _evaluator = new EvaluatorImp(splitCache, segmentCache, ruleBasedSegmentCache, fallbackTreatmentCalculatorImp); // SplitClient _client = new SplitClientImpl(this, @@ -202,28 +269,24 @@ public SplitFactoryImpl(String apiToken, SplitClientConfig config) throws URISyn config, _gates, _evaluator, - _telemetryStorageProducer, //TelemetryEvaluation instance - _telemetryStorageProducer); //TelemetryConfiguration instance + _telemetryStorageProducer, // TelemetryEvaluation instance + _telemetryStorageProducer, // TelemetryConfiguration instance + flagSetsFilter, + fallbackTreatmentCalculatorImp + ); // SplitManager _manager = new SplitManagerImpl(splitCache, config, _gates, _telemetryStorageProducer); // SyncManager - _syncManager = SyncManagerImp.build(config.streamingEnabled(), - _splitSynchronizationTask, - _splitFetcher, - _segmentSynchronizationTaskImp, - splitCache, - config.authServiceURL(), - _httpclient, 
- config.streamingServiceURL(), - config.authRetryBackoffBase(), - buildSSEdHttpClient(apiToken, config, _sdkMetadata), - segmentCache, - config.streamingRetryDelay(), - config.streamingFetchMaxRetries(), - config.failedAttemptsBeforeLogging(), - config.cdnDebugLogging(), _gates, _telemetryStorageProducer, _telemetrySynchronizer,config); + SplitTasks splitTasks = SplitTasks.build(_splitSynchronizationTask, _segmentSynchronizationTaskImp, + _impressionsManager, _eventsTask, _telemetrySyncTask, _uniqueKeysTracker); + SplitAPI splitAPI = SplitAPI.build(_splitHttpClient, buildSSEdHttpClient(apiToken, config, _sdkMetadata), + _requestDecorator); + + _syncManager = SyncManagerImp.build(splitTasks, _splitFetcher, splitCache, splitAPI, + segmentCache, _gates, _telemetryStorageProducer, _telemetrySynchronizer, config, splitParser, + ruleBasedSegmentParser, flagSetsFilter, ruleBasedSegmentCache); _syncManager.start(); // DestroyOnShutDown @@ -237,27 +300,32 @@ public SplitFactoryImpl(String apiToken, SplitClientConfig config) throws URISyn } } - - //Constructor for consumer mode - protected SplitFactoryImpl(String apiToken, SplitClientConfig config, CustomStorageWrapper customStorageWrapper) throws URISyntaxException { - //Variables that are not used in Consumer mode. - _telemetrySyncTask = null; + // Constructor for consumer mode + protected SplitFactoryImpl(String apiToken, SplitClientConfig config, CustomStorageWrapper customStorageWrapper) + throws URISyntaxException { + // Variables that are not used in Consumer mode. 
_segmentSynchronizationTaskImp = null; _splitFetcher = null; _splitSynchronizationTask = null; _eventsTask = null; - _syncManager = null; - _httpclient = null; + _splitHttpClient = null; _rootTarget = null; _eventsRootTarget = null; Metadata metadata = new Metadata(config.ipAddressEnabled(), SplitClientConfig.splitSdkVersion); - _safeUserStorageWrapper = new SafeUserStorageWrapper(customStorageWrapper); - UserCustomSegmentAdapterConsumer userCustomSegmentAdapterConsumer= new UserCustomSegmentAdapterConsumer(customStorageWrapper); - UserCustomSplitAdapterConsumer userCustomSplitAdapterConsumer = new UserCustomSplitAdapterConsumer(customStorageWrapper); - UserCustomImpressionAdapterConsumer userCustomImpressionAdapterConsumer = new UserCustomImpressionAdapterConsumer(); // TODO migrate impressions sender to Task instead manager and not instantiate Producer here. - UserCustomImpressionAdapterProducer userCustomImpressionAdapterProducer = new UserCustomImpressionAdapterProducer(customStorageWrapper, metadata); - UserCustomEventAdapterProducer userCustomEventAdapterProducer = new UserCustomEventAdapterProducer(customStorageWrapper, metadata); + _userStorageWrapper = new UserStorageWrapper(customStorageWrapper); + UserCustomSegmentAdapterConsumer userCustomSegmentAdapterConsumer = new UserCustomSegmentAdapterConsumer( + customStorageWrapper); + UserCustomSplitAdapterConsumer userCustomSplitAdapterConsumer = new UserCustomSplitAdapterConsumer( + customStorageWrapper); + // TODO migrate impressions sender to Task instead manager and not instantiate + // Producer here. 
+ UserCustomImpressionAdapterConsumer userCustomImpressionAdapterConsumer = new UserCustomImpressionAdapterConsumer(); + UserCustomImpressionAdapterProducer userCustomImpressionAdapterProducer = new UserCustomImpressionAdapterProducer( + customStorageWrapper, + metadata); + UserCustomEventAdapterProducer userCustomEventAdapterProducer = new UserCustomEventAdapterProducer( + customStorageWrapper, metadata); _operationMode = config.operationMode(); _sdkMetadata = createSdkMetadata(config.ipAddressEnabled(), SplitClientConfig.splitSdkVersion); @@ -271,19 +339,41 @@ protected SplitFactoryImpl(String apiToken, SplitClientConfig config, CustomStor _segmentCache = userCustomSegmentAdapterConsumer; if (config.blockUntilReady() == -1) { - //BlockUntilReady not been set - _log.warn("no setBlockUntilReadyTimeout parameter has been set - incorrect control treatments could be logged” " + - "if no ready config has been set when building factory"); + // BlockUntilReady not been set + _log.warn( + "no setBlockUntilReadyTimeout parameter has been set - incorrect control treatments could be logged” " + + + "if no ready config has been set when building factory"); } // SDKReadinessGates _gates = new SDKReadinessGates(); - _evaluator = new EvaluatorImp(userCustomSplitAdapterConsumer, userCustomSegmentAdapterConsumer); - _impressionsManager = buildImpressionsManager(config, userCustomImpressionAdapterConsumer, userCustomImpressionAdapterProducer); - _telemetrySynchronizer = new TelemetryConsumerSubmitter(customStorageWrapper, _sdkMetadata); - + UserCustomRuleBasedSegmentAdapterConsumer userCustomRuleBasedSegmentAdapterConsumer = + new UserCustomRuleBasedSegmentAdapterConsumer(customStorageWrapper); + FallbackTreatmentCalculatorImp fallbackTreatmentCalculatorImp = new FallbackTreatmentCalculatorImp(config.fallbackTreatments()); + _evaluator = new EvaluatorImp(userCustomSplitAdapterConsumer, userCustomSegmentAdapterConsumer, + userCustomRuleBasedSegmentAdapterConsumer, 
fallbackTreatmentCalculatorImp); + _impressionsSender = PluggableImpressionSender.create(customStorageWrapper); + _uniqueKeysTracker = createUniqueKeysTracker(config); + _impressionsManager = buildImpressionsManager(config, userCustomImpressionAdapterConsumer, + userCustomImpressionAdapterProducer); + _telemetrySyncTask = new TelemetrySyncTask(config.getTelemetryRefreshRate(), _telemetrySynchronizer, + config.getThreadFactory()); + + SplitTasks splitTasks = SplitTasks.build(null, null, + _impressionsManager, null, _telemetrySyncTask, _uniqueKeysTracker); + + // Synchronizer + Synchronizer synchronizer = new ConsumerSynchronizer(splitTasks); + FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(new HashSet<>()); + if (!config.getSetsFilter().isEmpty()) { + _log.warn( + "FlagSets filter is not applicable for Consumer modes where the SDK does not keep rollout data in sync. FlagSet " + + + "filter was discarded"); + } _client = new SplitClientImpl(this, userCustomSplitAdapterConsumer, _impressionsManager, @@ -291,13 +381,119 @@ protected SplitFactoryImpl(String apiToken, SplitClientConfig config, CustomStor config, _gates, _evaluator, - _telemetryStorageProducer, //TelemetryEvaluation instance - _telemetryStorageProducer); //TelemetryConfiguration instance + _telemetryStorageProducer, // TelemetryEvaluation instance + _telemetryStorageProducer, // TelemetryConfiguration instance + flagSetsFilter, + fallbackTreatmentCalculatorImp + ); + + // SyncManager + _syncManager = new ConsumerSyncManager(synchronizer); + _syncManager.start(); _manager = new SplitManagerImpl(userCustomSplitAdapterConsumer, config, _gates, _telemetryStorageProducer); manageSdkReady(config); } + // Localhost + protected SplitFactoryImpl(SplitClientConfig config) { + _userStorageWrapper = null; + _apiToken = "localhost"; + _apiKeyCounter = ApiKeyCounter.getApiKeyCounterInstance(); + _apiKeyCounter.add("localhost"); + _sdkMetadata = createSdkMetadata(config.ipAddressEnabled(), 
SplitClientConfig.splitSdkVersion); + _telemetrySynchronizer = null; + _telemetrySyncTask = null; + _eventsTask = null; + _splitHttpClient = null; + _impressionsSender = null; + _rootTarget = null; + _eventsRootTarget = null; + _uniqueKeysTracker = null; + _telemetryStorageProducer = new NoopTelemetryStorage(); + + SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); + FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(config.getSetsFilter()); + SplitCache splitCache = new InMemoryCacheImp(flagSetsFilter); + RuleBasedSegmentCache ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); + _splitCache = splitCache; + _gates = new SDKReadinessGates(); + _segmentCache = segmentCache; + + // SegmentFetcher + + SegmentChangeFetcher segmentChangeFetcher = new LocalhostSegmentFetcherNoop(); + if (config.segmentDirectory() != null) { + segmentChangeFetcher = new LocalhostSegmentChangeFetcher(config.segmentDirectory()); + } + + _segmentSynchronizationTaskImp = new SegmentSynchronizationTaskImp(segmentChangeFetcher, + config.segmentsRefreshRate(), + config.numThreadsForSegmentFetch(), + segmentCache, + _telemetryStorageProducer, + _splitCache, + config.getThreadFactory(), + ruleBasedSegmentCache); + + // SplitFetcher + SplitChangeFetcher splitChangeFetcher = createSplitChangeFetcher(config); + SplitParser splitParser = new SplitParser(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + + _splitFetcher = new SplitFetcherImp(splitChangeFetcher, splitParser, splitCache, _telemetryStorageProducer, + flagSetsFilter, ruleBasedSegmentParser, ruleBasedSegmentCache); + + // SplitSynchronizationTask + _splitSynchronizationTask = new SplitSynchronizationTask(_splitFetcher, splitCache, + config.featuresRefreshRate(), config.getThreadFactory()); + + _impressionsManager = new ImpressionsManager.NoOpImpressionsManager(); + + SplitTasks splitTasks = SplitTasks.build(_splitSynchronizationTask, _segmentSynchronizationTaskImp, + 
_impressionsManager, null, null, null); + + FallbackTreatmentCalculatorImp fallbackTreatmentCalculatorImp = new FallbackTreatmentCalculatorImp(config.fallbackTreatments()); + // Evaluator + _evaluator = new EvaluatorImp(splitCache, segmentCache, ruleBasedSegmentCache, fallbackTreatmentCalculatorImp); + + EventsStorage eventsStorage = new NoopEventsStorageImp(); + + // SplitClient + _client = new SplitClientImpl(this, + splitCache, + _impressionsManager, + eventsStorage, + config, + _gates, + _evaluator, + _telemetryStorageProducer, // TelemetryEvaluation instance + _telemetryStorageProducer, // TelemetryConfiguration instance + flagSetsFilter, + fallbackTreatmentCalculatorImp + ); + + // Synchronizer + Synchronizer synchronizer = new LocalhostSynchronizer(splitTasks, _splitFetcher, + config.localhostRefreshEnabled()); + + // SplitManager + _manager = new SplitManagerImpl(splitCache, config, _gates, _telemetryStorageProducer); + // SyncManager + _syncManager = new LocalhostSyncManager(synchronizer, _gates); + _syncManager.start(); + + // DestroyOnShutDown + if (config.destroyOnShutDown()) { + Thread shutdown = new Thread(() -> { + // Using the full path to avoid conflicting with Thread.destroy() + SplitFactoryImpl.this.destroy(); + }); + shutdown.setName("split-destroy-worker"); + Runtime.getRuntime().addShutdownHook(shutdown); + } + } + @Override public SplitClient client() { return _client; @@ -313,33 +509,17 @@ public synchronized void destroy() { if (isTerminated) { return; } - if(OperationMode.STANDALONE.equals(_operationMode)) { + try { _log.info("Shutdown called for split"); - try { - long splitCount = _splitCache.getAll().stream().count(); - long segmentCount = _segmentCache.getSegmentCount(); - long segmentKeyCount = _segmentCache.getKeyCount(); - _impressionsManager.close(); - _log.info("Successful shutdown of impressions manager"); - _eventsTask.close(); - _log.info("Successful shutdown of eventsTask"); - _segmentSynchronizationTaskImp.close(); - 
_log.info("Successful shutdown of segment fetchers"); - _splitSynchronizationTask.close(); - _log.info("Successful shutdown of splits"); - _syncManager.shutdown(); - _log.info("Successful shutdown of syncManager"); + _syncManager.shutdown(); + _log.info("Successful shutdown of syncManager"); + if (OperationMode.STANDALONE.equals(_operationMode)) { _telemetryStorageProducer.recordSessionLength(System.currentTimeMillis() - _startTime); - _telemetrySyncTask.stopScheduledTask(splitCount, segmentCount, segmentKeyCount); - _log.info("Successful shutdown of telemetry sync task"); - _httpclient.close(); - _log.info("Successful shutdown of httpclient"); - } catch (IOException e) { - _log.error("We could not shutdown split", e); + } else if (OperationMode.CONSUMER.equals(_operationMode)) { + _userStorageWrapper.disconnect(); } - } - else if(OperationMode.CONSUMER.equals(_operationMode)) { - _safeUserStorageWrapper.disconnect(); + } catch (IOException e) { + _log.error("We could not shutdown split", e); } _apiKeyCounter.remove(_apiToken); isTerminated = true; @@ -350,9 +530,14 @@ public boolean isDestroyed() { return isTerminated; } - private static CloseableHttpClient buildHttpClient(String apiToken, SplitClientConfig config, SDKMetadata sdkMetadata) { + protected static SplitHttpClient buildSplitHttpClient(String apiToken, SplitClientConfig config, + SDKMetadata sdkMetadata, RequestDecorator requestDecorator) + throws URISyntaxException, IOException { + + SSLContext sslContext = buildSSLContext(config); + SSLConnectionSocketFactory sslSocketFactory = SSLConnectionSocketFactoryBuilder.create() - .setSslContext(SSLContexts.createSystemDefault()) + .setSslContext(sslContext) .setTlsVersions(TLS.V_1_1, TLS.V_1_2) .build(); @@ -374,26 +559,30 @@ private static CloseableHttpClient buildHttpClient(String apiToken, SplitClientC HttpClientBuilder httpClientbuilder = HttpClients.custom() .setConnectionManager(cm) .setDefaultRequestConfig(requestConfig) - 
.addRequestInterceptorLast(AuthorizationInterceptorFilter.instance(apiToken)) - .addRequestInterceptorLast(SdkMetadataInterceptorFilter.instance(sdkMetadata)) .addRequestInterceptorLast(new GzipEncoderRequestInterceptor()) .addResponseInterceptorLast((new GzipDecoderResponseInterceptor())); // Set up proxy is it exists - if (config.proxy() != null) { + if (config.proxy() != null || config.proxyConfiguration() != null) { httpClientbuilder = setupProxy(httpClientbuilder, config); } - return httpClientbuilder.build(); + return SplitHttpClientImpl.create(httpClientbuilder.build(), + requestDecorator, + apiToken, + sdkMetadata); } - private static CloseableHttpClient buildSSEdHttpClient(String apiToken, SplitClientConfig config, SDKMetadata sdkMetadata) { + private static CloseableHttpClient buildSSEdHttpClient(String apiToken, SplitClientConfig config, + SDKMetadata sdkMetadata) throws IOException { RequestConfig requestConfig = RequestConfig.custom() .setConnectTimeout(Timeout.ofMilliseconds(SSE_CONNECT_TIMEOUT)) .build(); + SSLContext sslContext = buildSSLContext(config); + SSLConnectionSocketFactory sslSocketFactory = SSLConnectionSocketFactoryBuilder.create() - .setSslContext(SSLContexts.createSystemDefault()) + .setSslContext(sslContext) .setTlsVersions(TLS.V_1_1, TLS.V_1_2) .build(); @@ -413,55 +602,122 @@ private static CloseableHttpClient buildSSEdHttpClient(String apiToken, SplitCli .addRequestInterceptorLast(ClientKeyInterceptorFilter.instance(apiToken)); // Set up proxy is it exists - if (config.proxy() != null) { + if (config.proxy() != null || config.proxyConfiguration() != null) { httpClientbuilder = setupProxy(httpClientbuilder, config); } return httpClientbuilder.build(); } + private static SSLContext buildSSLContext(SplitClientConfig config) throws IOException, NullPointerException { + SSLContext sslContext; + if (config.proxyConfiguration() != null && config.proxyConfiguration().getP12File() != null) { + _log.debug("Proxy setup using mTLS"); + 
InputStream keystoreStream = null; + try { + KeyStore keyStore = KeyStore.getInstance("PKCS12"); + keystoreStream = config.proxyConfiguration().getP12File(); + keyStore.load(keystoreStream, config.proxyConfiguration().getPassKey().toCharArray()); + sslContext = SSLContexts.custom() + .loadKeyMaterial(keyStore, config.proxyConfiguration().getPassKey().toCharArray()) + .build(); + } catch (Exception e) { + _log.error("Exception caught while processing p12 file for Proxy mTLS auth: ", e); + _log.warn("Ignoring p12 mTLS config and switching to default context"); + sslContext = SSLContexts.createSystemDefault(); + } finally { + if (keystoreStream != null) { + keystoreStream.close(); + } + } + } else { + sslContext = SSLContexts.createSystemDefault(); + } + return sslContext; + } + private static HttpClientBuilder setupProxy(HttpClientBuilder httpClientbuilder, SplitClientConfig config) { _log.info("Initializing Split SDK with proxy settings"); - DefaultProxyRoutePlanner routePlanner = new DefaultProxyRoutePlanner(config.proxy()); - httpClientbuilder.setRoutePlanner(routePlanner); + if (config.proxyConfiguration() != null) { + return useProxyConfiguration(httpClientbuilder, config); + } else { + return useLegacyProxyConfiguration(httpClientbuilder, config); + } + } + private static HttpClientBuilder useLegacyProxyConfiguration(HttpClientBuilder httpClientbuilder, SplitClientConfig config) { + HttpHost proxyHost = config.proxy(); + addProxyHost(httpClientbuilder, proxyHost); if (config.proxyUsername() != null && config.proxyPassword() != null) { - _log.debug("Proxy setup using credentials"); - BasicCredentialsProvider credsProvider = new BasicCredentialsProvider(); - AuthScope siteScope = new AuthScope(config.proxy().getHostName(), config.proxy().getPort()); - Credentials siteCreds = new UsernamePasswordCredentials(config.proxyUsername(), config.proxyPassword().toCharArray()); - credsProvider.setCredentials(siteScope, siteCreds); - 
httpClientbuilder.setDefaultCredentialsProvider(credsProvider); + return addProxyBasicAuth(httpClientbuilder, proxyHost, config.proxyUsername(), config.proxyPassword()); + } + + return httpClientbuilder; + } + + private static HttpClientBuilder useProxyConfiguration(HttpClientBuilder httpClientbuilder, SplitClientConfig config) { + HttpHost proxyHost = config.proxyConfiguration().getHost(); + addProxyHost(httpClientbuilder, proxyHost); + if (config.proxyConfiguration().getProxyCredentialsProvider() == null) { + return httpClientbuilder; + } + + if (config.proxyConfiguration().getProxyCredentialsProvider() instanceof io.split.client.dtos.BasicCredentialsProvider) { + io.split.client.dtos.BasicCredentialsProvider basicAuth = + (io.split.client.dtos.BasicCredentialsProvider) config.proxyConfiguration().getProxyCredentialsProvider(); + return addProxyBasicAuth(httpClientbuilder, proxyHost, basicAuth.getUsername(), basicAuth.getPassword()); } - return httpClientbuilder; + _log.debug("Proxy setup using Bearer token"); + httpClientbuilder.setDefaultCredentialsProvider(new HttpClientDynamicCredentials( + (BearerCredentialsProvider) config.proxyConfiguration().getProxyCredentialsProvider())); + return httpClientbuilder; + } + + private static void addProxyHost(HttpClientBuilder httpClientbuilder, HttpHost proxyHost) { + DefaultProxyRoutePlanner routePlanner = new DefaultProxyRoutePlanner(proxyHost); + httpClientbuilder.setRoutePlanner(routePlanner); } - private static int findPollingPeriod(Random rand, int max) { - int min = max / 2; - return rand.nextInt((max - min) + 1) + min; + private static HttpClientBuilder addProxyBasicAuth(HttpClientBuilder httpClientbuilder, HttpHost proxyHost, String userName, String password) { + _log.debug("Proxy setup using Basic authentication"); + BasicCredentialsProvider credsProvider = new BasicCredentialsProvider(); + AuthScope siteScope = new AuthScope(proxyHost.getHostName(), proxyHost.getPort()); + Credentials siteCreds = new 
UsernamePasswordCredentials(userName, + password.toCharArray()); + credsProvider.setCredentials(siteScope, siteCreds); + httpClientbuilder.setDefaultCredentialsProvider(credsProvider); + return httpClientbuilder; } - private SegmentSynchronizationTaskImp buildSegments(SplitClientConfig config, SegmentCacheProducer segmentCacheProducer, SplitCacheConsumer splitCacheConsumer) throws URISyntaxException { - SegmentChangeFetcher segmentChangeFetcher = HttpSegmentChangeFetcher.create(_httpclient, _rootTarget, _telemetryStorageProducer); + private SegmentSynchronizationTaskImp buildSegments(SplitClientConfig config, + SegmentCacheProducer segmentCacheProducer, + SplitCacheConsumer splitCacheConsumer, RuleBasedSegmentCacheConsumer ruleBasedSegmentCache) throws URISyntaxException { + SegmentChangeFetcher segmentChangeFetcher = HttpSegmentChangeFetcher.create(_splitHttpClient, _rootTarget, + _telemetryStorageProducer); return new SegmentSynchronizationTaskImp(segmentChangeFetcher, - findPollingPeriod(RANDOM, config.segmentsRefreshRate()), + config.segmentsRefreshRate(), config.numThreadsForSegmentFetch(), - _gates, segmentCacheProducer, _telemetryStorageProducer, - splitCacheConsumer); + splitCacheConsumer, + config.getThreadFactory(), + ruleBasedSegmentCache); } - private SplitFetcher buildSplitFetcher(SplitCacheConsumer splitCacheConsumer, SplitCacheProducer splitCacheProducer) throws URISyntaxException { - SplitChangeFetcher splitChangeFetcher = HttpSplitChangeFetcher.create(_httpclient, _rootTarget, _telemetryStorageProducer); - SplitParser splitParser = new SplitParser(); - - return new SplitFetcherImp(splitChangeFetcher, splitParser, splitCacheConsumer, splitCacheProducer, _telemetryStorageProducer); + private SplitFetcher buildSplitFetcher(SplitCacheProducer splitCacheProducer, SplitParser splitParser, + FlagSetsFilter flagSetsFilter, RuleBasedSegmentParser ruleBasedSegmentParser, + RuleBasedSegmentCacheProducer ruleBasedSegmentCache, boolean isRootURIOverriden) 
throws URISyntaxException { + SplitChangeFetcher splitChangeFetcher = HttpSplitChangeFetcher.create(_splitHttpClient, _rootTarget, + _telemetryStorageProducer, isRootURIOverriden); + return new SplitFetcherImp(splitChangeFetcher, splitParser, splitCacheProducer, _telemetryStorageProducer, + flagSetsFilter, ruleBasedSegmentParser, ruleBasedSegmentCache); } - private ImpressionsManagerImpl buildImpressionsManager(SplitClientConfig config, ImpressionsStorageConsumer impressionsStorageConsumer, ImpressionsStorageProducer impressionsStorageProducer) throws URISyntaxException { + private ImpressionsManagerImpl buildImpressionsManager(SplitClientConfig config, + ImpressionsStorageConsumer impressionsStorageConsumer, + ImpressionsStorageProducer impressionsStorageProducer) throws URISyntaxException { List impressionListeners = new ArrayList<>(); if (config.integrationsConfig() != null) { config.integrationsConfig().getImpressionsListeners(IntegrationsConfig.Execution.ASYNC).stream() @@ -472,8 +728,30 @@ private ImpressionsManagerImpl buildImpressionsManager(SplitClientConfig config, .map(IntegrationsConfig.ImpressionListenerWithMeta::listener) .collect(Collectors.toCollection(() -> impressionListeners)); } - - return ImpressionsManagerImpl.instance(_httpclient, config, impressionListeners, _telemetryStorageProducer, impressionsStorageConsumer, impressionsStorageProducer); + ProcessImpressionStrategy processImpressionStrategy = null; + ImpressionCounter counter = new ImpressionCounter(); + ImpressionListener listener = !impressionListeners.isEmpty() + ? 
new ImpressionListener.FederatedImpressionListener(impressionListeners) + : null; + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(listener != null, _uniqueKeysTracker, counter); + + switch (config.impressionsMode()) { + case OPTIMIZED: + ImpressionObserver impressionObserver = new ImpressionObserver(config.getLastSeenCacheSize()); + processImpressionStrategy = new ProcessImpressionOptimized(listener != null, impressionObserver, + counter, _telemetryStorageProducer); + break; + case DEBUG: + impressionObserver = new ImpressionObserver(config.getLastSeenCacheSize()); + processImpressionStrategy = new ProcessImpressionDebug(listener != null, impressionObserver); + break; + case NONE: + processImpressionStrategy = processImpressionNone; + break; + } + return ImpressionsManagerImpl.instance(config, _telemetryStorageProducer, impressionsStorageConsumer, + impressionsStorageProducer, + _impressionsSender, processImpressionNone, processImpressionStrategy, counter, listener); } private SDKMetadata createSdkMetadata(boolean ipAddressEnabled, String splitSdkVersion) { @@ -493,12 +771,10 @@ private SDKMetadata createSdkMetadata(boolean ipAddressEnabled, String splitSdkV } private void manageSdkReady(SplitClientConfig config) { - ExecutorService executorService = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder() - .setNameFormat("SPLIT-SDKReadyForConsumer-%d") - .setDaemon(true) - .build()); + ExecutorService executorService = buildExecutorService(config.getThreadFactory(), + "SPLIT-SDKReadyForConsumer-%d"); executorService.submit(() -> { - while(!_safeUserStorageWrapper.connect()) { + while (!_userStorageWrapper.connect()) { try { Thread.currentThread().sleep(1000); } catch (InterruptedException e) { @@ -507,7 +783,70 @@ private void manageSdkReady(SplitClientConfig config) { } } _gates.sdkInternalReady(); - _telemetrySynchronizer.synchronizeConfig(config, System.currentTimeMillis(), 
ApiKeyCounter.getApiKeyCounterInstance().getFactoryInstances(), new ArrayList<>()); + _telemetrySynchronizer.synchronizeConfig(config, System.currentTimeMillis(), + ApiKeyCounter.getApiKeyCounterInstance().getFactoryInstances(), new ArrayList<>()); }); } + + private UniqueKeysTracker createUniqueKeysTracker(SplitClientConfig config) { + int uniqueKeysRefreshRate = config.operationMode().equals(OperationMode.STANDALONE) + ? config.uniqueKeysRefreshRateInMemory() + : config.uniqueKeysRefreshRateRedis(); + return new UniqueKeysTrackerImp(_telemetrySynchronizer, uniqueKeysRefreshRate, + config.filterUniqueKeysRefreshRate(), + config.getThreadFactory()); + } + + private SplitChangeFetcher createSplitChangeFetcher(SplitClientConfig splitClientConfig) { + String splitFile = splitClientConfig.splitFile(); + InputStream inputStream = splitClientConfig.inputStream(); + FileTypeEnum fileType = splitClientConfig.fileType(); + InputStreamProvider inputStreamProvider; + if (splitFile != null || !isInputStreamConfigValid(inputStream, fileType)) { + if (splitFile == null) { + _log.warn("The InputStream config is invalid"); + } + fileType = getFileTypeFromFileName(splitFile); + inputStreamProvider = new FileInputStreamProvider(splitFile); + } else { + inputStreamProvider = new StaticContentInputStreamProvider(inputStream); + } + try { + switch (fileType) { + case JSON: + return new JsonLocalhostSplitChangeFetcher(inputStreamProvider); + case YAML: + case YML: + return new YamlLocalhostSplitChangeFetcher(inputStreamProvider); + default: + _log.warn(LEGACY_LOG_MESSAGE); + return new LegacyLocalhostSplitChangeFetcher(splitFile); + } + } catch (Exception e) { + _log.warn(String.format("There was no file named %s found. " + + "We created a split client that returns default treatments for all feature flags for all of your users. 
" + + + "If you wish to return a specific treatment for a feature flag, enter the name of that feature flag name and " + + + "treatment name separated by whitespace in %s; one pair per line. Empty lines or lines starting with '#' are " + + + "considered comments", + splitFile, splitFile), e); + } + _log.warn(LEGACY_LOG_MESSAGE); + return new LegacyLocalhostSplitChangeFetcher(splitFile); + } + + private Boolean isInputStreamConfigValid(InputStream inputStream, FileTypeEnum fileType) { + return inputStream != null && fileType != null; + } + + private FileTypeEnum getFileTypeFromFileName(String fileName) { + try { + return FileTypeEnum.valueOf(Files.getFileExtension(fileName).toUpperCase()); + } catch (Exception e) { + return FileTypeEnum.LEGACY; + } + + } } diff --git a/client/src/main/java/io/split/client/SplitManager.java b/client/src/main/java/io/split/client/SplitManager.java index e93c929bb..29691a5f9 100644 --- a/client/src/main/java/io/split/client/SplitManager.java +++ b/client/src/main/java/io/split/client/SplitManager.java @@ -10,7 +10,7 @@ */ public interface SplitManager { /** - * Retrieves the features (or Splits) that are currently registered with the + * Retrieves the feature flags that are currently registered with the * SDK. * * @return a List of SplitView or empty @@ -18,23 +18,23 @@ public interface SplitManager { List splits(); /** - * Returns the feature (or Split) registered with the SDK of this name. + * Returns the feature flag registered with the SDK of this name. * * @return SplitView or null */ - SplitView split(String featureName); + SplitView split(String featureFlagName); /** - * Returns the names of features (or Splits) registered with the SDK. + * Returns the names of feature flags registered with the SDK. * - * @return a List of String (Split Feature Names) or empty + * @return a List of String (Feature Flag Names) or empty */ List splitNames(); /** * The SDK kicks off background threads to download data necessary * for using the SDK. 
You can choose to block until the SDK has - * downloaded split definitions so that you will not get + * downloaded feature flag definitions so that you will not get * the 'control' treatment. *

* @@ -43,4 +43,4 @@ public interface SplitManager { *

*/ void blockUntilReady() throws TimeoutException, InterruptedException; -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/SplitManagerImpl.java b/client/src/main/java/io/split/client/SplitManagerImpl.java index f6fd4d6c5..d3e69cf7f 100644 --- a/client/src/main/java/io/split/client/SplitManagerImpl.java +++ b/client/src/main/java/io/split/client/SplitManagerImpl.java @@ -56,22 +56,23 @@ public List splits() { } @Override - public SplitView split(String featureName) { + public SplitView split(String featureFlagName) { if (!_gates.isSDKReady()) { { - _log.warn("split: the SDK is not ready, results may be incorrect. Make sure to wait for SDK readiness before using this method"); + _log.warn(String.format("the SDK is not ready, results may be incorrect for feature flag %s. Make sure to wait " + + "for SDK readiness before using this method", featureFlagName)); _telemetryConfigProducer.recordNonReadyUsage(); }} - Optional result = SplitNameValidator.isValid(featureName, "split"); + Optional result = SplitNameValidator.isValid(featureFlagName, "split"); if (!result.isPresent()) { return null; } - featureName = result.get(); + featureFlagName = result.get(); - ParsedSplit parsedSplit = _splitCacheConsumer.get(featureName); + ParsedSplit parsedSplit = _splitCacheConsumer.get(featureFlagName); if (parsedSplit == null) { if (_gates.isSDKReady()) { - _log.warn("split: you passed \"" + featureName + "\" that does not exist in this environment, " + - "please double check what Splits exist in the web console."); + _log.warn("split: you passed \"" + featureFlagName + "\" that does not exist in this environment, " + + "please double check what feature flags exist in the Split user interface."); } return null; } @@ -85,13 +86,7 @@ public List splitNames() { _log.warn("splitNames: the SDK is not ready, results may be incorrect. 
Make sure to wait for SDK readiness before using this method"); _telemetryConfigProducer.recordNonReadyUsage(); }} - List result = new ArrayList<>(); - Collection parsedSplits = _splitCacheConsumer.getAll(); - for (ParsedSplit split : parsedSplits) { - result.add(split.feature()); - } - - return result; + return _splitCacheConsumer.splitNames(); } @Override @@ -104,4 +99,4 @@ public void blockUntilReady() throws TimeoutException, InterruptedException { throw new TimeoutException("SDK was not ready in " + _config.blockUntilReady()+ " milliseconds"); } } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/YamlLocalhostSplitChangeFetcher.java b/client/src/main/java/io/split/client/YamlLocalhostSplitChangeFetcher.java new file mode 100644 index 000000000..b2dccfdca --- /dev/null +++ b/client/src/main/java/io/split/client/YamlLocalhostSplitChangeFetcher.java @@ -0,0 +1,87 @@ +package io.split.client; + +import io.split.client.dtos.Condition; +import io.split.client.dtos.ConditionType; +import io.split.client.dtos.Split; +import io.split.client.dtos.SplitChange; +import io.split.client.dtos.Status; +import io.split.client.dtos.ChangeDto; +import io.split.client.utils.InputStreamProvider; +import io.split.client.utils.LocalhostConstants; +import io.split.engine.common.FetchOptions; +import io.split.engine.experiments.SplitChangeFetcher; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.yaml.snakeyaml.Yaml; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import static io.split.client.utils.LocalhostSanitizer.createCondition; + +public class YamlLocalhostSplitChangeFetcher implements SplitChangeFetcher { + + private static final Logger _log = LoggerFactory.getLogger(YamlLocalhostSplitChangeFetcher.class); + private final InputStreamProvider _inputStreamProvider; + + public YamlLocalhostSplitChangeFetcher(InputStreamProvider 
inputStreamProvider) { + _inputStreamProvider = inputStreamProvider; + } + + @Override + public SplitChange fetch(long since, long sinceRBS, FetchOptions options) { + try { + Yaml yaml = new Yaml(); + List>> yamlSplits = yaml.load(_inputStreamProvider.get()); + SplitChange splitChange = new SplitChange(); + splitChange.featureFlags = new ChangeDto<>(); + splitChange.featureFlags.d = new ArrayList<>(); + for(Map> aSplit : yamlSplits) { + // The outter map is a map with one key, the split name + Map.Entry> splitAndValues = aSplit.entrySet().iterator().next(); + + Optional splitOptional = splitChange.featureFlags.d.stream(). + filter(split -> split.name.equals(splitAndValues.getKey())).findFirst(); + Split split = splitOptional.orElse(null); + if(split == null) { + split = new Split(); + split.name = splitAndValues.getKey(); + split.configurations = new HashMap<>(); + split.conditions = new ArrayList<>(); + } else { + splitChange.featureFlags.d.remove(split); + } + String treatment = (String) splitAndValues.getValue().get("treatment"); + String configurations = splitAndValues.getValue().get("config") != null ? 
(String) splitAndValues.getValue().get("config") : null; + Object keyOrKeys = splitAndValues.getValue().get("keys"); + split.configurations.put(treatment, configurations); + + Condition condition = createCondition(keyOrKeys, treatment); + if(condition.conditionType != ConditionType.ROLLOUT){ + split.conditions.add(0, condition); + } else { + split.conditions.add(condition); + } + split.status = Status.ACTIVE; + split.defaultTreatment = LocalhostConstants.CONTROL; + split.trafficTypeName = LocalhostConstants.USER; + split.trafficAllocation = LocalhostConstants.SIZE_100; + split.trafficAllocationSeed = LocalhostConstants.SIZE_1; + splitChange.featureFlags.d.add(split); + } + splitChange.featureFlags.t = since; + splitChange.featureFlags.s = since; + splitChange.ruleBasedSegments = new ChangeDto<>(); + splitChange.ruleBasedSegments.s = -1; + splitChange.ruleBasedSegments.t = -1; + splitChange.ruleBasedSegments.d = new ArrayList<>(); + return splitChange; + } catch (Exception e) { + throw new IllegalStateException("Problem fetching splitChanges using a yaml file: " + e.getMessage(), e); + } + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/YamlLocalhostSplitFile.java b/client/src/main/java/io/split/client/YamlLocalhostSplitFile.java deleted file mode 100644 index b9ece01c5..000000000 --- a/client/src/main/java/io/split/client/YamlLocalhostSplitFile.java +++ /dev/null @@ -1,65 +0,0 @@ -package io.split.client; - -import com.google.common.base.Preconditions; -import com.google.common.collect.Maps; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.yaml.snakeyaml.Yaml; - -import java.io.FileReader; -import java.io.IOException; -import java.util.List; -import java.util.Map; - -public class YamlLocalhostSplitFile extends AbstractLocalhostSplitFile { - - private static final Logger _log = LoggerFactory.getLogger(YamlLocalhostSplitFile.class); - - public YamlLocalhostSplitFile(LocalhostSplitFactory localhostSplitFactory, 
String directory, String filenameYaml) throws IOException { - super(localhostSplitFactory, directory, filenameYaml); - } - - public Map readOnSplits() throws IOException { - Map onSplits = Maps.newHashMap(); - try { - - Yaml yaml = new Yaml(); - List>> yamlSplits = yaml.load(new FileReader(_file)); - - for(Map> aSplit : yamlSplits) { - // The outter map is a map with one key, the split name - Map.Entry> splitAndValues = aSplit.entrySet().iterator().next(); - - SplitAndKey splitAndKey = null; - String splitName = splitAndValues.getKey(); - String treatment = (String) splitAndValues.getValue().get("treatment"); - String configurations = splitAndValues.getValue().get("config") != null? (String) splitAndValues.getValue().get("config") : null; - Object keyOrKeys = splitAndValues.getValue().get("keys"); - - if (keyOrKeys == null) { - splitAndKey = SplitAndKey.of(splitName); // Key in this line is splitName - onSplits.put(splitAndKey, LocalhostSplit.of(treatment, configurations)); - } else { - if (keyOrKeys instanceof String) { - splitAndKey = SplitAndKey.of(splitName, (String) keyOrKeys); - onSplits.put(splitAndKey, LocalhostSplit.of(treatment, configurations)); - } else { - Preconditions.checkArgument(keyOrKeys instanceof List, "'keys' is not a String nor a List."); - for (String aKey : (List) keyOrKeys) { - splitAndKey = SplitAndKey.of(splitName, aKey); - onSplits.put(splitAndKey, LocalhostSplit.of(treatment, configurations)); - } - } - } - } - } catch (Exception e) { - _log.warn("There was no file named " + _file.getPath() + " found. " + - "We created a split client that returns default treatments for all features for all of your users. " + - "If you wish to return a specific treatment for a feature, enter the name of that feature name and " + - "treatment name separated by whitespace in " + _file.getPath() + - "; one pair per line. 
Empty lines or lines starting with '#' are considered comments", e); - } - - return onSplits; - } -} diff --git a/client/src/main/java/io/split/client/api/SplitView.java b/client/src/main/java/io/split/client/api/SplitView.java index c053c8950..cc217fe1f 100644 --- a/client/src/main/java/io/split/client/api/SplitView.java +++ b/client/src/main/java/io/split/client/api/SplitView.java @@ -1,6 +1,7 @@ package io.split.client.api; import io.split.client.dtos.Partition; +import io.split.client.dtos.Prerequisites; import io.split.engine.experiments.ParsedCondition; import io.split.engine.experiments.ParsedSplit; @@ -24,6 +25,10 @@ public class SplitView { public List treatments; public long changeNumber; public Map configs; + public List sets; + public String defaultTreatment; + public boolean impressionsDisabled; + public List prerequisites; public static SplitView fromParsedSplit(ParsedSplit parsedSplit) { SplitView splitView = new SplitView(); @@ -31,6 +36,8 @@ public static SplitView fromParsedSplit(ParsedSplit parsedSplit) { splitView.trafficType = parsedSplit.trafficTypeName(); splitView.killed = parsedSplit.killed(); splitView.changeNumber = parsedSplit.changeNumber(); + splitView.sets = parsedSplit.flagSets() != null ? new ArrayList<>(parsedSplit.flagSets()): new ArrayList<>(); + splitView.defaultTreatment = parsedSplit.defaultTreatment(); Set treatments = new HashSet(); for (ParsedCondition condition : parsedSplit.parsedConditions()) { @@ -42,7 +49,10 @@ public static SplitView fromParsedSplit(ParsedSplit parsedSplit) { splitView.treatments = new ArrayList(treatments); splitView.configs = parsedSplit.configurations() == null? Collections.emptyMap() : parsedSplit.configurations() ; + splitView.impressionsDisabled = parsedSplit.impressionsDisabled(); + splitView.prerequisites = parsedSplit.prerequisitesMatcher() != null ? 
+ parsedSplit.prerequisitesMatcher().getPrerequisites(): new ArrayList<>(); return splitView; } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/dtos/BasicCredentialsProvider.java b/client/src/main/java/io/split/client/dtos/BasicCredentialsProvider.java new file mode 100644 index 000000000..b77c9f599 --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/BasicCredentialsProvider.java @@ -0,0 +1,7 @@ +package io.split.client.dtos; + +public interface BasicCredentialsProvider extends ProxyCredentialsProvider +{ + String getUsername(); + String getPassword(); +} diff --git a/client/src/main/java/io/split/client/dtos/BearerCredentialsProvider.java b/client/src/main/java/io/split/client/dtos/BearerCredentialsProvider.java new file mode 100644 index 000000000..d4e98c5ff --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/BearerCredentialsProvider.java @@ -0,0 +1,6 @@ +package io.split.client.dtos; + +public interface BearerCredentialsProvider extends ProxyCredentialsProvider +{ + String getToken(); +} diff --git a/client/src/main/java/io/split/client/dtos/BetweenStringMatcherData.java b/client/src/main/java/io/split/client/dtos/BetweenStringMatcherData.java new file mode 100644 index 000000000..3af1cc0c1 --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/BetweenStringMatcherData.java @@ -0,0 +1,11 @@ +package io.split.client.dtos; + +/** + * Metadata to support the between matcher. 
+ * + * @author adil + */ +public class BetweenStringMatcherData { + public String start; + public String end; +} diff --git a/client/src/main/java/io/split/client/dtos/ChangeDto.java b/client/src/main/java/io/split/client/dtos/ChangeDto.java new file mode 100644 index 000000000..596c05e0e --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/ChangeDto.java @@ -0,0 +1,9 @@ +package io.split.client.dtos; + +import java.util.List; + +public class ChangeDto { + public long s; + public long t; + public List d; +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/dtos/DecoratedImpression.java b/client/src/main/java/io/split/client/dtos/DecoratedImpression.java new file mode 100644 index 000000000..34b3b468f --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/DecoratedImpression.java @@ -0,0 +1,18 @@ +package io.split.client.dtos; + +import io.split.client.impressions.Impression; + +public class DecoratedImpression { + private Impression impression; + private boolean disabled; + + public DecoratedImpression(Impression impression, boolean disabled) { + this.impression = impression; + this.disabled = disabled; + } + + public Impression impression() { return this.impression;} + + public boolean disabled() { return this.disabled;} +} + diff --git a/client/src/main/java/io/split/client/dtos/EvaluationOptions.java b/client/src/main/java/io/split/client/dtos/EvaluationOptions.java new file mode 100644 index 000000000..7248f64e0 --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/EvaluationOptions.java @@ -0,0 +1,14 @@ +package io.split.client.dtos; + +import java.util.Map; + +public class EvaluationOptions { + private Map _properties; + + public EvaluationOptions(Map properties) { + _properties = properties; + } + public Map getProperties() { + return _properties; + } +} diff --git a/client/src/main/java/io/split/client/dtos/Excluded.java b/client/src/main/java/io/split/client/dtos/Excluded.java new file mode 100644 index 
000000000..e23afa4b0 --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/Excluded.java @@ -0,0 +1,8 @@ +package io.split.client.dtos; + +import java.util.List; + +public class Excluded { + public List keys; + public List segments; +} diff --git a/client/src/main/java/io/split/client/dtos/ExcludedSegments.java b/client/src/main/java/io/split/client/dtos/ExcludedSegments.java new file mode 100644 index 000000000..9e65fa60f --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/ExcludedSegments.java @@ -0,0 +1,27 @@ +package io.split.client.dtos; + +public class ExcludedSegments { + static final String STANDARD_TYPE = "standard"; + static final String RULE_BASED_TYPE = "rule-based"; + + public ExcludedSegments() {} + public ExcludedSegments(String type, String name) { + this.type = type; + this.name = name; + } + + public String type; + public String name; + + public boolean isStandard() { + return STANDARD_TYPE.equals(type); + } + + public boolean isRuleBased() { + return RULE_BASED_TYPE.equals(type); + } + + public String getSegmentName(){ + return name; + } +} diff --git a/client/src/main/java/io/split/client/dtos/FallbackTreatment.java b/client/src/main/java/io/split/client/dtos/FallbackTreatment.java new file mode 100644 index 000000000..291db4f48 --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/FallbackTreatment.java @@ -0,0 +1,37 @@ +package io.split.client.dtos; + +public class FallbackTreatment { + private final String _config; + private final String _treatment; + private final String _label; + + public FallbackTreatment(String treatment, String config) { + _treatment = treatment; + _config = config; + _label = null; + } + + public FallbackTreatment(String treatment) { + _treatment = treatment; + _config = null; + _label = null; + } + + public FallbackTreatment(String treatment, String config, String label) { + _treatment = treatment; + _config = config; + _label = label; + } + + public String getConfig() { + return _config; + } 
+ + public String getTreatment() { + return _treatment; + } + + public String getLabel() { + return _label; + } +} diff --git a/client/src/main/java/io/split/client/dtos/FallbackTreatmentCalculator.java b/client/src/main/java/io/split/client/dtos/FallbackTreatmentCalculator.java new file mode 100644 index 000000000..b172a1cb2 --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/FallbackTreatmentCalculator.java @@ -0,0 +1,6 @@ +package io.split.client.dtos; + +public interface FallbackTreatmentCalculator +{ + FallbackTreatment resolve(String flagName, String label); +} diff --git a/client/src/main/java/io/split/client/dtos/FallbackTreatmentCalculatorImp.java b/client/src/main/java/io/split/client/dtos/FallbackTreatmentCalculatorImp.java new file mode 100644 index 000000000..936abc493 --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/FallbackTreatmentCalculatorImp.java @@ -0,0 +1,40 @@ +package io.split.client.dtos; + +import io.split.grammar.Treatments; + +public class FallbackTreatmentCalculatorImp implements FallbackTreatmentCalculator +{ + private final FallbackTreatmentsConfiguration _fallbackTreatmentsConfiguration; + private final static String labelPrefix = "fallback - "; + + public FallbackTreatmentCalculatorImp(FallbackTreatmentsConfiguration fallbackTreatmentsConfiguration) { + _fallbackTreatmentsConfiguration = fallbackTreatmentsConfiguration; + } + + public FallbackTreatment resolve(String flagName, String label) { + if (_fallbackTreatmentsConfiguration != null) { + if (_fallbackTreatmentsConfiguration.getByFlagFallbackTreatment() != null + && _fallbackTreatmentsConfiguration.getByFlagFallbackTreatment().get(flagName) != null) { + return copyWithLabel(_fallbackTreatmentsConfiguration.getByFlagFallbackTreatment().get(flagName), + resolveLabel(label)); + } + if (_fallbackTreatmentsConfiguration.getGlobalFallbackTreatment() != null) { + return copyWithLabel(_fallbackTreatmentsConfiguration.getGlobalFallbackTreatment(), + 
resolveLabel(label)); + } + } + + return new FallbackTreatment(Treatments.CONTROL, null, label); + } + + private String resolveLabel(String label) { + if (label == null) { + return null; + } + return labelPrefix + label; + } + + private FallbackTreatment copyWithLabel(FallbackTreatment fallbackTreatment, String label) { + return new FallbackTreatment(fallbackTreatment.getTreatment(), fallbackTreatment.getConfig(), label); + } +} diff --git a/client/src/main/java/io/split/client/dtos/FallbackTreatmentsConfiguration.java b/client/src/main/java/io/split/client/dtos/FallbackTreatmentsConfiguration.java new file mode 100644 index 000000000..55a0a27d1 --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/FallbackTreatmentsConfiguration.java @@ -0,0 +1,65 @@ +package io.split.client.dtos; + +import java.util.HashMap; +import java.util.Map; + +public class FallbackTreatmentsConfiguration { + private final FallbackTreatment _globalFallbackTreatment; + private final Map _byFlagFallbackTreatment; + + public FallbackTreatmentsConfiguration(FallbackTreatment globalFallbackTreatment, Map byFlagFallbackTreatment) { + _globalFallbackTreatment = globalFallbackTreatment; + _byFlagFallbackTreatment = byFlagFallbackTreatment; + } + + public FallbackTreatmentsConfiguration(Map byFlagFallbackTreatment) { + _globalFallbackTreatment = null; + _byFlagFallbackTreatment = byFlagFallbackTreatment; + } + + public FallbackTreatmentsConfiguration(HashMap byFlagFallbackTreatment) { + _globalFallbackTreatment = null; + _byFlagFallbackTreatment = buildByFlagFallbackTreatment(byFlagFallbackTreatment); + } + + public FallbackTreatmentsConfiguration(FallbackTreatment globalFallbackTreatment) { + _globalFallbackTreatment = globalFallbackTreatment; + _byFlagFallbackTreatment = null; + } + + public FallbackTreatmentsConfiguration(String globalFallbackTreatment, Map byFlagFallbackTreatment) { + _globalFallbackTreatment = new FallbackTreatment(globalFallbackTreatment); + _byFlagFallbackTreatment = 
byFlagFallbackTreatment; + } + + public FallbackTreatmentsConfiguration(String globalFallbackTreatment) { + _globalFallbackTreatment = new FallbackTreatment(globalFallbackTreatment); + _byFlagFallbackTreatment = null; + } + + + public FallbackTreatmentsConfiguration(String globalFallbackTreatment, HashMap byFlagFallbackTreatment) { + _globalFallbackTreatment = new FallbackTreatment(globalFallbackTreatment); + _byFlagFallbackTreatment = buildByFlagFallbackTreatment(byFlagFallbackTreatment); + } + + public FallbackTreatmentsConfiguration(FallbackTreatment globalFallbackTreatment, HashMap byFlagFallbackTreatment) { + _globalFallbackTreatment = globalFallbackTreatment; + _byFlagFallbackTreatment = buildByFlagFallbackTreatment(byFlagFallbackTreatment); + } + + public FallbackTreatment getGlobalFallbackTreatment() { + return _globalFallbackTreatment; + } + + public Map getByFlagFallbackTreatment() { return _byFlagFallbackTreatment;} + + private Map buildByFlagFallbackTreatment(Map byFlagFallbackTreatment) { + Map result = new HashMap<>(); + for (Map.Entry entry : byFlagFallbackTreatment.entrySet()) { + result.put(entry.getKey(), new FallbackTreatment(entry.getValue())); + } + + return result; + } +} diff --git a/client/src/main/java/io/split/client/dtos/KeyImpression.java b/client/src/main/java/io/split/client/dtos/KeyImpression.java index 4d6a580c3..980fc1178 100644 --- a/client/src/main/java/io/split/client/dtos/KeyImpression.java +++ b/client/src/main/java/io/split/client/dtos/KeyImpression.java @@ -15,6 +15,7 @@ public class KeyImpression { /* package private */ static final String FIELD_TIME = "m"; /* package private */ static final String FIELD_CHANGE_NUMBER = "c"; /* package private */ static final String FIELD_PREVIOUS_TIME = "pt"; + /* package private */ static final String FIELD_PROPERTIES = "properties"; public transient String feature; // Non-serializable @@ -39,6 +40,9 @@ public class KeyImpression { @SerializedName(FIELD_PREVIOUS_TIME) public Long 
previousTime; + @SerializedName(FIELD_PROPERTIES) + public String properties; + @Override public boolean equals(Object o) { if (this == o) return true; @@ -50,6 +54,7 @@ public boolean equals(Object o) { if (!Objects.equals(feature, that.feature)) return false; if (!keyName.equals(that.keyName)) return false; if (!treatment.equals(that.treatment)) return false; + if (properties != null && !properties.equals(that.properties)) return false; if (bucketingKey == null) { return that.bucketingKey == null; @@ -77,6 +82,8 @@ public static KeyImpression fromImpression(Impression i) { ki.changeNumber = i.changeNumber(); ki.treatment = i.treatment(); ki.label = i.appliedRule(); + ki.previousTime = i.pt(); + ki.properties = i.properties(); return ki; } } diff --git a/client/src/main/java/io/split/client/dtos/Matcher.java b/client/src/main/java/io/split/client/dtos/Matcher.java index 12c824d2c..fc2c65155 100644 --- a/client/src/main/java/io/split/client/dtos/Matcher.java +++ b/client/src/main/java/io/split/client/dtos/Matcher.java @@ -13,6 +13,7 @@ public class Matcher { public WhitelistMatcherData whitelistMatcherData; public UnaryNumericMatcherData unaryNumericMatcherData; public BetweenMatcherData betweenMatcherData; + public BetweenStringMatcherData betweenStringMatcherData; public DependencyMatcherData dependencyMatcherData; public Boolean booleanMatcherData; public String stringMatcherData; diff --git a/client/src/main/java/io/split/client/dtos/MatcherType.java b/client/src/main/java/io/split/client/dtos/MatcherType.java index ec22baec7..22f22adb3 100644 --- a/client/src/main/java/io/split/client/dtos/MatcherType.java +++ b/client/src/main/java/io/split/client/dtos/MatcherType.java @@ -30,5 +30,15 @@ public enum MatcherType { EQUAL_TO_BOOLEAN, /* Dependency Matcher */ - IN_SPLIT_TREATMENT + IN_SPLIT_TREATMENT, + + /* Semver matchers */ + EQUAL_TO_SEMVER, + GREATER_THAN_OR_EQUAL_TO_SEMVER, + LESS_THAN_OR_EQUAL_TO_SEMVER, + IN_LIST_SEMVER, + BETWEEN_SEMVER, + + /* Rule 
based segment */ + IN_RULE_BASED_SEGMENT } diff --git a/client/src/main/java/io/split/client/dtos/Prerequisites.java b/client/src/main/java/io/split/client/dtos/Prerequisites.java new file mode 100644 index 000000000..644cb5fc4 --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/Prerequisites.java @@ -0,0 +1,12 @@ +package io.split.client.dtos; + +import com.google.gson.annotations.SerializedName; + +import java.util.List; + +public class Prerequisites { + @SerializedName("n") + public String featureFlagName; + @SerializedName("ts") + public List treatments; +} diff --git a/client/src/main/java/io/split/client/dtos/ProxyConfiguration.java b/client/src/main/java/io/split/client/dtos/ProxyConfiguration.java new file mode 100644 index 000000000..c1ed2b409 --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/ProxyConfiguration.java @@ -0,0 +1,66 @@ +package io.split.client.dtos; + +import org.apache.hc.core5.http.HttpHost; + +import java.io.InputStream; +import java.net.MalformedURLException; +import java.net.URL; + +public class ProxyConfiguration { + private final HttpHost _proxyHost; + private ProxyCredentialsProvider _provider; + private final InputStream _p12File; + private final String _passKey; + + private ProxyConfiguration(HttpHost proxyHost, + ProxyCredentialsProvider proxyCredentialsProvider, + InputStream p12File, String passKey) { + _proxyHost = proxyHost; + _p12File = p12File; + _passKey = passKey; + _provider = proxyCredentialsProvider; + } + + public HttpHost getHost() { return _proxyHost; } + public InputStream getP12File() { return _p12File; } + public String getPassKey() { return _passKey; } + public ProxyCredentialsProvider getProxyCredentialsProvider() { return _provider; } + + public static ProxyConfiguration.Builder builder() { + return new ProxyConfiguration.Builder(); + } + + public static class Builder { + private ProxyCredentialsProvider _provider; + private HttpHost _proxyHost; + private InputStream _p12File; + private 
String _passKey; + + public ProxyConfiguration.Builder credentialsProvider(ProxyCredentialsProvider provider) { + _provider = provider; + return this; + } + + public ProxyConfiguration.Builder url(URL url) throws MalformedURLException { + try { + _proxyHost = new HttpHost(url.getProtocol(), url.getHost(), url.getPort()); + } catch (Exception exc) { + throw new MalformedURLException("Proxy configuration is invalid. The proxy `url` is malformed"); + } + return this; + } + + public ProxyConfiguration.Builder mtls(InputStream p12File, String passKey) { + _passKey = passKey; + _p12File = p12File; + return this; + } + + public ProxyConfiguration build() { + if (_proxyHost == null) { + throw new IllegalArgumentException("Proxy configuration is invalid. The proxy `url` was not provided"); + } + return new ProxyConfiguration(_proxyHost, _provider, _p12File, _passKey); + } + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/dtos/ProxyCredentialsProvider.java b/client/src/main/java/io/split/client/dtos/ProxyCredentialsProvider.java new file mode 100644 index 000000000..e1653d5f4 --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/ProxyCredentialsProvider.java @@ -0,0 +1,4 @@ +package io.split.client.dtos; + +public interface ProxyCredentialsProvider +{} diff --git a/client/src/main/java/io/split/client/dtos/RequestContext.java b/client/src/main/java/io/split/client/dtos/RequestContext.java new file mode 100644 index 000000000..29fb7f77b --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/RequestContext.java @@ -0,0 +1,20 @@ +package io.split.client.dtos; + +import java.util.Map; +import java.util.List; +/** + * A structure returning a context for RequestDecorator class + */ + +public class RequestContext +{ + private final Map> _headers; + + public RequestContext(Map> headers) { + _headers = headers; + } + + public Map> headers() { + return _headers; + } +} diff --git 
a/client/src/main/java/io/split/client/dtos/RuleBasedSegment.java b/client/src/main/java/io/split/client/dtos/RuleBasedSegment.java new file mode 100644 index 000000000..56c4756de --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/RuleBasedSegment.java @@ -0,0 +1,37 @@ +package io.split.client.dtos; + +import java.util.ArrayList; +import java.util.List; + +public class RuleBasedSegment { + public String name; + public Status status; + public String trafficTypeName; + public long changeNumber; + public List conditions; + public Excluded excluded; + + @Override + public String toString() { + return "RuleBasedSegment{" + + "name='" + name + '\'' + + ", status=" + status + + ", trafficTypeName='" + trafficTypeName + '\'' + + ", changeNumber=" + changeNumber + '\'' + + excludedToString() + '\'' + + '}'; + } + + public String excludedToString() { + Excluded ts = excluded != null ? excluded : new Excluded(); + if (ts.keys == null) { + ts.keys = new ArrayList<>(); + } + + if (ts.segments == null) { + ts.segments = new ArrayList<>(); + } + + return ", excludedKeys=" + ts.keys + '\'' + ", excludedSegments=" + ts.segments; + } +} diff --git a/client/src/main/java/io/split/client/dtos/Split.java b/client/src/main/java/io/split/client/dtos/Split.java index 15e1a9457..1b9a01e38 100644 --- a/client/src/main/java/io/split/client/dtos/Split.java +++ b/client/src/main/java/io/split/client/dtos/Split.java @@ -1,5 +1,6 @@ package io.split.client.dtos; +import java.util.HashSet; import java.util.List; import java.util.Map; @@ -16,7 +17,9 @@ public class Split { public Integer trafficAllocationSeed; public int algo; public Map configurations; - + public HashSet sets; + public Boolean impressionsDisabled = null; + public List prerequisites; @Override public String toString() { diff --git a/client/src/main/java/io/split/client/dtos/SplitChange.java b/client/src/main/java/io/split/client/dtos/SplitChange.java index ba1130886..f3676bf75 100644 --- 
a/client/src/main/java/io/split/client/dtos/SplitChange.java +++ b/client/src/main/java/io/split/client/dtos/SplitChange.java @@ -1,9 +1,11 @@ package io.split.client.dtos; -import java.util.List; +import com.google.gson.annotations.SerializedName; public class SplitChange { - public List splits; - public long since; - public long till; + @SerializedName("ff") + public ChangeDto featureFlags; + @SerializedName("rbs") + public ChangeDto ruleBasedSegments; + public boolean clearCache; } diff --git a/client/src/main/java/io/split/client/dtos/SplitChangesOldPayloadDto.java b/client/src/main/java/io/split/client/dtos/SplitChangesOldPayloadDto.java new file mode 100644 index 000000000..aa292f918 --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/SplitChangesOldPayloadDto.java @@ -0,0 +1,34 @@ +package io.split.client.dtos; + +import com.google.gson.annotations.SerializedName; + +import java.util.ArrayList; +import java.util.List; + +public class SplitChangesOldPayloadDto { + @SerializedName("since") + public long s; + + @SerializedName("till") + public long t; + + @SerializedName("splits") + public List d; + + public SplitChange toSplitChange() { + SplitChange splitChange = new SplitChange(); + ChangeDto ff = new ChangeDto<>(); + ff.s = this.s; + ff.t = this.t; + ff.d = this.d; + ChangeDto rbs = new ChangeDto<>(); + rbs.d = new ArrayList<>(); + rbs.t = -1; + rbs.s = -1; + + splitChange.featureFlags = ff; + splitChange.ruleBasedSegments = rbs; + + return splitChange; + } +} diff --git a/client/src/main/java/io/split/client/dtos/SplitHttpResponse.java b/client/src/main/java/io/split/client/dtos/SplitHttpResponse.java new file mode 100644 index 000000000..259ed0794 --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/SplitHttpResponse.java @@ -0,0 +1,61 @@ +package io.split.client.dtos; + +import java.util.List; + +/** + * A structure for returning http call results information + */ +public class SplitHttpResponse { + private final Integer _statusCode; + 
private final String _statusMessage; + private final String _body; + private final Header[] _responseHeaders; + + public static class Header { + private String _name; + private List _values; + + public Header(String name, List values) { + _name = name; + _values = values; + } + + public String getName() { + return _name; + } + + public List getValues() { + return _values; + } + }; + + public SplitHttpResponse(Integer statusCode, String statusMessage, String body, Header[] headers) { + _statusCode = statusCode; + _statusMessage = statusMessage; + _body = body; + _responseHeaders = headers; + } + + public SplitHttpResponse(Integer statusCode, String statusMessage, String body, List

headers) { + _statusCode = statusCode; + _statusMessage = statusMessage; + _body = body; + _responseHeaders = headers.toArray(new Header[0]); + } + + public Integer statusCode() { + return _statusCode; + } + + public String statusMessage() { + return _statusMessage; + } + + public String body() { + return _body; + } + + public Header[] responseHeaders() { + return _responseHeaders; + } +} diff --git a/client/src/main/java/io/split/client/dtos/UniqueKeys.java b/client/src/main/java/io/split/client/dtos/UniqueKeys.java new file mode 100644 index 000000000..a0866a155 --- /dev/null +++ b/client/src/main/java/io/split/client/dtos/UniqueKeys.java @@ -0,0 +1,34 @@ +package io.split.client.dtos; + +import com.google.gson.annotations.SerializedName; + +import java.util.List; + +public class UniqueKeys { + + static final String KEYS = "keys"; + + @SerializedName(KEYS) + public List uniqueKeys; + + public UniqueKeys(List uniqueKeys) { + this.uniqueKeys = uniqueKeys; + } + + public static class UniqueKey { + static final String FEATURE = "f"; + + static final String FEATURE_KEYS = "ks"; + + @SerializedName(FEATURE) + public String featureName; + + @SerializedName(FEATURE_KEYS) + public List keysDto; + + public UniqueKey(String featureName, List keysDto) { + this.featureName = featureName; + this.keysDto = keysDto; + } + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/events/EventsSender.java b/client/src/main/java/io/split/client/events/EventsSender.java index 24a93459a..d83969dc8 100644 --- a/client/src/main/java/io/split/client/events/EventsSender.java +++ b/client/src/main/java/io/split/client/events/EventsSender.java @@ -1,37 +1,45 @@ package io.split.client.events; +import com.google.common.annotations.VisibleForTesting; import io.split.client.dtos.Event; +import io.split.client.utils.Utils; import io.split.service.HttpPostImp; -import io.split.telemetry.domain.enums.HTTPLatenciesEnum; +import io.split.service.SplitHttpClient; import 
io.split.telemetry.domain.enums.HttpParamsWrapper; -import io.split.telemetry.domain.enums.LastSynchronizationRecordsEnum; -import io.split.telemetry.domain.enums.ResourceEnum; import io.split.telemetry.storage.TelemetryRuntimeProducer; -import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; import java.net.URI; +import java.net.URISyntaxException; import java.util.List; -import static com.google.gson.internal.$Gson$Preconditions.checkNotNull; +import static com.google.common.base.Preconditions.checkNotNull; public class EventsSender { - private final URI _endpoint; - private final CloseableHttpClient _client; + + private static final String BULK_ENDPOINT_PATH = "api/events/bulk"; + private final URI _bulkEndpoint; + private final SplitHttpClient _client; private final TelemetryRuntimeProducer _telemetryRuntimeProducer; private final HttpPostImp _httpPostImp; - public static EventsSender create(CloseableHttpClient httpclient, URI eventsTarget, TelemetryRuntimeProducer telemetryRuntimeProducer) { - return new EventsSender(httpclient, eventsTarget, telemetryRuntimeProducer); + public static EventsSender create(SplitHttpClient splitHttpclient, URI eventsTarget, TelemetryRuntimeProducer telemetryRuntimeProducer) + throws URISyntaxException { + return new EventsSender(splitHttpclient, Utils.appendPath(eventsTarget, BULK_ENDPOINT_PATH), telemetryRuntimeProducer); } - EventsSender(CloseableHttpClient httpclient, URI eventsTarget, TelemetryRuntimeProducer telemetryRuntimeProducer) { - _client = checkNotNull(httpclient); - _endpoint = checkNotNull(eventsTarget); + EventsSender(SplitHttpClient splitHttpclient, URI eventsTarget, TelemetryRuntimeProducer telemetryRuntimeProducer) { + _client = splitHttpclient; + _bulkEndpoint = checkNotNull(eventsTarget); _telemetryRuntimeProducer = checkNotNull(telemetryRuntimeProducer); - _httpPostImp = new HttpPostImp(httpclient, telemetryRuntimeProducer); + _httpPostImp = new HttpPostImp(_client, telemetryRuntimeProducer); } 
public void sendEvents(List _data) { - _httpPostImp.post(_endpoint, _data, "Events ", HttpParamsWrapper.EVENTS); + _httpPostImp.post(_bulkEndpoint, _data, "Events ", HttpParamsWrapper.EVENTS); + } + + @VisibleForTesting + URI getBulkEndpoint() { + return _bulkEndpoint; } } diff --git a/client/src/main/java/io/split/client/events/EventsStorageConsumer.java b/client/src/main/java/io/split/client/events/EventsStorageConsumer.java index 3e6b613ef..488be9d9b 100644 --- a/client/src/main/java/io/split/client/events/EventsStorageConsumer.java +++ b/client/src/main/java/io/split/client/events/EventsStorageConsumer.java @@ -1,5 +1,9 @@ package io.split.client.events; +import java.util.List; + public interface EventsStorageConsumer { WrappedEvent pop(); + List popAll(); + boolean isFull(); } diff --git a/client/src/main/java/io/split/client/events/EventsTask.java b/client/src/main/java/io/split/client/events/EventsTask.java index 9d3c5fc45..fb71a7241 100644 --- a/client/src/main/java/io/split/client/events/EventsTask.java +++ b/client/src/main/java/io/split/client/events/EventsTask.java @@ -1,26 +1,18 @@ package io.split.client.events; -import com.google.common.annotations.VisibleForTesting; +import com.google.common.util.concurrent.ThreadFactoryBuilder; import io.split.client.dtos.Event; -import io.split.client.utils.Utils; -import io.split.telemetry.storage.TelemetryRuntimeProducer; -import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; +import io.split.client.utils.SplitExecutorFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.List; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadFactory; -import java.util.concurrent.ThreadPoolExecutor; import 
java.util.concurrent.TimeUnit; -import static java.lang.Thread.MIN_PRIORITY; import static com.google.common.base.Preconditions.checkNotNull; /** @@ -28,145 +20,71 @@ */ public class EventsTask{ - public static final Long MAX_SIZE_BYTES = 5 * 1024 * 1024L; - private final EventsStorageConsumer _eventsStorageConsumer; - private final EventsStorageProducer _eventsStorageProducer; private final EventsSender _eventsSender; - private final int _maxQueueSize; - private final long _flushIntervalMillis; - - private final ExecutorService _senderExecutor; - private final ExecutorService _consumerExecutor; + private final long _sendIntervalMillis; - private final ScheduledExecutorService _flushScheduler; - - static final Event SENTINEL = new Event(); + private final ScheduledExecutorService _senderScheduledExecutorService; private static final Logger _log = LoggerFactory.getLogger(EventsTask.class); - private final CloseableHttpClient _httpclient; - private final URI _target; - private final int _waitBeforeShutdown; - private final TelemetryRuntimeProducer _telemetryRuntimeProducer; - - ThreadFactory eventClientThreadFactory(final String name) { - return r -> new Thread(() -> { - Thread.currentThread().setPriority(MIN_PRIORITY); - r.run(); - }, name); - } - - public static EventsTask create(CloseableHttpClient httpclient, URI eventsRootTarget, int maxQueueSize, - long flushIntervalMillis, int waitBeforeShutdown, TelemetryRuntimeProducer telemetryRuntimeProducer, EventsStorageConsumer eventsStorageConsumer, EventsStorageProducer _eventsStorageProducer) throws URISyntaxException { + public static EventsTask create(long sendIntervalMillis, EventsStorageConsumer eventsStorageConsumer, EventsSender eventsSender, + ThreadFactory threadFactory) throws URISyntaxException { return new EventsTask(eventsStorageConsumer, - _eventsStorageProducer, - httpclient, - Utils.appendPath(eventsRootTarget, "api/events/bulk"), - maxQueueSize, - flushIntervalMillis, - waitBeforeShutdown, - 
telemetryRuntimeProducer); + sendIntervalMillis, + eventsSender, + threadFactory); } - EventsTask(EventsStorageConsumer eventsStorageConsumer, EventsStorageProducer eventsStorageProducer, CloseableHttpClient httpclient, URI target, int maxQueueSize, - long flushIntervalMillis, int waitBeforeShutdown, TelemetryRuntimeProducer telemetryRuntimeProducer) throws URISyntaxException { - - _httpclient = checkNotNull(httpclient); - - _target = checkNotNull(target); + EventsTask(EventsStorageConsumer eventsStorageConsumer, + long sendIntervalMillis, EventsSender eventsSender, ThreadFactory threadFactory) { _eventsStorageConsumer = checkNotNull(eventsStorageConsumer); - _eventsStorageProducer = checkNotNull(eventsStorageProducer); - _waitBeforeShutdown = waitBeforeShutdown; - - _maxQueueSize = maxQueueSize; - _flushIntervalMillis = flushIntervalMillis; - _telemetryRuntimeProducer = checkNotNull(telemetryRuntimeProducer); - - _eventsSender = EventsSender.create(_httpclient, _target, _telemetryRuntimeProducer); - _senderExecutor = new ThreadPoolExecutor( - 1, - 1, - 0L, - TimeUnit.MILLISECONDS, - new LinkedBlockingQueue(50), - eventClientThreadFactory("eventclient-sender"), - (r, executor) -> _log.warn("Executor queue full. 
Dropping events.")); - - _consumerExecutor = Executors.newSingleThreadExecutor(eventClientThreadFactory("eventclient-consumer")); - _consumerExecutor.submit(runConsumer()); + _sendIntervalMillis = sendIntervalMillis; + _eventsSender = checkNotNull(eventsSender); + _senderScheduledExecutorService = SplitExecutorFactory.buildSingleThreadScheduledExecutor(threadFactory, "Sender-events-%d"); + } - _flushScheduler = Executors.newScheduledThreadPool(1, eventClientThreadFactory("eventclient-flush")); - _flushScheduler.scheduleAtFixedRate(() -> flush(), _flushIntervalMillis, _flushIntervalMillis, TimeUnit.MILLISECONDS); + ThreadFactory eventClientThreadFactory(final String name) { + return new ThreadFactoryBuilder() + .setDaemon(true) + .setNameFormat(name) + .build(); } - /** - * the existence of this message in the queue triggers a send event in the consumer thread. - */ - public void flush() { - _eventsStorageProducer.track(SENTINEL, 0); - } // SENTINEL event won't be queued, so no size needed. 
+ public void start(){ + _senderScheduledExecutorService.scheduleWithFixedDelay(() -> { + try { + sendEvents(); + } catch (Exception e) { + _log.error("Error executing Event Action", e); + } + }, _sendIntervalMillis, _sendIntervalMillis, TimeUnit.MILLISECONDS); + } public void close() { try { - _consumerExecutor.shutdownNow(); - _flushScheduler.shutdownNow(); - _senderExecutor.awaitTermination(_waitBeforeShutdown, TimeUnit.MILLISECONDS); + sendEvents(); + _senderScheduledExecutorService.shutdown(); } catch (Exception e) { _log.warn("Error when shutting down EventClientImpl", e); } } - /** - * Infinite loop that listens to event from the event queue, dequeue them and send them over once: - * - a CENTINEL message has arrived, or - * - the queue reached a specific size - * - */ - - private Runnable runConsumer() { - Runnable runnable = () -> { - List events = new ArrayList<>(); - long accumulated = 0; - while (!Thread.currentThread().isInterrupted()) { - WrappedEvent data = _eventsStorageConsumer.pop(); - Event event = data.event(); - Long size = data.size(); - - if (event != SENTINEL) { - events.add(event); - accumulated += size; - } else if (events.size() < 1) { - - if (_log.isDebugEnabled()) { - _log.debug("No messages to publish."); - } - - continue; - } - if (events.size() >= _maxQueueSize || accumulated >= MAX_SIZE_BYTES || event == SENTINEL) { - - // Send over the network - if (_log.isDebugEnabled()) { - _log.debug(String.format("Sending %d events", events.size())); - } - - // Dispatch - List finalEvents = events; //This is to be able to handle events on Runnable. - Runnable r = () -> _eventsSender.sendEvents(finalEvents); - _senderExecutor.submit(r); + void sendEvents(){ + if (_eventsStorageConsumer.isFull()) { + _log.warn("Split SDK events queue is full. Events may have been dropped. Consider increasing capacity."); + } - // Clear the queue of events for the next batch. 
- events = new ArrayList<>(); - accumulated = 0; - } - } - }; - return runnable; - } + List wrappedEventList = _eventsStorageConsumer.popAll(); + List eventsToSend = new ArrayList<>(); + for (WrappedEvent wrappedEvent: wrappedEventList){ + Event event = wrappedEvent.event(); + eventsToSend.add(event); + } - @VisibleForTesting - URI getTarget() { - return _target ; + if (eventsToSend.isEmpty()){ + return; + } + _eventsSender.sendEvents(eventsToSend); } } \ No newline at end of file diff --git a/client/src/main/java/io/split/client/events/InMemoryEventsStorage.java b/client/src/main/java/io/split/client/events/InMemoryEventsStorage.java index 2d2aa9b1e..a3463f0ed 100644 --- a/client/src/main/java/io/split/client/events/InMemoryEventsStorage.java +++ b/client/src/main/java/io/split/client/events/InMemoryEventsStorage.java @@ -1,6 +1,5 @@ package io.split.client.events; - import com.google.common.annotations.VisibleForTesting; import io.split.client.dtos.Event; import io.split.telemetry.domain.enums.EventsDataRecordsEnum; @@ -8,10 +7,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.ArrayList; +import java.util.List; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; -import static com.google.gson.internal.$Gson$Preconditions.checkNotNull; +import static com.google.common.base.Preconditions.checkNotNull; public class InMemoryEventsStorage implements EventsStorage{ @@ -36,6 +37,18 @@ public WrappedEvent pop() { return null; } + @Override + public List popAll() { + ArrayList popped = new ArrayList<>(); + _eventQueue.drainTo(popped); + return popped; + } + + @Override + public boolean isFull() { + return _eventQueue.remainingCapacity() == 0; + } + @Override public boolean track(Event event, int eventSize) { try { diff --git a/client/src/main/java/io/split/client/events/NoopEventsStorageImp.java b/client/src/main/java/io/split/client/events/NoopEventsStorageImp.java index 27f20225e..5e6eb99eb 100644 
--- a/client/src/main/java/io/split/client/events/NoopEventsStorageImp.java +++ b/client/src/main/java/io/split/client/events/NoopEventsStorageImp.java @@ -3,6 +3,9 @@ import io.split.client.dtos.Event; +import java.util.ArrayList; +import java.util.List; + public class NoopEventsStorageImp implements EventsStorage { @Override @@ -18,4 +21,15 @@ public static NoopEventsStorageImp create() { public WrappedEvent pop() { return new WrappedEvent(new Event(), 0l); } + + @Override + public List popAll() { + //no-op + return new ArrayList<>(); + } + + @Override + public boolean isFull() { + return false; + } } diff --git a/client/src/main/java/io/split/client/exceptions/InputStreamProviderException.java b/client/src/main/java/io/split/client/exceptions/InputStreamProviderException.java new file mode 100644 index 000000000..33fba556f --- /dev/null +++ b/client/src/main/java/io/split/client/exceptions/InputStreamProviderException.java @@ -0,0 +1,8 @@ +package io.split.client.exceptions; + +public class InputStreamProviderException extends Exception { + + public InputStreamProviderException(String message) { + super(message); + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/exceptions/SemverParseException.java b/client/src/main/java/io/split/client/exceptions/SemverParseException.java new file mode 100644 index 000000000..892ba7535 --- /dev/null +++ b/client/src/main/java/io/split/client/exceptions/SemverParseException.java @@ -0,0 +1,7 @@ +package io.split.client.exceptions; + +public class SemverParseException extends Exception { + public SemverParseException(String message) { + super(message); + } +} diff --git a/client/src/main/java/io/split/client/exceptions/UriTooLongException.java b/client/src/main/java/io/split/client/exceptions/UriTooLongException.java new file mode 100644 index 000000000..401838528 --- /dev/null +++ b/client/src/main/java/io/split/client/exceptions/UriTooLongException.java @@ -0,0 +1,7 @@ +package 
io.split.client.exceptions; + +public class UriTooLongException extends Exception { + public UriTooLongException (String message) { + super(message); + } +} diff --git a/client/src/main/java/io/split/client/impressions/HttpImpressionsSender.java b/client/src/main/java/io/split/client/impressions/HttpImpressionsSender.java index 017ef45a3..7c346a904 100644 --- a/client/src/main/java/io/split/client/impressions/HttpImpressionsSender.java +++ b/client/src/main/java/io/split/client/impressions/HttpImpressionsSender.java @@ -2,17 +2,16 @@ import com.google.common.annotations.VisibleForTesting; import io.split.client.dtos.ImpressionCount; +import io.split.client.dtos.SplitHttpResponse; import io.split.client.dtos.TestImpressions; +import io.split.client.utils.Json; import io.split.client.utils.Utils; +import io.split.service.SplitHttpClient; import io.split.telemetry.domain.enums.HTTPLatenciesEnum; import io.split.telemetry.domain.enums.LastSynchronizationRecordsEnum; import io.split.telemetry.domain.enums.ResourceEnum; import io.split.telemetry.storage.TelemetryRuntimeProducer; -import org.apache.hc.client5.http.classic.methods.HttpPost; -import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; -import org.apache.hc.client5.http.impl.classic.CloseableHttpResponse; -import org.apache.hc.core5.http.HttpEntity; import org.apache.hc.core5.http.HttpStatus; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -20,8 +19,10 @@ import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; +import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Map; import static com.google.common.base.Preconditions.checkNotNull; @@ -36,13 +37,15 @@ public class HttpImpressionsSender implements ImpressionsSender { private static final Logger _logger = LoggerFactory.getLogger(HttpImpressionsSender.class); - private final CloseableHttpClient _client; + private final SplitHttpClient _client; private final URI 
_impressionBulkTarget; private final URI _impressionCountTarget; private final ImpressionsManager.Mode _mode; private final TelemetryRuntimeProducer _telemetryRuntimeProducer; - public static HttpImpressionsSender create(CloseableHttpClient client, URI eventsRootEndpoint, ImpressionsManager.Mode mode, TelemetryRuntimeProducer telemetryRuntimeProducer) throws URISyntaxException { + public static HttpImpressionsSender create(SplitHttpClient client, URI eventsRootEndpoint, + ImpressionsManager.Mode mode, + TelemetryRuntimeProducer telemetryRuntimeProducer) throws URISyntaxException { return new HttpImpressionsSender(client, Utils.appendPath(eventsRootEndpoint, BULK_ENDPOINT_PATH), Utils.appendPath(eventsRootEndpoint, COUNT_ENDPOINT_PATH), @@ -50,7 +53,9 @@ public static HttpImpressionsSender create(CloseableHttpClient client, URI event telemetryRuntimeProducer); } - private HttpImpressionsSender(CloseableHttpClient client, URI impressionBulkTarget, URI impressionCountTarget, ImpressionsManager.Mode mode, TelemetryRuntimeProducer telemetryRuntimeProducer) { + private HttpImpressionsSender(SplitHttpClient client, URI impressionBulkTarget, URI impressionCountTarget, + ImpressionsManager.Mode mode, + TelemetryRuntimeProducer telemetryRuntimeProducer) { _client = client; _mode = mode; _impressionBulkTarget = impressionBulkTarget; @@ -60,53 +65,48 @@ private HttpImpressionsSender(CloseableHttpClient client, URI impressionBulkTarg @Override public void postImpressionsBulk(List impressions) { - - CloseableHttpResponse response = null; long initTime = System.currentTimeMillis(); try { - HttpEntity entity = Utils.toJsonEntity(impressions); - - HttpPost request = new HttpPost(_impressionBulkTarget); - request.addHeader(IMPRESSIONS_MODE_HEADER, _mode.toString()); - request.setEntity(entity); + Map> additionalHeaders = new HashMap<>(); + additionalHeaders.put(IMPRESSIONS_MODE_HEADER, Collections.singletonList(_mode.toString())); + additionalHeaders.put("Content-Type", 
Collections.singletonList("application/json")); - response = _client.execute(request); + SplitHttpResponse response = _client.post(_impressionBulkTarget, Json.toJson(impressions), + additionalHeaders); - int status = response.getCode(); - - if (status < HttpStatus.SC_OK || status >= HttpStatus.SC_MULTIPLE_CHOICES) { - _telemetryRuntimeProducer.recordSyncError(ResourceEnum.IMPRESSION_SYNC, status); - _logger.warn("Response status was: " + status); + if (response.statusCode() < HttpStatus.SC_OK || response.statusCode() >= HttpStatus.SC_MULTIPLE_CHOICES) { + _telemetryRuntimeProducer.recordSyncError(ResourceEnum.IMPRESSION_SYNC, response.statusCode()); } - _telemetryRuntimeProducer.recordSuccessfulSync(LastSynchronizationRecordsEnum.IMPRESSIONS, System.currentTimeMillis()); + _telemetryRuntimeProducer.recordSuccessfulSync(LastSynchronizationRecordsEnum.IMPRESSIONS, + System.currentTimeMillis()); } catch (Throwable t) { _logger.warn("Exception when posting impressions" + impressions, t); } finally { - _telemetryRuntimeProducer.recordSyncLatency(HTTPLatenciesEnum.IMPRESSIONS, System.currentTimeMillis() - initTime); - Utils.forceClose(response); + _telemetryRuntimeProducer.recordSyncLatency(HTTPLatenciesEnum.IMPRESSIONS, + System.currentTimeMillis() - initTime); } - } @Override public void postCounters(HashMap raw) { long initTime = System.currentTimeMillis(); - if (_mode.equals(ImpressionsManager.Mode.DEBUG)) { - _logger.warn("Attempted to submit counters in impressions debugging mode. 
Ignoring"); - return; - } + try { + + Map> additionalHeaders = new HashMap<>(); + additionalHeaders.put("Content-Type", Collections.singletonList("application/json")); + + SplitHttpResponse response = _client.post(_impressionCountTarget, + Json.toJson(ImpressionCount.fromImpressionCounterData(raw)), + null); - HttpPost request = new HttpPost(_impressionCountTarget); - request.setEntity(Utils.toJsonEntity(ImpressionCount.fromImpressionCounterData(raw))); - try (CloseableHttpResponse response = _client.execute(request)) { - int status = response.getCode(); - if (status < HttpStatus.SC_OK || status >= HttpStatus.SC_MULTIPLE_CHOICES) { - _telemetryRuntimeProducer.recordSyncError(ResourceEnum.IMPRESSION_COUNT_SYNC, status); - _logger.warn("Response status was: " + status); + if (response.statusCode() < HttpStatus.SC_OK || response.statusCode() >= HttpStatus.SC_MULTIPLE_CHOICES) { + _telemetryRuntimeProducer.recordSyncError(ResourceEnum.IMPRESSION_COUNT_SYNC, response.statusCode()); } - _telemetryRuntimeProducer.recordSyncLatency(HTTPLatenciesEnum.IMPRESSIONS_COUNT, System.currentTimeMillis() - initTime); - _telemetryRuntimeProducer.recordSuccessfulSync(LastSynchronizationRecordsEnum.IMPRESSIONS_COUNT, System.currentTimeMillis()); + _telemetryRuntimeProducer.recordSyncLatency(HTTPLatenciesEnum.IMPRESSIONS_COUNT, + System.currentTimeMillis() - initTime); + _telemetryRuntimeProducer.recordSuccessfulSync(LastSynchronizationRecordsEnum.IMPRESSIONS_COUNT, + System.currentTimeMillis()); } catch (IOException exc) { _logger.warn("Exception when posting impression counters: ", exc); } diff --git a/client/src/main/java/io/split/client/impressions/Impression.java b/client/src/main/java/io/split/client/impressions/Impression.java index fdabb5539..fc7b73141 100644 --- a/client/src/main/java/io/split/client/impressions/Impression.java +++ b/client/src/main/java/io/split/client/impressions/Impression.java @@ -16,17 +16,20 @@ public class Impression { private final Long _changeNumber; 
private Long _pt; private final Map _attributes; + private final String _properties; - public Impression(String key, String bucketingKey, String split, String treatment, long time, String appliedRule, Long changeNumber, Map atributes) { + public Impression(String key, String bucketingKey, String featureFlag, String treatment, long time, String appliedRule, + Long changeNumber, Map atributes, String properties) { _key = key; _bucketingKey = bucketingKey; - _split = split; + _split = featureFlag; _treatment = treatment; _time = time; _appliedRule = appliedRule; _changeNumber = changeNumber; _attributes = atributes; + _properties = properties; } public String key() { @@ -66,4 +69,8 @@ public Long pt() { } public Impression withPreviousTime(Long pt) { _pt = pt; return this; } + + public String properties() { + return _properties; + } } diff --git a/client/src/main/java/io/split/client/impressions/ImpressionCounter.java b/client/src/main/java/io/split/client/impressions/ImpressionCounter.java index ca1d5ff7a..381177a89 100644 --- a/client/src/main/java/io/split/client/impressions/ImpressionCounter.java +++ b/client/src/main/java/io/split/client/impressions/ImpressionCounter.java @@ -13,12 +13,12 @@ public static class Key { private final String _featureName; private final long _timeFrame; - public Key(String featureName, long timeframe) { - _featureName = checkNotNull(featureName); + public Key(String featureFlagName, long timeframe) { + _featureName = checkNotNull(featureFlagName); _timeFrame = timeframe; } - public String featureName() { return _featureName; } + public String featureName() { return _featureName; } public long timeFrame() { return _timeFrame; } @Override @@ -43,8 +43,8 @@ public ImpressionCounter() { _counts = new ConcurrentHashMap<>(); } - public void inc(String featureName, long timeFrame, int amount) { - Key key = new Key(featureName, ImpressionUtils.truncateTimeframe(timeFrame)); + public void inc(String featureFlagName, long timeFrame, int amount) 
{ + Key key = new Key(featureFlagName, ImpressionUtils.truncateTimeframe(timeFrame)); AtomicInteger count = _counts.get(key); if (Objects.isNull(count)) { count = new AtomicInteger(); @@ -66,4 +66,4 @@ public HashMap popAll() { } public boolean isEmpty() { return _counts.isEmpty(); } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/impressions/ImpressionListener.java b/client/src/main/java/io/split/client/impressions/ImpressionListener.java index 3726b598d..dc15511fb 100644 --- a/client/src/main/java/io/split/client/impressions/ImpressionListener.java +++ b/client/src/main/java/io/split/client/impressions/ImpressionListener.java @@ -21,18 +21,6 @@ public interface ImpressionListener { */ void close(); - final class NoopImpressionListener implements ImpressionListener { - @Override - public void log(Impression impression) { - // noop - } - - @Override - public void close() { - // noop - } - } - final class FederatedImpressionListener implements ImpressionListener { private List _delegates; diff --git a/client/src/main/java/io/split/client/impressions/ImpressionsManager.java b/client/src/main/java/io/split/client/impressions/ImpressionsManager.java index ac1f8a9b4..ce7c62011 100644 --- a/client/src/main/java/io/split/client/impressions/ImpressionsManager.java +++ b/client/src/main/java/io/split/client/impressions/ImpressionsManager.java @@ -1,19 +1,34 @@ package io.split.client.impressions; +import io.split.client.dtos.DecoratedImpression; + import java.util.List; public interface ImpressionsManager { public enum Mode { OPTIMIZED, - DEBUG + DEBUG, + NONE } - void track(List impressions); + void track(List decoratedImpressions); + void start(); + void close(); final class NoOpImpressionsManager implements ImpressionsManager { @Override - public void track(List impressions) { /* do nothing */ } + public void track(List decoratedImpressions) { /* do nothing */ } + + @Override + public void start(){ + /* do nothing */ + } + + @Override 
+ public void close() { + /* do nothing */ + } } } diff --git a/client/src/main/java/io/split/client/impressions/ImpressionsManagerImpl.java b/client/src/main/java/io/split/client/impressions/ImpressionsManagerImpl.java index ed23b337a..3b784abaf 100644 --- a/client/src/main/java/io/split/client/impressions/ImpressionsManagerImpl.java +++ b/client/src/main/java/io/split/client/impressions/ImpressionsManagerImpl.java @@ -1,28 +1,27 @@ package io.split.client.impressions; import com.google.common.annotations.VisibleForTesting; -import com.google.common.util.concurrent.ThreadFactoryBuilder; import io.split.client.SplitClientConfig; +import io.split.client.dtos.DecoratedImpression; import io.split.client.dtos.KeyImpression; import io.split.client.dtos.TestImpressions; -import io.split.storages.enums.OperationMode; +import io.split.client.impressions.strategy.ProcessImpressionNone; +import io.split.client.impressions.strategy.ProcessImpressionStrategy; +import io.split.client.utils.SplitExecutorFactory; import io.split.telemetry.domain.enums.ImpressionsDataTypeEnum; import io.split.telemetry.storage.TelemetryRuntimeProducer; -import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.Closeable; -import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.List; import java.util.Objects; -import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import java.util.stream.Stream; import static com.google.common.base.Preconditions.checkNotNull; @@ -36,109 +35,140 @@ public class ImpressionsManagerImpl implements ImpressionsManager, Closeable { private static final long BULK_INITIAL_DELAY_SECONDS = 10L; private static final long COUNT_INITIAL_DELAY_SECONDS = 100L; private static final long 
COUNT_REFRESH_RATE_SECONDS = 30 * 60; - private static final long LAST_SEEN_CACHE_SIZE = 500000; // cache up to 500k impression hashes - private final SplitClientConfig _config; private final ImpressionsStorageProducer _impressionsStorageProducer; private final ImpressionsStorageConsumer _impressionsStorageConsumer; private final ScheduledExecutorService _scheduler; private final ImpressionsSender _impressionsSender; - private final ImpressionObserver _impressionObserver; - private final ImpressionCounter _counter; private final ImpressionListener _listener; - private final ImpressionsManager.Mode _mode; - private final TelemetryRuntimeProducer _telemetryRuntimeProducer; - private final boolean _addPreviousTimeEnabled; - private final boolean _isOptimized; - private final OperationMode _operationMode; - - public static ImpressionsManagerImpl instance(CloseableHttpClient client, - SplitClientConfig config, - List listeners, + private final ImpressionsManager.Mode _impressionsMode; + private TelemetryRuntimeProducer _telemetryRuntimeProducer; + private ImpressionCounter _counter; + private ProcessImpressionStrategy _processImpressionStrategy; + private ProcessImpressionNone _processImpressionNone; + + private final int _impressionsRefreshRate; + + public static ImpressionsManagerImpl instance(SplitClientConfig config, TelemetryRuntimeProducer telemetryRuntimeProducer, ImpressionsStorageConsumer impressionsStorageConsumer, - ImpressionsStorageProducer impressionsStorageProducer) throws URISyntaxException { - return new ImpressionsManagerImpl(client, config, null, listeners, telemetryRuntimeProducer, impressionsStorageConsumer, impressionsStorageProducer); + ImpressionsStorageProducer impressionsStorageProducer, + ImpressionsSender impressionsSender, + ProcessImpressionNone processImpressionNone, + ProcessImpressionStrategy processImpressionStrategy, + ImpressionCounter counter, + ImpressionListener listener) throws URISyntaxException { + return new 
ImpressionsManagerImpl(config, impressionsSender, telemetryRuntimeProducer, impressionsStorageConsumer, + impressionsStorageProducer, processImpressionNone, processImpressionStrategy, counter, listener); } - public static ImpressionsManagerImpl instanceForTest(CloseableHttpClient client, - SplitClientConfig config, + public static ImpressionsManagerImpl instanceForTest(SplitClientConfig config, ImpressionsSender impressionsSender, - List listeners, TelemetryRuntimeProducer telemetryRuntimeProducer, ImpressionsStorageConsumer impressionsStorageConsumer, - ImpressionsStorageProducer impressionsStorageProducer) throws URISyntaxException { - return new ImpressionsManagerImpl(client, config, impressionsSender, listeners, telemetryRuntimeProducer, impressionsStorageConsumer, impressionsStorageProducer); + ImpressionsStorageProducer impressionsStorageProducer, + ProcessImpressionNone processImpressionNone, + ProcessImpressionStrategy processImpressionStrategy, + ImpressionCounter counter, + ImpressionListener listener) { + return new ImpressionsManagerImpl(config, impressionsSender, telemetryRuntimeProducer, impressionsStorageConsumer, + impressionsStorageProducer, processImpressionNone, processImpressionStrategy, counter, listener); } - private ImpressionsManagerImpl(CloseableHttpClient client, - SplitClientConfig config, + private ImpressionsManagerImpl(SplitClientConfig config, ImpressionsSender impressionsSender, - List listeners, TelemetryRuntimeProducer telemetryRuntimeProducer, ImpressionsStorageConsumer impressionsStorageConsumer, - ImpressionsStorageProducer impressionsStorageProducer) throws URISyntaxException { + ImpressionsStorageProducer impressionsStorageProducer, + ProcessImpressionNone processImpressionNone, + ProcessImpressionStrategy processImpressionStrategy, + ImpressionCounter impressionCounter, + ImpressionListener impressionListener) { _config = checkNotNull(config); - _mode = checkNotNull(config.impressionsMode()); - _telemetryRuntimeProducer = 
checkNotNull(telemetryRuntimeProducer); + _impressionsMode = checkNotNull(config.impressionsMode()); _impressionsStorageConsumer = checkNotNull(impressionsStorageConsumer); _impressionsStorageProducer = checkNotNull(impressionsStorageProducer); - _impressionObserver = new ImpressionObserver(LAST_SEEN_CACHE_SIZE); - _impressionsSender = (null != impressionsSender) ? impressionsSender - : HttpImpressionsSender.create(client, URI.create(config.eventsEndpoint()), _mode, telemetryRuntimeProducer); - - _scheduler = buildExecutor(); - _scheduler.scheduleAtFixedRate(this::sendImpressions, BULK_INITIAL_DELAY_SECONDS, config.impressionsRefreshRate(), TimeUnit.SECONDS); - - _listener = (null != listeners && !listeners.isEmpty()) ? new ImpressionListener.FederatedImpressionListener(listeners) - : new ImpressionListener.NoopImpressionListener(); - - _operationMode = config.operationMode(); - _addPreviousTimeEnabled = shouldAddPreviousTime(); - _counter = _addPreviousTimeEnabled ? new ImpressionCounter() : null; - _isOptimized = _counter != null && shouldBeOptimized(); - if (_isOptimized) { - _scheduler.scheduleAtFixedRate(this::sendImpressionCounters, COUNT_INITIAL_DELAY_SECONDS, COUNT_REFRESH_RATE_SECONDS, TimeUnit.SECONDS); - } + _telemetryRuntimeProducer = checkNotNull(telemetryRuntimeProducer); + _processImpressionNone = checkNotNull(processImpressionNone); + _processImpressionStrategy = checkNotNull(processImpressionStrategy); + _impressionsSender = impressionsSender; + _counter = impressionCounter; + + _scheduler = SplitExecutorFactory.buildScheduledExecutorService(config.getThreadFactory(), "Split-ImpressionsManager-%d", 2); + _listener = impressionListener; + + _impressionsRefreshRate = config.impressionsRefreshRate(); } - private boolean shouldQueueImpression(Impression i) { - return Objects.isNull(i.pt()) || - ImpressionUtils.truncateTimeframe(i.pt()) != ImpressionUtils.truncateTimeframe(i.time()); + @Override + public void start(){ + switch (_impressionsMode){ + case 
OPTIMIZED: + _scheduler.scheduleAtFixedRate(this::sendImpressionCounters, COUNT_INITIAL_DELAY_SECONDS, COUNT_REFRESH_RATE_SECONDS, + TimeUnit.SECONDS); + _scheduler.scheduleAtFixedRate(this::sendImpressions, BULK_INITIAL_DELAY_SECONDS, _impressionsRefreshRate, TimeUnit.SECONDS); + break; + case DEBUG: + _scheduler.scheduleAtFixedRate(this::sendImpressions, BULK_INITIAL_DELAY_SECONDS, _impressionsRefreshRate, TimeUnit.SECONDS); + _scheduler.scheduleAtFixedRate(this::sendImpressionCounters, COUNT_INITIAL_DELAY_SECONDS, COUNT_REFRESH_RATE_SECONDS, + TimeUnit.SECONDS); + break; + case NONE: + _scheduler.scheduleAtFixedRate(this::sendImpressionCounters, COUNT_INITIAL_DELAY_SECONDS, COUNT_REFRESH_RATE_SECONDS, + TimeUnit.SECONDS); + break; + } } @Override - public void track(List impressions) { - if (null == impressions) { + public void track(List decoratedImpressions) { + if (null == decoratedImpressions) { return; } - int totalImpressions = impressions.size(); - - impressions = processImpressions(impressions); - - if (totalImpressions > impressions.size()) { - _telemetryRuntimeProducer.recordImpressionStats(ImpressionsDataTypeEnum.IMPRESSIONS_DEDUPED, totalImpressions-impressions.size()); - totalImpressions = impressions.size(); + List impressionsForLogs = new ArrayList<>(); + List impressionsToListener = new ArrayList<>(); + + for (int i = 0; i < decoratedImpressions.size(); i++) { + ImpressionsResult impressionsResult; + if (!decoratedImpressions.get(i).disabled()) { + impressionsResult = _processImpressionStrategy.process(Stream.of( + decoratedImpressions.get(i).impression()).collect(Collectors.toList())); + } else { + impressionsResult = _processImpressionNone.process(Stream.of( + decoratedImpressions.get(i).impression()).collect(Collectors.toList())); + } + if (!Objects.isNull(impressionsResult.getImpressionsToQueue())) { + impressionsForLogs.addAll(impressionsResult.getImpressionsToQueue()); + } + if (!Objects.isNull(impressionsResult.getImpressionsToListener())) 
+ impressionsToListener.addAll(impressionsResult.getImpressionsToListener()); } - long queued = _impressionsStorageProducer.put(impressions.stream().map(KeyImpression::fromImpression).collect(Collectors.toList())); + int totalImpressions = impressionsForLogs.size(); + long queued = _impressionsStorageProducer.put(impressionsForLogs.stream().map(KeyImpression::fromImpression).collect(Collectors.toList())); if (queued < totalImpressions) { _telemetryRuntimeProducer.recordImpressionStats(ImpressionsDataTypeEnum.IMPRESSIONS_DROPPED, totalImpressions-queued); } _telemetryRuntimeProducer.recordImpressionStats(ImpressionsDataTypeEnum.IMPRESSIONS_QUEUED, queued); - impressions.forEach(imp -> _listener.log(imp)); + if (_listener!=null){ + impressionsToListener.forEach(imp -> _listener.log(imp)); + } } @Override public void close() { try { - _listener.close(); - _log.info("Successful shutdown of ImpressionListener"); + if(_listener!= null){ + _listener.close(); + _log.info("Successful shutdown of ImpressionListener"); + } _scheduler.shutdown(); sendImpressions(); - _scheduler.awaitTermination(_config.waitBeforeShutdown(), TimeUnit.MILLISECONDS); + if(_counter != null) { + sendImpressionCounters(); + } } catch (Exception e) { _log.warn("Unable to close ImpressionsManager properly", e); } @@ -171,62 +201,8 @@ public void close() { } } - private ScheduledExecutorService buildExecutor() { - ThreadFactory threadFactory = new ThreadFactoryBuilder() - .setDaemon(true) - .setNameFormat("Split-ImpressionsManager-%d") - .build(); - return Executors.newScheduledThreadPool(2, threadFactory); - } - - - - private boolean shouldAddPreviousTime() { - switch (_operationMode) { - case STANDALONE: - return true; - default: - return false; - } - } - - private boolean shouldBeOptimized() { - if(!_addPreviousTimeEnabled) - return false; - switch (_mode) { - case OPTIMIZED: - return true; - default: - return false; - } - } - @VisibleForTesting /* package private */ ImpressionCounter getCounter() { 
return _counter; } - - /** - * Filter in case of deduping and format impressions to let them ready to be sent. - * @param impressions - * @return - */ - private List processImpressions(List impressions) { - if(!_addPreviousTimeEnabled) { //Only STANDALONE Mode needs to iterate over impressions to add previous time. - return impressions; - } - - List impressionsToQueue = new ArrayList<>(); - for(Impression impression : impressions) { - impression = impression.withPreviousTime(_impressionObserver.testAndSet(impression)); - if (_isOptimized) { - _counter.inc(impression.split(), impression.time(), 1); - if(!shouldQueueImpression(impression)) { - continue; - } - } - impressionsToQueue.add(impression); - } - return impressionsToQueue; - } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/impressions/ImpressionsResult.java b/client/src/main/java/io/split/client/impressions/ImpressionsResult.java new file mode 100644 index 000000000..1467ebb76 --- /dev/null +++ b/client/src/main/java/io/split/client/impressions/ImpressionsResult.java @@ -0,0 +1,22 @@ +package io.split.client.impressions; + +import java.util.List; + +public class ImpressionsResult { + + private List impressionsToListener; + private List impressionsToQueue; + + public ImpressionsResult(List impressionsForLogs, List impressionsToListener) { + this.impressionsToListener = impressionsToListener; + this.impressionsToQueue = impressionsForLogs; + } + + public List getImpressionsToQueue() { + return impressionsToQueue; + } + + public List getImpressionsToListener() { + return impressionsToListener; + } +} diff --git a/client/src/main/java/io/split/client/impressions/PluggableImpressionSender.java b/client/src/main/java/io/split/client/impressions/PluggableImpressionSender.java new file mode 100644 index 000000000..2162f3030 --- /dev/null +++ b/client/src/main/java/io/split/client/impressions/PluggableImpressionSender.java @@ -0,0 +1,48 @@ +package io.split.client.impressions; + 
+import io.split.client.dtos.TestImpressions; +import io.split.storages.pluggable.domain.PrefixAdapter; +import io.split.storages.pluggable.domain.UserPipelineWrapper; +import io.split.storages.pluggable.domain.UserStorageWrapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import pluggable.CustomStorageWrapper; + +import java.util.HashMap; +import java.util.List; + +import static com.google.common.base.Preconditions.checkNotNull; + +public class PluggableImpressionSender implements ImpressionsSender{ + + private final UserStorageWrapper _userStorageWrapper; + + private static final Logger _logger = LoggerFactory.getLogger(PluggableImpressionSender.class); + + public static PluggableImpressionSender create(CustomStorageWrapper customStorageWrapper){ + return new PluggableImpressionSender(customStorageWrapper); + } + + private PluggableImpressionSender(CustomStorageWrapper customStorageWrapper) { + this._userStorageWrapper = new UserStorageWrapper(checkNotNull(customStorageWrapper)); + } + + @Override + public void postImpressionsBulk(List impressions) { + //No-Op + } + + @Override + public void postCounters(HashMap counts) { + try { + UserPipelineWrapper pipelineExecution = _userStorageWrapper.pipeline(); + for(ImpressionCounter.Key countsKey: counts.keySet()){ + String key = PrefixAdapter.buildImpressionsCount(); + pipelineExecution.hIncrement(key, countsKey.featureName() + "::" + countsKey.timeFrame(), counts.get(countsKey)); + } + pipelineExecution.exec(); + } catch (Exception e){ + _logger.warn("Redis pipeline exception when posting counters: ", e); + } + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/impressions/UniqueKeysTracker.java b/client/src/main/java/io/split/client/impressions/UniqueKeysTracker.java new file mode 100644 index 000000000..6d813d8a2 --- /dev/null +++ b/client/src/main/java/io/split/client/impressions/UniqueKeysTracker.java @@ -0,0 +1,8 @@ +package io.split.client.impressions; + +public 
interface UniqueKeysTracker { + + boolean track (String featureName, String key); + void start(); + void stop(); +} diff --git a/client/src/main/java/io/split/client/impressions/UniqueKeysTrackerImp.java b/client/src/main/java/io/split/client/impressions/UniqueKeysTrackerImp.java new file mode 100644 index 000000000..c0034b6b2 --- /dev/null +++ b/client/src/main/java/io/split/client/impressions/UniqueKeysTrackerImp.java @@ -0,0 +1,207 @@ +package io.split.client.impressions; + +import com.google.common.collect.Lists; +import io.split.client.dtos.UniqueKeys; +import io.split.client.impressions.filters.BloomFilterImp; +import io.split.client.impressions.filters.Filter; +import io.split.client.impressions.filters.FilterAdapter; +import io.split.client.impressions.filters.FilterAdapterImpl; +import io.split.client.utils.SplitExecutorFactory; +import io.split.telemetry.synchronizer.TelemetrySynchronizer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +public class UniqueKeysTrackerImp implements UniqueKeysTracker{ + private static final Logger _log = LoggerFactory.getLogger(UniqueKeysTrackerImp.class); + private static final double MARGIN_ERROR = 0.01; + private static final int MAX_UNIQUE_KEYS_POST_SIZE = 5000; + private static final int MAX_AMOUNT_OF_KEYS = 10000000; + private final AtomicInteger trackerKeysSize = new AtomicInteger(0); + private FilterAdapter filterAdapter; + private final TelemetrySynchronizer _telemetrySynchronizer; + private final ScheduledExecutorService _uniqueKeysSyncScheduledExecutorService; 
+ private final ScheduledExecutorService _cleanFilterScheduledExecutorService; + private final ConcurrentHashMap> uniqueKeysTracker; + private final int _uniqueKeysRefreshRate; + private final int _filterRefreshRate; + private final AtomicBoolean sendGuard = new AtomicBoolean(false); + private static final Logger _logger = LoggerFactory.getLogger(UniqueKeysTrackerImp.class); + + public UniqueKeysTrackerImp(TelemetrySynchronizer telemetrySynchronizer, int uniqueKeysRefreshRate, int filterRefreshRate, + ThreadFactory threadFactory) { + Filter bloomFilter = new BloomFilterImp(MAX_AMOUNT_OF_KEYS, MARGIN_ERROR); + this.filterAdapter = new FilterAdapterImpl(bloomFilter); + uniqueKeysTracker = new ConcurrentHashMap<>(); + _telemetrySynchronizer = telemetrySynchronizer; + _uniqueKeysRefreshRate = uniqueKeysRefreshRate; + _filterRefreshRate = filterRefreshRate; + _uniqueKeysSyncScheduledExecutorService = SplitExecutorFactory.buildSingleThreadScheduledExecutor(threadFactory,"UniqueKeys-sync-%d"); + _cleanFilterScheduledExecutorService = SplitExecutorFactory.buildSingleThreadScheduledExecutor(threadFactory,"Filter-%d"); + } + + @Override + public boolean track(String featureFlagName, String key) { + if (!filterAdapter.add(featureFlagName, key)) { + _logger.debug("The feature flag " + featureFlagName + " and key " + key + " exist in the UniqueKeysTracker"); + return false; + } + uniqueKeysTracker.compute(featureFlagName, + (feature, current) -> { + HashSet keysByFeature = Optional.ofNullable(current).orElse(new HashSet<>()); + keysByFeature.add(key); + trackerKeysSize.incrementAndGet(); + return keysByFeature; + }); + _logger.debug("The feature flag " + featureFlagName + " and key " + key + " was added"); + if (trackerKeysSize.intValue() >= MAX_UNIQUE_KEYS_POST_SIZE){ + _logger.warn("The UniqueKeysTracker size reached the maximum limit"); + try { + sendUniqueKeys(); + } catch (Exception e) { + _log.error("Error sending unique keys.", e); + } + } + return true; + } + + 
@Override + public void start() { + scheduleWithFixedDelay(_uniqueKeysSyncScheduledExecutorService, _uniqueKeysRefreshRate, new ExecuteSendUniqueKeys()); + scheduleWithFixedDelay(_cleanFilterScheduledExecutorService, _filterRefreshRate, new ExecuteCleanFilter()); + } + + private void scheduleWithFixedDelay(ScheduledExecutorService scheduledExecutorService, int refreshRate, + ExecuteUniqueKeysAction executeUniqueKeysAction) { + scheduledExecutorService.scheduleWithFixedDelay(() -> { + try { + executeUniqueKeysAction.execute(); + } catch (Exception e) { + _log.error("Error executing an Unique Key Action.", e); + } + }, refreshRate, refreshRate, TimeUnit.SECONDS); + } + + @Override + public void stop() { + try { + sendUniqueKeys(); + } catch (Exception e) { + _log.error("Error sending unique keys."); + } + _uniqueKeysSyncScheduledExecutorService.shutdown(); + _cleanFilterScheduledExecutorService.shutdown(); + } + + public HashMap> popAll(){ + HashMap> toReturn = new HashMap<>(); + for (String key : uniqueKeysTracker.keySet()) { + HashSet value = uniqueKeysTracker.remove(key); + toReturn.put(key, value); + } + trackerKeysSize.set(0); + return toReturn; + } + + private void sendUniqueKeys(){ + if (!sendGuard.compareAndSet(false, true)) { + _log.debug("SendUniqueKeys already running"); + return; + } + + try { + if (uniqueKeysTracker.isEmpty()) { + _log.debug("The Unique Keys Tracker is empty"); + return; + } + + HashMap> uniqueKeysHashMap = popAll(); + List uniqueKeysFromPopAll = new ArrayList<>(); + for (Map.Entry> uniqueKeyEntry : uniqueKeysHashMap.entrySet()) { + UniqueKeys.UniqueKey uniqueKey = new UniqueKeys.UniqueKey(uniqueKeyEntry.getKey(), new ArrayList<>(uniqueKeyEntry.getValue())); + uniqueKeysFromPopAll.add(uniqueKey); + } + uniqueKeysFromPopAll = capChunksToMaxSize(uniqueKeysFromPopAll); + + for (List chunk : getChunks(uniqueKeysFromPopAll)) { + _telemetrySynchronizer.synchronizeUniqueKeys(new UniqueKeys(chunk)); + } + } finally { + sendGuard.set(false); + } 
+ } + + private List capChunksToMaxSize(List uniqueKeys) { + List finalChunk = new ArrayList<>(); + for (UniqueKeys.UniqueKey uniqueKey : uniqueKeys) { + if (uniqueKey.keysDto.size() > MAX_UNIQUE_KEYS_POST_SIZE) { + for(List subChunk : Lists.partition(uniqueKey.keysDto, MAX_UNIQUE_KEYS_POST_SIZE)) { + finalChunk.add(new UniqueKeys.UniqueKey(uniqueKey.featureName, subChunk)); + } + continue; + } + finalChunk.add(uniqueKey); + } + return finalChunk; + } + + private List> getChunks(List uniqueKeys) { + List> chunks = new ArrayList<>(); + List intermediateChunk = new ArrayList<>(); + for (UniqueKeys.UniqueKey uniqueKey : uniqueKeys) { + if ((getChunkSize(intermediateChunk) + uniqueKey.keysDto.size()) > MAX_UNIQUE_KEYS_POST_SIZE) { + chunks.add(intermediateChunk); + intermediateChunk = new ArrayList<>(); + } + intermediateChunk.add(uniqueKey); + } + if (!intermediateChunk.isEmpty()) { + chunks.add(intermediateChunk); + } + return chunks; + } + + private int getChunkSize(List uniqueKeysChunk) { + int totalSize = 0; + for (UniqueKeys.UniqueKey uniqueKey : uniqueKeysChunk) { + totalSize += uniqueKey.keysDto.size(); + } + return totalSize; + } + + private interface ExecuteUniqueKeysAction{ + void execute(); + } + + private class ExecuteCleanFilter implements ExecuteUniqueKeysAction { + + @Override + public void execute() { + filterAdapter.clear(); + } + } + + private class ExecuteSendUniqueKeys implements ExecuteUniqueKeysAction { + + @Override + public void execute() { + sendUniqueKeys(); + } + } + + public AtomicBoolean getSendGuard() { + return sendGuard; + } +} diff --git a/client/src/main/java/io/split/client/impressions/filters/BloomFilterImp.java b/client/src/main/java/io/split/client/impressions/filters/BloomFilterImp.java new file mode 100644 index 000000000..9bf82f81e --- /dev/null +++ b/client/src/main/java/io/split/client/impressions/filters/BloomFilterImp.java @@ -0,0 +1,34 @@ +package io.split.client.impressions.filters; + +import 
com.google.common.base.Charsets; +import com.google.common.hash.BloomFilter; +import com.google.common.hash.Funnels; + +public class BloomFilterImp implements Filter { + + private BloomFilter bloomFilter; + private final int size; + private final double errorMargin; + + public BloomFilterImp(int size, double errorMargin) { + this.size = size; + this.errorMargin = errorMargin; + this.bloomFilter = BloomFilter.create(Funnels.stringFunnel(Charsets.UTF_8), size, errorMargin); + } + + @Override + public boolean add(String data) { + return bloomFilter.put(data); + } + + @Override + public boolean contains(String data) { + return bloomFilter.mightContain(data); + } + + @Override + public void clear() { + bloomFilter = BloomFilter.create(Funnels.stringFunnel(Charsets.UTF_16), size, errorMargin); + + } +} diff --git a/client/src/main/java/io/split/client/impressions/filters/Filter.java b/client/src/main/java/io/split/client/impressions/filters/Filter.java new file mode 100644 index 000000000..af5e8f8b1 --- /dev/null +++ b/client/src/main/java/io/split/client/impressions/filters/Filter.java @@ -0,0 +1,8 @@ +package io.split.client.impressions.filters; + +public interface Filter { + + boolean add(String data); + boolean contains(String data); + void clear(); +} diff --git a/client/src/main/java/io/split/client/impressions/filters/FilterAdapter.java b/client/src/main/java/io/split/client/impressions/filters/FilterAdapter.java new file mode 100644 index 000000000..e81e0bd97 --- /dev/null +++ b/client/src/main/java/io/split/client/impressions/filters/FilterAdapter.java @@ -0,0 +1,8 @@ +package io.split.client.impressions.filters; + +public interface FilterAdapter { + + boolean add(String featureFlagName, String key); + boolean contains(String featureFlagName, String key); + void clear(); +} diff --git a/client/src/main/java/io/split/client/impressions/filters/FilterAdapterImpl.java b/client/src/main/java/io/split/client/impressions/filters/FilterAdapterImpl.java new file mode 
100644 index 000000000..0bdb8a169 --- /dev/null +++ b/client/src/main/java/io/split/client/impressions/filters/FilterAdapterImpl.java @@ -0,0 +1,25 @@ +package io.split.client.impressions.filters; + +public class FilterAdapterImpl implements FilterAdapter { + + private final Filter filter; + + public FilterAdapterImpl(Filter filter) { + this.filter = filter; + } + + @Override + public boolean add(String featureFlagName, String key) { + return filter.add(featureFlagName + key); + } + + @Override + public boolean contains(String featureFlagName, String key) { + return filter.contains(featureFlagName + key); + } + + @Override + public void clear() { + filter.clear(); + } +} diff --git a/client/src/main/java/io/split/client/impressions/strategy/ProcessImpressionDebug.java b/client/src/main/java/io/split/client/impressions/strategy/ProcessImpressionDebug.java new file mode 100644 index 000000000..0ac5a20d9 --- /dev/null +++ b/client/src/main/java/io/split/client/impressions/strategy/ProcessImpressionDebug.java @@ -0,0 +1,30 @@ +package io.split.client.impressions.strategy; + +import io.split.client.impressions.Impression; +import io.split.client.impressions.ImpressionObserver; +import io.split.client.impressions.ImpressionsResult; + +import java.util.List; + +public class ProcessImpressionDebug implements ProcessImpressionStrategy{ + + private final ImpressionObserver _impressionObserver; + private final boolean _listenerEnabled; + + public ProcessImpressionDebug(boolean listenerEnabled, ImpressionObserver impressionObserver) { + _listenerEnabled = listenerEnabled; + _impressionObserver = impressionObserver; + } + + @Override + public ImpressionsResult process(List impressions) { + for(Impression impression : impressions) { + if (impression.properties() != null) { + continue; + } + impression.withPreviousTime(_impressionObserver.testAndSet(impression)); + } + List impressionForListener = this._listenerEnabled ? 
impressions : null; + return new ImpressionsResult(impressions, impressionForListener); + } +} diff --git a/client/src/main/java/io/split/client/impressions/strategy/ProcessImpressionNone.java b/client/src/main/java/io/split/client/impressions/strategy/ProcessImpressionNone.java new file mode 100644 index 000000000..4764323d3 --- /dev/null +++ b/client/src/main/java/io/split/client/impressions/strategy/ProcessImpressionNone.java @@ -0,0 +1,33 @@ +package io.split.client.impressions.strategy; + +import io.split.client.impressions.Impression; +import io.split.client.impressions.ImpressionCounter; +import io.split.client.impressions.ImpressionsResult; +import io.split.client.impressions.UniqueKeysTracker; + +import java.util.ArrayList; +import java.util.List; + +public class ProcessImpressionNone implements ProcessImpressionStrategy{ + + private final UniqueKeysTracker _uniqueKeysTracker; + private final ImpressionCounter _impressionCounter; + private final boolean _listenerEnabled; + + public ProcessImpressionNone(boolean listenerEnabled,UniqueKeysTracker uniqueKeysTracker, ImpressionCounter impressionCounter) { + _listenerEnabled = listenerEnabled; + _uniqueKeysTracker = uniqueKeysTracker; + _impressionCounter = impressionCounter; + } + + @Override + public ImpressionsResult process(List impressions) { + + for(Impression impression: impressions){ + _impressionCounter.inc(impression.split(), impression.time(), 1); + _uniqueKeysTracker.track(impression.split(),impression.key()); + } + List impressionForListener = this._listenerEnabled ? 
impressions : null; + return new ImpressionsResult(new ArrayList<>(), impressionForListener); + } +} diff --git a/client/src/main/java/io/split/client/impressions/strategy/ProcessImpressionOptimized.java b/client/src/main/java/io/split/client/impressions/strategy/ProcessImpressionOptimized.java new file mode 100644 index 000000000..65fd9ab49 --- /dev/null +++ b/client/src/main/java/io/split/client/impressions/strategy/ProcessImpressionOptimized.java @@ -0,0 +1,58 @@ +package io.split.client.impressions.strategy; + +import io.split.client.impressions.Impression; +import io.split.client.impressions.ImpressionObserver; +import io.split.client.impressions.ImpressionUtils; +import io.split.client.impressions.ImpressionsResult; +import io.split.client.impressions.ImpressionCounter; +import io.split.telemetry.domain.enums.ImpressionsDataTypeEnum; +import io.split.telemetry.storage.TelemetryRuntimeProducer; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +public class ProcessImpressionOptimized implements ProcessImpressionStrategy{ + + private final ImpressionObserver _impressionObserver; + private final ImpressionCounter _impressionCounter; + private final TelemetryRuntimeProducer _telemetryRuntimeProducer; + private final boolean _listenerEnabled; + + + public ProcessImpressionOptimized(boolean listenerEnabled, ImpressionObserver impressionObserver, ImpressionCounter impressionCounter, + TelemetryRuntimeProducer telemetryRuntimeProducer) { + _telemetryRuntimeProducer = telemetryRuntimeProducer; + _listenerEnabled = listenerEnabled; + _impressionObserver = impressionObserver; + _impressionCounter = impressionCounter; + } + + @Override + public ImpressionsResult process(List impressions) { + List impressionsToQueue = new ArrayList<>(); + for(Impression impression : impressions) { + if (impression.properties() == null) { + impression = impression.withPreviousTime(_impressionObserver.testAndSet(impression)); + if 
(!Objects.isNull(impression.pt()) && impression.pt() != 0) { + _impressionCounter.inc(impression.split(), impression.time(), 1); + } + if (shouldntQueueImpression(impression)) { + continue; + } + } + impressionsToQueue.add(impression); + } + List impressionForListener = this._listenerEnabled ? impressions : null; + + _telemetryRuntimeProducer.recordImpressionStats(ImpressionsDataTypeEnum.IMPRESSIONS_DEDUPED, impressions.size()- + (long)impressionsToQueue.size()); + + return new ImpressionsResult(impressionsToQueue, impressionForListener); + } + + private boolean shouldntQueueImpression(Impression i) { + return !Objects.isNull(i.pt()) && + ImpressionUtils.truncateTimeframe(i.pt()) == ImpressionUtils.truncateTimeframe(i.time()); + } +} diff --git a/client/src/main/java/io/split/client/impressions/strategy/ProcessImpressionStrategy.java b/client/src/main/java/io/split/client/impressions/strategy/ProcessImpressionStrategy.java new file mode 100644 index 000000000..48791001c --- /dev/null +++ b/client/src/main/java/io/split/client/impressions/strategy/ProcessImpressionStrategy.java @@ -0,0 +1,11 @@ +package io.split.client.impressions.strategy; + +import io.split.client.impressions.Impression; +import io.split.client.impressions.ImpressionsResult; + +import java.util.List; + +public interface ProcessImpressionStrategy { + + ImpressionsResult process(List impressions); +} diff --git a/client/src/main/java/io/split/client/interceptors/FlagSetsFilter.java b/client/src/main/java/io/split/client/interceptors/FlagSetsFilter.java new file mode 100644 index 000000000..f571f1342 --- /dev/null +++ b/client/src/main/java/io/split/client/interceptors/FlagSetsFilter.java @@ -0,0 +1,9 @@ +package io.split.client.interceptors; + +import java.util.Set; + +public interface FlagSetsFilter { + + boolean intersect(Set sets); + boolean intersect(String set); +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/interceptors/FlagSetsFilterImpl.java 
b/client/src/main/java/io/split/client/interceptors/FlagSetsFilterImpl.java new file mode 100644 index 000000000..e97da9f6c --- /dev/null +++ b/client/src/main/java/io/split/client/interceptors/FlagSetsFilterImpl.java @@ -0,0 +1,40 @@ +package io.split.client.interceptors; + +import java.util.Set; + +public class FlagSetsFilterImpl implements FlagSetsFilter { + + private final Set _flagSets; + private final boolean _shouldFilter; + + public FlagSetsFilterImpl(Set flagSets) { + _shouldFilter = !flagSets.isEmpty(); + _flagSets = flagSets; + } + @Override + public boolean intersect(Set sets) { + if (!_shouldFilter) { + return true; + } + if (sets == null || sets.isEmpty()) { + return false; + } + for (String set: sets) { + if (_flagSets.contains(set)) { + return true; + } + } + return false; + } + + @Override + public boolean intersect(String set) { + if (!_shouldFilter) { + return true; + } + if (set.isEmpty()){ + return false; + } + return _flagSets.contains(set); + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/jmx/JmxMonitor.java b/client/src/main/java/io/split/client/jmx/JmxMonitor.java index ce2e3ff48..0acd7deda 100644 --- a/client/src/main/java/io/split/client/jmx/JmxMonitor.java +++ b/client/src/main/java/io/split/client/jmx/JmxMonitor.java @@ -130,4 +130,4 @@ private String getContextPath() { return null; } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/jmx/SplitJmxMonitor.java b/client/src/main/java/io/split/client/jmx/SplitJmxMonitor.java index 052d726ad..fa5ad58ac 100644 --- a/client/src/main/java/io/split/client/jmx/SplitJmxMonitor.java +++ b/client/src/main/java/io/split/client/jmx/SplitJmxMonitor.java @@ -25,7 +25,8 @@ public class SplitJmxMonitor implements SplitJmxMonitorMBean { private final SegmentSynchronizationTask _segmentSynchronizationTask; private SegmentCacheConsumer segmentCacheConsumer; - public SplitJmxMonitor(SplitClient splitClient, SplitFetcher featureFetcher, 
SplitCacheConsumer splitCacheConsumer, SegmentSynchronizationTask segmentSynchronizationTask, SegmentCacheConsumer segmentCacheConsumer) { + public SplitJmxMonitor(SplitClient splitClient, SplitFetcher featureFetcher, SplitCacheConsumer splitCacheConsumer, + SegmentSynchronizationTask segmentSynchronizationTask, SegmentCacheConsumer segmentCacheConsumer) { _client = checkNotNull(splitClient); _featureFetcher = checkNotNull(featureFetcher); _splitCacheConsumer = checkNotNull(splitCacheConsumer); @@ -69,4 +70,4 @@ public String fetchDefinition(String featureName) { public boolean isKeyInSegment(String key, String segmentName) { return segmentCacheConsumer.isInSegment(segmentName, key); } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/jmx/SplitJmxMonitorMBean.java b/client/src/main/java/io/split/client/jmx/SplitJmxMonitorMBean.java index 6b492043d..94d1728bf 100644 --- a/client/src/main/java/io/split/client/jmx/SplitJmxMonitorMBean.java +++ b/client/src/main/java/io/split/client/jmx/SplitJmxMonitorMBean.java @@ -36,4 +36,4 @@ public interface SplitJmxMonitorMBean { */ boolean isKeyInSegment(String key, String segmentName); -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/utils/ApacheRequestDecorator.java b/client/src/main/java/io/split/client/utils/ApacheRequestDecorator.java new file mode 100644 index 000000000..c64d9d46c --- /dev/null +++ b/client/src/main/java/io/split/client/utils/ApacheRequestDecorator.java @@ -0,0 +1,43 @@ +package io.split.client.utils; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.hc.core5.http.Header; +import org.apache.hc.core5.http.HttpRequest; + +import io.split.client.RequestDecorator; +import io.split.client.dtos.RequestContext; + +public class ApacheRequestDecorator { + + public static HttpRequest decorate(HttpRequest request, RequestDecorator decorator) { + + RequestContext ctx = new 
RequestContext(convertToMap(request.getHeaders())); + for (Map.Entry> entry : decorator.decorateHeaders(ctx).headers().entrySet()) { + List values = entry.getValue(); + for (int i = 0; i < values.size(); i++) { + if (i == 0) { + request.setHeader(entry.getKey(), values.get(i)); + } else { + request.addHeader(entry.getKey(), values.get(i)); + } + } + } + + return request; + } + + private static Map> convertToMap(Header[] to_convert) { + Map> to_return = new HashMap>(); + for (Integer i = 0; i < to_convert.length; i++) { + if (!to_return.containsKey(to_convert[i].getName())) { + to_return.put(to_convert[i].getName(), new ArrayList()); + } + to_return.get(to_convert[i].getName()).add(to_convert[i].getValue()); + } + return to_return; + } +} diff --git a/client/src/main/java/io/split/client/utils/FeatureFlagProcessor.java b/client/src/main/java/io/split/client/utils/FeatureFlagProcessor.java new file mode 100644 index 000000000..f6e4878a9 --- /dev/null +++ b/client/src/main/java/io/split/client/utils/FeatureFlagProcessor.java @@ -0,0 +1,43 @@ +package io.split.client.utils; + +import io.split.client.dtos.Split; +import io.split.client.dtos.Status; +import io.split.client.interceptors.FlagSetsFilter; +import io.split.engine.experiments.ParsedSplit; +import io.split.engine.experiments.SplitParser; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +public class FeatureFlagProcessor { + private static final Logger _log = LoggerFactory.getLogger(FeatureFlagProcessor.class); + + public static FeatureFlagsToUpdate processFeatureFlagChanges(SplitParser splitParser, List splits, FlagSetsFilter flagSetsFilter) { + List toAdd = new ArrayList<>(); + List toRemove = new ArrayList<>(); + Set segments = new HashSet<>(); + for (Split split : splits) { + if (split.status != Status.ACTIVE) { + // archive. 
+ toRemove.add(split.name); + continue; + } + if (!flagSetsFilter.intersect(split.sets)) { + toRemove.add(split.name); + continue; + } + ParsedSplit parsedSplit = splitParser.parse(split); + if (parsedSplit == null) { + _log.debug(String.format("We could not parse the feature flag definition for: %s", split.name)); + continue; + } + segments.addAll(parsedSplit.getSegmentsNames()); + toAdd.add(parsedSplit); + } + return new FeatureFlagsToUpdate(toAdd, toRemove, segments); + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/utils/FeatureFlagsToUpdate.java b/client/src/main/java/io/split/client/utils/FeatureFlagsToUpdate.java new file mode 100644 index 000000000..3eb03c7b5 --- /dev/null +++ b/client/src/main/java/io/split/client/utils/FeatureFlagsToUpdate.java @@ -0,0 +1,30 @@ +package io.split.client.utils; + +import io.split.engine.experiments.ParsedSplit; + +import java.util.List; +import java.util.Set; + +public class FeatureFlagsToUpdate { + List toAdd; + List toRemove; + Set segments; + + public FeatureFlagsToUpdate(List toAdd, List toRemove, Set segments) { + this.toAdd = toAdd; + this.toRemove = toRemove; + this.segments = segments; + } + + public List getToAdd() { + return toAdd; + } + + public List getToRemove() { + return toRemove; + } + + public Set getSegments() { + return segments; + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/utils/FileInputStreamProvider.java b/client/src/main/java/io/split/client/utils/FileInputStreamProvider.java new file mode 100644 index 000000000..c1d9f9812 --- /dev/null +++ b/client/src/main/java/io/split/client/utils/FileInputStreamProvider.java @@ -0,0 +1,26 @@ +package io.split.client.utils; + +import io.split.client.exceptions.InputStreamProviderException; + +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.InputStream; + +public class FileInputStreamProvider implements InputStreamProvider { + + private final String 
_fileName; + + public FileInputStreamProvider(String fileName) { + _fileName = fileName; + } + + @Override + public InputStream get() throws InputStreamProviderException { + try { + return new FileInputStream(_fileName); + } catch (FileNotFoundException f) { + throw new InputStreamProviderException(String.format("Problem fetching splitChanges using file named %s: %s", + _fileName, f.getMessage())); + } + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/utils/FileTypeEnum.java b/client/src/main/java/io/split/client/utils/FileTypeEnum.java new file mode 100644 index 000000000..202b701d9 --- /dev/null +++ b/client/src/main/java/io/split/client/utils/FileTypeEnum.java @@ -0,0 +1,8 @@ +package io.split.client.utils; + +public enum FileTypeEnum { + LEGACY, + YAML, + YML, + JSON +} diff --git a/client/src/main/java/io/split/client/utils/GenericClientUtil.java b/client/src/main/java/io/split/client/utils/GenericClientUtil.java index ac631df80..7953fe5bb 100644 --- a/client/src/main/java/io/split/client/utils/GenericClientUtil.java +++ b/client/src/main/java/io/split/client/utils/GenericClientUtil.java @@ -41,3 +41,4 @@ public static void process(List data, URI endpoint, CloseableHttpClient cl } } + diff --git a/client/src/main/java/io/split/client/utils/InputStreamProvider.java b/client/src/main/java/io/split/client/utils/InputStreamProvider.java new file mode 100644 index 000000000..02df28398 --- /dev/null +++ b/client/src/main/java/io/split/client/utils/InputStreamProvider.java @@ -0,0 +1,10 @@ +package io.split.client.utils; + +import io.split.client.exceptions.InputStreamProviderException; + +import java.io.InputStream; + +public interface InputStreamProvider { + + InputStream get() throws InputStreamProviderException; +} diff --git a/client/src/main/java/io/split/client/utils/Json.java b/client/src/main/java/io/split/client/utils/Json.java index 9d9a72699..4709e891f 100644 --- a/client/src/main/java/io/split/client/utils/Json.java +++ 
b/client/src/main/java/io/split/client/utils/Json.java @@ -6,6 +6,7 @@ import com.google.gson.JsonPrimitive; import com.google.gson.JsonSerializationContext; import com.google.gson.JsonSerializer; +import com.google.gson.stream.JsonReader; import java.lang.reflect.Type; import java.util.Arrays; @@ -38,4 +39,7 @@ public static T fromJson(String json, Class clz) { return _json.fromJson(json, clz); } + public static T fromJson(JsonReader json, Class clz) { + return _json.fromJson(json, clz); + } } diff --git a/client/src/main/java/io/split/client/utils/LocalhostConstants.java b/client/src/main/java/io/split/client/utils/LocalhostConstants.java new file mode 100644 index 000000000..e883eabdd --- /dev/null +++ b/client/src/main/java/io/split/client/utils/LocalhostConstants.java @@ -0,0 +1,18 @@ +package io.split.client.utils; + +public final class LocalhostConstants { + public static final String CONTROL = "control"; + public static final String USER = "user"; + static final String TREATMENT_ON = "on"; + static final String TREATMENT_OFF = "off"; + public static final Integer SIZE_100 = 100; + public static final Integer SIZE_1 = 1; + static final Integer SIZE_0 = 0; + static final int ALGO = 2; + static final int MILLI_SECONDS = 1000; + static final Long DEFAULT_TS = -1L; + + private LocalhostConstants() { + + } +} diff --git a/client/src/main/java/io/split/client/utils/LocalhostSanitizer.java b/client/src/main/java/io/split/client/utils/LocalhostSanitizer.java new file mode 100644 index 000000000..28282adcb --- /dev/null +++ b/client/src/main/java/io/split/client/utils/LocalhostSanitizer.java @@ -0,0 +1,291 @@ +package io.split.client.utils; + +import com.google.common.base.Preconditions; +import io.split.client.dtos.Condition; +import io.split.client.dtos.ConditionType; +import io.split.client.dtos.KeySelector; +import io.split.client.dtos.Matcher; +import io.split.client.dtos.MatcherCombiner; +import io.split.client.dtos.MatcherGroup; +import 
io.split.client.dtos.MatcherType; +import io.split.client.dtos.Partition; +import io.split.client.dtos.SegmentChange; +import io.split.client.dtos.Split; +import io.split.client.dtos.SplitChange; +import io.split.client.dtos.Status; +import io.split.client.dtos.WhitelistMatcherData; +import io.split.client.dtos.RuleBasedSegment; +import io.split.client.dtos.ChangeDto; + +import java.security.SecureRandom; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +public final class LocalhostSanitizer { + private static final String DEFAULT_RULE = "default rule"; + + private LocalhostSanitizer() { + throw new IllegalStateException("Utility class"); + } + + public static SplitChange sanitization(SplitChange splitChange) { + sanitizeTillAndSince(splitChange); + splitChange.featureFlags.d = sanitizeFeatureFlags(splitChange.featureFlags.d); + splitChange.ruleBasedSegments.d = sanitizeRuleBasedSegments(splitChange.ruleBasedSegments.d); + + return splitChange; + } + + private static List sanitizeRuleBasedSegments(List ruleBasedSegments) { + List ruleBasedSegmentsToRemove = new ArrayList<>(); + if (ruleBasedSegments != null) { + for (RuleBasedSegment ruleBasedSegment : ruleBasedSegments) { + if (ruleBasedSegment.name == null) { + ruleBasedSegmentsToRemove.add(ruleBasedSegment); + continue; + } + ruleBasedSegment.trafficTypeName = sanitizeIfNullOrEmpty(ruleBasedSegment.trafficTypeName, LocalhostConstants.USER); + ruleBasedSegment.status = sanitizeStatus(ruleBasedSegment.status); + ruleBasedSegment.changeNumber = sanitizeChangeNumber(ruleBasedSegment.changeNumber, 0); + ruleBasedSegment.conditions = sanitizeConditions((ArrayList) ruleBasedSegment.conditions, false, + ruleBasedSegment.trafficTypeName); + ruleBasedSegment.excluded.segments = sanitizeExcluded((ArrayList) ruleBasedSegment.excluded.segments); + ruleBasedSegment.excluded.keys = sanitizeExcluded((ArrayList) ruleBasedSegment.excluded.keys); + } + 
ruleBasedSegments.removeAll(ruleBasedSegmentsToRemove); + } else { + ruleBasedSegments = new ArrayList<>(); + } + return ruleBasedSegments; + } + + private static List sanitizeFeatureFlags(List featureFlags) { + List splitsToRemove = new ArrayList<>(); + if (featureFlags != null) { + for (Split split : featureFlags) { + if (split.name == null) { + splitsToRemove.add(split); + continue; + } + split.trafficTypeName = sanitizeIfNullOrEmpty(split.trafficTypeName, LocalhostConstants.USER); + split.status = sanitizeStatus(split.status); + split.defaultTreatment = sanitizeIfNullOrEmpty(split.defaultTreatment, LocalhostConstants.CONTROL); + split.changeNumber = sanitizeChangeNumber(split.changeNumber, 0); + split.trafficAllocation = sanitizeTrafficAllocation(split.trafficAllocation); + split.trafficAllocationSeed = sanitizeSeed(split.trafficAllocationSeed); + split.seed = sanitizeSeed(split.seed); + split.algo = sanitizeAlgo(split.algo); + split.conditions = sanitizeConditions((ArrayList) split.conditions, false, split.trafficTypeName); + } + featureFlags.removeAll(splitsToRemove); + } else { + featureFlags = new ArrayList<>(); + } + return featureFlags; + } + + private static int sanitizeSeed(Integer seed) { + SecureRandom random = new SecureRandom(); + if (seed == null || seed == 0) { + seed = -random.nextInt(10) * LocalhostConstants.MILLI_SECONDS; + } + return seed; + } + + private static int sanitizeAlgo(int algo) { + if (algo != LocalhostConstants.ALGO) { + algo = LocalhostConstants.ALGO; + } + return algo; + } + + private static int sanitizeTrafficAllocation(Integer trafficAllocation) { + if (trafficAllocation == null || trafficAllocation < 0 || trafficAllocation > LocalhostConstants.SIZE_100) { + trafficAllocation = LocalhostConstants.SIZE_100; + } + return trafficAllocation; + } + + private static ArrayList sanitizeConditions(ArrayList conditions, boolean createPartition, String trafficTypeName) { + if (conditions == null) { + conditions = new ArrayList<>(); + } + 
+ Condition condition = new Condition(); + if (!conditions.isEmpty()){ + condition = conditions.get(conditions.size() - 1); + } + + if (conditions.isEmpty() || !condition.conditionType.equals(ConditionType.ROLLOUT) || + condition.matcherGroup.matchers == null || + condition.matcherGroup.matchers.isEmpty() || + !condition.matcherGroup.matchers.get(0).matcherType.equals(MatcherType.ALL_KEYS)) { + Condition rolloutCondition = new Condition(); + conditions.add(createRolloutCondition(rolloutCondition, trafficTypeName, null, createPartition)); + } + return conditions; + } + private static String sanitizeIfNullOrEmpty(String toBeSanitized, String defaultValue) { + if (toBeSanitized == null || toBeSanitized.isEmpty()) { + return defaultValue; + } + return toBeSanitized; + } + + private static long sanitizeChangeNumber(long toBeSanitized, long defaultValue) { + if (toBeSanitized < 0) { + return defaultValue; + } + return toBeSanitized; + } + + private static Status sanitizeStatus(Status toBeSanitized) { + if (toBeSanitized == null || toBeSanitized != Status.ACTIVE && toBeSanitized != Status.ARCHIVED) { + return Status.ACTIVE; + } + return toBeSanitized; + + } + + private static ArrayList sanitizeExcluded(ArrayList excluded) + { + if (excluded == null) { + return new ArrayList<>(); + } + return excluded; + } + + private static SplitChange sanitizeTillAndSince(SplitChange splitChange) { + if (checkTillConditions(splitChange.featureFlags)) { + splitChange.featureFlags.t = LocalhostConstants.DEFAULT_TS; + } + if (checkSinceConditions(splitChange.featureFlags)) { + splitChange.featureFlags.s = splitChange.featureFlags.t; + } + + if (checkTillConditions(splitChange.ruleBasedSegments)) { + splitChange.ruleBasedSegments.t = LocalhostConstants.DEFAULT_TS; + } + if (checkSinceConditions(splitChange.ruleBasedSegments)) { + splitChange.ruleBasedSegments.s = splitChange.ruleBasedSegments.t; + } + return splitChange; + } + + private static boolean checkTillConditions(ChangeDto change) { 
+ return change.t < LocalhostConstants.DEFAULT_TS || change.t == 0; + } + + private static boolean checkSinceConditions(ChangeDto change) { + return change.s < LocalhostConstants.DEFAULT_TS || change.s > change.t; + } + + public static SegmentChange sanitization(SegmentChange segmentChange) { + if (segmentChange.name == null || segmentChange.name.isEmpty()) { + return null; + } + if (segmentChange.added == null) { + segmentChange.added = new ArrayList<>(); + } + if (segmentChange.removed == null) { + segmentChange.removed = new ArrayList<>(); + } + List addedToRemoved = segmentChange.added.stream().filter(add -> segmentChange.removed.contains(add)).collect(Collectors.toList()); + segmentChange.removed.removeAll(addedToRemoved); + + if (segmentChange.till < LocalhostConstants.DEFAULT_TS || segmentChange.till == 0){ + segmentChange.till = LocalhostConstants.DEFAULT_TS; + } + if (segmentChange.since < LocalhostConstants.DEFAULT_TS || segmentChange.since > segmentChange.till) { + segmentChange.since = segmentChange.till; + } + return segmentChange; + } + + public static Condition createRolloutCondition(Condition condition, String trafficType, String treatment, boolean createPartition) { + condition.conditionType = ConditionType.ROLLOUT; + condition.matcherGroup = new MatcherGroup(); + condition.matcherGroup.combiner = MatcherCombiner.AND; + Matcher matcher = new Matcher(); + KeySelector keySelector = new KeySelector(); + keySelector.trafficType = trafficType; + + matcher.keySelector = keySelector; + matcher.matcherType = MatcherType.ALL_KEYS; + matcher.negate = false; + + condition.matcherGroup.matchers = new ArrayList<>(); + condition.matcherGroup.matchers.add(matcher); + + if (createPartition) { + condition.partitions = new ArrayList<>(); + Partition partition1 = new Partition(); + Partition partition2 = new Partition(); + partition1.size = LocalhostConstants.SIZE_100; + partition2.size = LocalhostConstants.SIZE_0; + if (treatment != null) { + partition1.treatment = 
treatment; + } else { + partition1.treatment = LocalhostConstants.TREATMENT_OFF; + partition2.treatment = LocalhostConstants.TREATMENT_ON; + } + condition.partitions.add(partition1); + condition.partitions.add(partition2); + } + condition.label = DEFAULT_RULE; + + return condition; + } + + public static Condition createCondition(Object keyOrKeys, String treatment) { + Condition condition = new Condition(); + if (keyOrKeys == null) { + return LocalhostSanitizer.createRolloutCondition(condition, "user", treatment, true); + } else { + if (keyOrKeys instanceof String) { + List keys = new ArrayList<>(); + keys.add(keyOrKeys); + return createWhitelistCondition(condition, "user", treatment, keys); + } else { + Preconditions.checkArgument(keyOrKeys instanceof List, "'keys' is not a String nor a List."); + return createWhitelistCondition(condition, "user", treatment, (List) keyOrKeys); + } + } + } + + public static Condition createWhitelistCondition(Condition condition, String trafficType, String treatment, List keys) { + condition.conditionType = ConditionType.WHITELIST; + condition.matcherGroup = new MatcherGroup(); + condition.matcherGroup.combiner = MatcherCombiner.AND; + Matcher matcher = new Matcher(); + KeySelector keySelector = new KeySelector(); + keySelector.trafficType = trafficType; + + matcher.keySelector = keySelector; + matcher.matcherType = MatcherType.WHITELIST; + matcher.negate = false; + matcher.whitelistMatcherData = new WhitelistMatcherData(); + matcher.whitelistMatcherData.whitelist = new ArrayList<>(keys); + + condition.matcherGroup.matchers = new ArrayList<>(); + condition.matcherGroup.matchers.add(matcher); + + condition.partitions = new ArrayList<>(); + Partition partition1 = new Partition(); + Partition partition2 = new Partition(); + partition1.size = LocalhostConstants.SIZE_100; + partition2.size = LocalhostConstants.SIZE_0; + if (treatment != null) { + partition1.treatment = treatment; + } else { + partition1.treatment = 
LocalhostConstants.TREATMENT_OFF; + partition2.treatment = LocalhostConstants.TREATMENT_ON; + } + condition.partitions.add(partition1); + condition.partitions.add(partition2); + condition.label = "default rule"; + + return condition; + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/utils/RuleBasedSegmentProcessor.java b/client/src/main/java/io/split/client/utils/RuleBasedSegmentProcessor.java new file mode 100644 index 000000000..7720367b1 --- /dev/null +++ b/client/src/main/java/io/split/client/utils/RuleBasedSegmentProcessor.java @@ -0,0 +1,65 @@ +package io.split.client.utils; + +import io.split.client.dtos.Excluded; +import io.split.client.dtos.RuleBasedSegment; +import io.split.client.dtos.Status; +import io.split.engine.experiments.ParsedRuleBasedSegment; +import io.split.engine.experiments.RuleBasedSegmentParser; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +public class RuleBasedSegmentProcessor { + private static final Logger _log = LoggerFactory.getLogger(RuleBasedSegmentProcessor.class); + + private RuleBasedSegmentProcessor() { + throw new IllegalStateException("Utility class"); + } + + public static RuleBasedSegmentsToUpdate processRuleBasedSegmentChanges(RuleBasedSegmentParser ruleBasedSegmentParser, + List ruleBasedSegments) { + List toAdd = new ArrayList<>(); + List toRemove = new ArrayList<>(); + Set segments = new HashSet<>(); + for (RuleBasedSegment ruleBasedSegment : ruleBasedSegments) { + ruleBasedSegment.excluded = checkExcluded(ruleBasedSegment.excluded); + if (ruleBasedSegment.status != Status.ACTIVE) { + // archive. 
+ toRemove.add(ruleBasedSegment.name); + continue; + } + ParsedRuleBasedSegment parsedRuleBasedSegment = ruleBasedSegmentParser.parse(ruleBasedSegment); + if (parsedRuleBasedSegment == null) { + _log.debug(String.format("We could not parse the rule based segment definition for: %s", ruleBasedSegment.name)); + } else { + segments.addAll(parsedRuleBasedSegment.getSegmentsNames()); + toAdd.add(parsedRuleBasedSegment); + } + } + return new RuleBasedSegmentsToUpdate(toAdd, toRemove, segments); + } + + private static Excluded createEmptyExcluded() { + Excluded excluded = new Excluded(); + excluded.segments = new ArrayList<>(); + excluded.keys = new ArrayList<>(); + return excluded; + } + + private static Excluded checkExcluded(Excluded excluded) { + if (excluded == null) { + excluded = createEmptyExcluded(); + } + if (excluded.segments == null) { + excluded.segments = new ArrayList<>(); + } + if (excluded.keys == null) { + excluded.keys = new ArrayList<>(); + } + return excluded; + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/utils/RuleBasedSegmentsToUpdate.java b/client/src/main/java/io/split/client/utils/RuleBasedSegmentsToUpdate.java new file mode 100644 index 000000000..850ae8493 --- /dev/null +++ b/client/src/main/java/io/split/client/utils/RuleBasedSegmentsToUpdate.java @@ -0,0 +1,30 @@ +package io.split.client.utils; + +import io.split.engine.experiments.ParsedRuleBasedSegment; + +import java.util.List; +import java.util.Set; + +public class RuleBasedSegmentsToUpdate { + List toAdd; + List toRemove; + Set segments; + + public RuleBasedSegmentsToUpdate(List toAdd, List toRemove, Set segments) { + this.toAdd = toAdd; + this.toRemove = toRemove; + this.segments = segments; + } + + public List getToAdd() { + return toAdd; + } + + public List getToRemove() { + return toRemove; + } + + public Set getSegments() { + return segments; + } +} \ No newline at end of file diff --git 
a/client/src/main/java/io/split/client/utils/SplitExecutorFactory.java b/client/src/main/java/io/split/client/utils/SplitExecutorFactory.java new file mode 100644 index 000000000..f9897edfd --- /dev/null +++ b/client/src/main/java/io/split/client/utils/SplitExecutorFactory.java @@ -0,0 +1,33 @@ +package io.split.client.utils; + +import com.google.common.util.concurrent.ThreadFactoryBuilder; + +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ThreadFactory; + +public class SplitExecutorFactory { + + public static ScheduledExecutorService buildScheduledExecutorService(ThreadFactory threadFactory, String name, Integer size) { + return Executors.newScheduledThreadPool(size, buildThreadFactory(threadFactory, name)); + } + + public static ScheduledExecutorService buildSingleThreadScheduledExecutor(ThreadFactory threadFactory, String name){ + return Executors.newSingleThreadScheduledExecutor(buildThreadFactory(threadFactory, name)); + } + + public static ExecutorService buildExecutorService(ThreadFactory threadFactory, String name) { + return Executors.newSingleThreadExecutor(buildThreadFactory(threadFactory, name)); + } + + private static ThreadFactory buildThreadFactory(ThreadFactory threadFactory, String name) { + ThreadFactoryBuilder threadFactoryBuilder = new ThreadFactoryBuilder() + .setDaemon(true) + .setNameFormat(name); + if (threadFactory != null) { + threadFactoryBuilder.setThreadFactory(threadFactory); + } + return threadFactoryBuilder.build(); + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/client/utils/StaticContentInputStreamProvider.java b/client/src/main/java/io/split/client/utils/StaticContentInputStreamProvider.java new file mode 100644 index 000000000..d129b77c8 --- /dev/null +++ b/client/src/main/java/io/split/client/utils/StaticContentInputStreamProvider.java @@ -0,0 +1,26 @@ +package 
io.split.client.utils; + +import io.split.client.exceptions.InputStreamProviderException; + +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.util.stream.Collectors; + +public class StaticContentInputStreamProvider implements InputStreamProvider { + + private final String _streamContents; + + public StaticContentInputStreamProvider(InputStream inputStream){ + _streamContents = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)) + .lines() + .collect(Collectors.joining("\n")); + } + + @Override + public InputStream get() throws InputStreamProviderException { + return new ByteArrayInputStream(_streamContents.getBytes()); + } +} diff --git a/client/src/main/java/io/split/client/utils/Utils.java b/client/src/main/java/io/split/client/utils/Utils.java index a83a322dd..9a386db55 100644 --- a/client/src/main/java/io/split/client/utils/Utils.java +++ b/client/src/main/java/io/split/client/utils/Utils.java @@ -1,11 +1,10 @@ package io.split.client.utils; -import com.google.common.base.Charsets; +import io.split.client.dtos.ChangeDto; import org.apache.hc.client5.http.impl.classic.CloseableHttpResponse; import org.apache.hc.core5.http.ContentType; import org.apache.hc.core5.http.HttpEntity; import org.apache.hc.core5.http.io.entity.HttpEntities; -import org.apache.hc.core5.http.io.entity.StringEntity; import org.apache.hc.core5.net.URIBuilder; import java.io.IOException; @@ -42,4 +41,8 @@ public static URI appendPath(URI root, String pathToAppend) throws URISyntaxExce String path = String.format("%s%s%s", root.getPath(), root.getPath().endsWith("/") ? 
"" : "/", pathToAppend); return new URIBuilder(root).setPath(path).build(); } -} + + public static boolean checkExitConditions(ChangeDto change, long cn) { + return change.t < cn && change.t != -1; + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/SDKReadinessGates.java b/client/src/main/java/io/split/engine/SDKReadinessGates.java index 10a18fbba..c5ca5dbc8 100644 --- a/client/src/main/java/io/split/engine/SDKReadinessGates.java +++ b/client/src/main/java/io/split/engine/SDKReadinessGates.java @@ -3,9 +3,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -20,8 +17,8 @@ public class SDKReadinessGates { /** * Returns true if the SDK is ready. The SDK is ready when: *
    - *
  1. It has fetched Split definitions the first time.
  2. - *
  3. It has downloaded segment memberships for segments in use in the initial split definitions
  4. + *
  5. It has fetched Feature flag definitions the first time.
  6. + *
  7. It has downloaded segment memberships for segments in use in the initial Feature flag definitions
  8. *
*

* This operation will block until the SDK is ready or 'milliseconds' have passed. If the milliseconds diff --git a/client/src/main/java/io/split/engine/common/Backoff.java b/client/src/main/java/io/split/engine/common/Backoff.java index abbb0b077..56d9c426a 100644 --- a/client/src/main/java/io/split/engine/common/Backoff.java +++ b/client/src/main/java/io/split/engine/common/Backoff.java @@ -22,9 +22,7 @@ public Backoff(long backoffBase, long maxAllowed) { } public long interval() { - long interval = _backoffBase * (long) Math.pow(2, _attempt.getAndIncrement()); - - return interval >= _maxAllowed ? _maxAllowed : interval; + return Math.min(_backoffBase * (long) Math.pow(2, _attempt.getAndIncrement()), _maxAllowed); } public synchronized void reset() { diff --git a/client/src/main/java/io/split/engine/common/ConsumerSyncManager.java b/client/src/main/java/io/split/engine/common/ConsumerSyncManager.java new file mode 100644 index 000000000..ee6f2d627 --- /dev/null +++ b/client/src/main/java/io/split/engine/common/ConsumerSyncManager.java @@ -0,0 +1,21 @@ +package io.split.engine.common; + +import java.io.IOException; + +public class ConsumerSyncManager implements SyncManager { + private final Synchronizer _redisSynchronizer; + + public ConsumerSyncManager(Synchronizer redisSynchronizer){ + _redisSynchronizer = redisSynchronizer; + } + + @Override + public void start() { + _redisSynchronizer.startPeriodicDataRecording(); + } + + @Override + public void shutdown() throws IOException { + _redisSynchronizer.stopPeriodicDataRecording(); + } +} diff --git a/client/src/main/java/io/split/engine/common/ConsumerSynchronizer.java b/client/src/main/java/io/split/engine/common/ConsumerSynchronizer.java new file mode 100644 index 000000000..c92092537 --- /dev/null +++ b/client/src/main/java/io/split/engine/common/ConsumerSynchronizer.java @@ -0,0 +1,89 @@ +package io.split.engine.common; + +import io.split.client.impressions.ImpressionsManager; +import 
io.split.client.impressions.UniqueKeysTracker; +import io.split.engine.sse.dtos.SplitKillNotification; +import io.split.telemetry.synchronizer.TelemetrySyncTask; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ConsumerSynchronizer implements Synchronizer{ + + private static final Logger _log = LoggerFactory.getLogger(ConsumerSynchronizer.class); + private final UniqueKeysTracker _uniqueKeysTracker; + private final ImpressionsManager _impressionManager; + private final TelemetrySyncTask _telemetrySyncTask; + + public ConsumerSynchronizer(SplitTasks splitTasks){ + _uniqueKeysTracker = splitTasks.getUniqueKeysTracker(); + _impressionManager = splitTasks.getImpressionManager(); + _telemetrySyncTask = splitTasks.getTelemetrySyncTask(); + } + @Override + public boolean syncAll() { + return false; + } + + @Override + public void startPeriodicFetching() { + //No-Op + } + + @Override + public void stopPeriodicFetching() { + //No-Op + } + + @Override + public void refreshSplits(Long targetChangeNumber, Long ruleBasedSegmentChangeNumber) { + //No-Op + } + + @Override + public void localKillSplit(SplitKillNotification splitKillNotification) { + //No-Op + } + + @Override + public void refreshSegment(String segmentName, Long targetChangeNumber) { + //No-Op + } + + @Override + public void startPeriodicDataRecording() { + try { + _impressionManager.start(); + } catch (Exception e) { + _log.error("Error trying to init Impression Manager synchronizer task.", e); + } + if (_uniqueKeysTracker != null){ + try { + _uniqueKeysTracker.start(); + } catch (Exception e) { + _log.error("Error trying to init Unique Keys Tracker synchronizer task.", e); + } + } + try { + _telemetrySyncTask.startScheduledTask(); + } catch (Exception e) { + _log.error("Error trying to Telemetry synchronizer task.", e); + } + } + + @Override + public void stopPeriodicDataRecording() { + _impressionManager.close(); + _log.info("Successful shutdown of impressions manager"); + if 
(_uniqueKeysTracker != null){ + _uniqueKeysTracker.stop(); + _log.info("Successful stop of UniqueKeysTracker"); + } + _telemetrySyncTask.stopScheduledTask(); + _log.info("Successful shutdown of telemetry sync task"); + } + + @Override + public void forceRefreshSegment(String segmentName) { + //No-Op + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/common/FetchOptions.java b/client/src/main/java/io/split/engine/common/FetchOptions.java index e1996b3e8..ff7a3d49d 100644 --- a/client/src/main/java/io/split/engine/common/FetchOptions.java +++ b/client/src/main/java/io/split/engine/common/FetchOptions.java @@ -1,8 +1,6 @@ package io.split.engine.common; -import java.util.Map; import java.util.Objects; -import java.util.function.Function; public class FetchOptions { @@ -14,9 +12,9 @@ public Builder() {} public Builder(FetchOptions opts) { _targetCN = opts._targetCN; + _targetCnRBS = opts._targetCnRBS; _cacheControlHeaders = opts._cacheControlHeaders; - _fastlyDebugHeader = opts._fastlyDebugHeader; - _responseHeadersCallback = opts._responseHeadersCallback; + _flagSetsFilter = opts._flagSetsFilter; } public Builder cacheControlHeaders(boolean on) { @@ -24,58 +22,53 @@ public Builder cacheControlHeaders(boolean on) { return this; } - public Builder fastlyDebugHeader(boolean on) { - _fastlyDebugHeader = on; + public Builder targetChangeNumber(long targetCN) { + _targetCN = targetCN; return this; } - public Builder responseHeadersCallback(Function, Void> callback) { - _responseHeadersCallback = callback; + public Builder targetChangeNumberRBS(long targetCnRBS) { + _targetCnRBS = targetCnRBS; return this; } - public Builder targetChangeNumber(long targetCN) { - _targetCN = targetCN; + public Builder flagSetsFilter(String flagSetsFilter) { + _flagSetsFilter = flagSetsFilter; return this; } public FetchOptions build() { - return new FetchOptions(_cacheControlHeaders, _targetCN, _responseHeadersCallback, _fastlyDebugHeader); + return new 
FetchOptions(_cacheControlHeaders, _targetCN, _targetCnRBS, _flagSetsFilter); } private long _targetCN = DEFAULT_TARGET_CHANGENUMBER; + private long _targetCnRBS = DEFAULT_TARGET_CHANGENUMBER; private boolean _cacheControlHeaders = false; - private boolean _fastlyDebugHeader = false; - private Function, Void> _responseHeadersCallback = null; + private String _flagSetsFilter = ""; } public boolean cacheControlHeadersEnabled() { return _cacheControlHeaders; } - public boolean fastlyDebugHeaderEnabled() { - return _fastlyDebugHeader; - } - public long targetCN() { return _targetCN; } + public long targetCnRBS() { return _targetCnRBS; } + public boolean hasCustomCN() { return _targetCN != DEFAULT_TARGET_CHANGENUMBER; } - public void handleResponseHeaders(Map headers) { - if (Objects.isNull(_responseHeadersCallback) || Objects.isNull(headers)) { - return; - } - _responseHeadersCallback.apply(headers); + public String flagSetsFilter() { + return _flagSetsFilter; } private FetchOptions(boolean cacheControlHeaders, long targetCN, - Function, Void> responseHeadersCallback, - boolean fastlyDebugHeader) { + long targetCnRBS, + String flagSetsFilter) { _cacheControlHeaders = cacheControlHeaders; _targetCN = targetCN; - _responseHeadersCallback = responseHeadersCallback; - _fastlyDebugHeader = fastlyDebugHeader; + _targetCnRBS = targetCnRBS; + _flagSetsFilter = flagSetsFilter; } @Override @@ -87,18 +80,19 @@ public boolean equals(Object obj) { FetchOptions other = (FetchOptions) obj; return Objects.equals(_cacheControlHeaders, other._cacheControlHeaders) - && Objects.equals(_fastlyDebugHeader, other._fastlyDebugHeader) - && Objects.equals(_responseHeadersCallback, other._responseHeadersCallback) - && Objects.equals(_targetCN, other._targetCN); + && Objects.equals(_targetCN, other._targetCN) + && Objects.equals(_targetCnRBS, other._targetCnRBS) + && Objects.equals(_flagSetsFilter, other._flagSetsFilter); } @Override public int hashCode() { - return 
com.google.common.base.Objects.hashCode(_cacheControlHeaders, _fastlyDebugHeader, _responseHeadersCallback, _targetCN); + return com.google.common.base.Objects.hashCode(_cacheControlHeaders, + _targetCN, _flagSetsFilter); } private final boolean _cacheControlHeaders; - private final boolean _fastlyDebugHeader; private final long _targetCN; - private final Function, Void> _responseHeadersCallback; -} + private final long _targetCnRBS; + private final String _flagSetsFilter; +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/common/LocalhostSyncManager.java b/client/src/main/java/io/split/engine/common/LocalhostSyncManager.java new file mode 100644 index 000000000..bb7bc7c45 --- /dev/null +++ b/client/src/main/java/io/split/engine/common/LocalhostSyncManager.java @@ -0,0 +1,36 @@ +package io.split.engine.common; + +import io.split.engine.SDKReadinessGates; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; + +import static com.google.common.base.Preconditions.checkNotNull; + +public class LocalhostSyncManager implements SyncManager { + + private static final Logger _log = LoggerFactory.getLogger(LocalhostSyncManager.class); + private final Synchronizer _localhostSynchronizer; + private final SDKReadinessGates _gates; + + public LocalhostSyncManager(Synchronizer localhostSynchronizer, SDKReadinessGates sdkReadinessGates){ + _localhostSynchronizer = checkNotNull(localhostSynchronizer); + _gates = sdkReadinessGates; + } + + @Override + public void start() { + if(!_localhostSynchronizer.syncAll()){ + _log.error("Could not synchronize feature flag and segment files"); + return; + } + _gates.sdkInternalReady(); + _localhostSynchronizer.startPeriodicFetching(); + } + + @Override + public void shutdown() throws IOException { + _localhostSynchronizer.stopPeriodicFetching(); + } +} diff --git a/client/src/main/java/io/split/engine/common/LocalhostSynchronizer.java 
b/client/src/main/java/io/split/engine/common/LocalhostSynchronizer.java new file mode 100644 index 000000000..211148804 --- /dev/null +++ b/client/src/main/java/io/split/engine/common/LocalhostSynchronizer.java @@ -0,0 +1,94 @@ +package io.split.engine.common; + +import io.split.engine.experiments.FetchResult; +import io.split.engine.experiments.SplitFetcher; +import io.split.engine.experiments.SplitSynchronizationTask; +import io.split.engine.segments.SegmentFetcher; +import io.split.engine.segments.SegmentSynchronizationTask; +import io.split.engine.sse.dtos.SplitKillNotification; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static com.google.common.base.Preconditions.checkNotNull; + +public class LocalhostSynchronizer implements Synchronizer{ + + private static final Logger _log = LoggerFactory.getLogger(LocalhostSynchronizer.class); + private final SplitSynchronizationTask _splitSynchronizationTask; + private final SplitFetcher _splitFetcher; + private final SegmentSynchronizationTask _segmentSynchronizationTaskImp; + private final boolean _refreshEnable; + + public LocalhostSynchronizer(SplitTasks splitTasks, + SplitFetcher splitFetcher, + boolean refreshEnable){ + _splitSynchronizationTask = checkNotNull(splitTasks.getSplitSynchronizationTask()); + _splitFetcher = checkNotNull(splitFetcher); + _segmentSynchronizationTaskImp = splitTasks.getSegmentSynchronizationTask(); + _refreshEnable = refreshEnable; + } + + @Override + public boolean syncAll() { + FetchResult fetchResult = _splitFetcher.forceRefresh(new FetchOptions.Builder().cacheControlHeaders(true).build()); + return fetchResult.isSuccess() && _segmentSynchronizationTaskImp.fetchAllSynchronous(); + } + + @Override + public void startPeriodicFetching() { + _log.debug("Starting Periodic Fetching ..."); + if(!_refreshEnable){ + _log.info("Refresh enable is false. 
The synchronization tasks are not going to start"); + return; + } + _splitSynchronizationTask.start(); + _segmentSynchronizationTaskImp.start(); + } + + @Override + public void stopPeriodicFetching() { + _log.debug("Stop Periodic Fetching ..."); + if(!_refreshEnable){ + return; + } + _splitSynchronizationTask.stop(); + _segmentSynchronizationTaskImp.stop(); + } + + @Override + public void refreshSplits(Long targetChangeNumber, Long ruleBasedSegmentChangeNumber) { + FetchResult fetchResult = _splitFetcher.forceRefresh(new FetchOptions.Builder().cacheControlHeaders(true).build()); + if (fetchResult.isSuccess()){ + _log.debug("Refresh feature flags completed"); + fetchResult.getSegments().stream().forEach(segmentName -> refreshSegment(segmentName, null)); + } else { + _log.debug("No changes fetched"); + } + } + + @Override + public void localKillSplit(SplitKillNotification splitKillNotification) { + //No-Op + } + + @Override + public void refreshSegment(String segmentName, Long targetChangeNumber) { + SegmentFetcher segmentFetcher = _segmentSynchronizationTaskImp.getFetcher(segmentName); + segmentFetcher.fetch(new FetchOptions.Builder().cacheControlHeaders(true).build()); + } + + @Override + public void startPeriodicDataRecording() { + //No-Op + } + + @Override + public void stopPeriodicDataRecording() { + //No-Op + } + + @Override + public void forceRefreshSegment(String segmentName) { + //No-Op + } +} diff --git a/client/src/main/java/io/split/engine/common/PushManagerImp.java b/client/src/main/java/io/split/engine/common/PushManagerImp.java index 9585bc1a7..d7b5d8ae7 100644 --- a/client/src/main/java/io/split/engine/common/PushManagerImp.java +++ b/client/src/main/java/io/split/engine/common/PushManagerImp.java @@ -1,7 +1,9 @@ package io.split.engine.common; import com.google.common.annotations.VisibleForTesting; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import io.split.client.interceptors.FlagSetsFilter; +import 
io.split.engine.experiments.RuleBasedSegmentParser; +import io.split.engine.experiments.SplitParser; import io.split.engine.sse.AuthApiClient; import io.split.engine.sse.AuthApiClientImp; import io.split.engine.sse.EventSourceClient; @@ -12,34 +14,39 @@ import io.split.engine.sse.dtos.AuthenticationResponse; import io.split.engine.sse.dtos.SegmentQueueDto; import io.split.engine.sse.workers.SegmentsWorkerImp; -import io.split.engine.sse.workers.SplitsWorker; -import io.split.engine.sse.workers.SplitsWorkerImp; +import io.split.engine.sse.workers.FeatureFlagsWorker; +import io.split.engine.sse.workers.FeatureFlagWorkerImp; import io.split.engine.sse.workers.Worker; +import io.split.storages.RuleBasedSegmentCache; +import io.split.storages.SplitCacheProducer; import io.split.telemetry.domain.StreamingEvent; import io.split.telemetry.domain.enums.StreamEventsEnum; import io.split.telemetry.storage.TelemetryRuntimeProducer; -import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.concurrent.Future; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.Executors; +import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; import static com.google.common.base.Preconditions.checkNotNull; +import static io.split.client.utils.SplitExecutorFactory.buildSingleThreadScheduledExecutor; public class PushManagerImp implements PushManager { private static final Logger _log = LoggerFactory.getLogger(PushManager.class); private final AuthApiClient _authApiClient; private final EventSourceClient _eventSourceClient; - private final SplitsWorker _splitsWorker; + private final FeatureFlagsWorker _featureFlagsWorker; private final Worker _segmentWorker; private final 
PushStatusTracker _pushStatusTracker; + private static final Lock lock = new ReentrantLock(); private Future _nextTokenRefreshTask; private final ScheduledExecutorService _scheduledExecutorService; @@ -48,72 +55,91 @@ public class PushManagerImp implements PushManager { @VisibleForTesting /* package private */ PushManagerImp(AuthApiClient authApiClient, - EventSourceClient eventSourceClient, - SplitsWorker splitsWorker, - Worker segmentWorker, - PushStatusTracker pushStatusTracker, - TelemetryRuntimeProducer telemetryRuntimeProducer) { + EventSourceClient eventSourceClient, + FeatureFlagsWorker featureFlagsWorker, + Worker segmentWorker, + PushStatusTracker pushStatusTracker, + TelemetryRuntimeProducer telemetryRuntimeProducer, + ThreadFactory threadFactory) { _authApiClient = checkNotNull(authApiClient); _eventSourceClient = checkNotNull(eventSourceClient); - _splitsWorker = splitsWorker; + _featureFlagsWorker = featureFlagsWorker; _segmentWorker = segmentWorker; _pushStatusTracker = pushStatusTracker; _expirationTime = new AtomicLong(); - _scheduledExecutorService = Executors.newSingleThreadScheduledExecutor(new ThreadFactoryBuilder() - .setDaemon(true) - .setNameFormat("Split-SSERefreshToken-%d") - .build()); + _scheduledExecutorService = buildSingleThreadScheduledExecutor(threadFactory, "Split-SSERefreshToken-%d"); _telemetryRuntimeProducer = checkNotNull(telemetryRuntimeProducer); } public static PushManagerImp build(Synchronizer synchronizer, String streamingUrl, String authUrl, - CloseableHttpClient httpClient, + SplitAPI splitAPI, LinkedBlockingQueue statusMessages, - CloseableHttpClient sseHttpClient, - TelemetryRuntimeProducer telemetryRuntimeProducer) { - SplitsWorker splitsWorker = new SplitsWorkerImp(synchronizer); + TelemetryRuntimeProducer telemetryRuntimeProducer, + ThreadFactory threadFactory, + SplitParser splitParser, + SplitCacheProducer splitCacheProducer, + FlagSetsFilter flagSetsFilter, + RuleBasedSegmentCache ruleBasedSegmentCache, + 
RuleBasedSegmentParser ruleBasedSegmentParser) { + FeatureFlagsWorker featureFlagsWorker = new FeatureFlagWorkerImp(synchronizer, splitParser, ruleBasedSegmentParser, splitCacheProducer, + ruleBasedSegmentCache, telemetryRuntimeProducer, flagSetsFilter); Worker segmentWorker = new SegmentsWorkerImp(synchronizer); PushStatusTracker pushStatusTracker = new PushStatusTrackerImp(statusMessages, telemetryRuntimeProducer); - return new PushManagerImp(new AuthApiClientImp(authUrl, httpClient, telemetryRuntimeProducer), - EventSourceClientImp.build(streamingUrl, splitsWorker, segmentWorker, pushStatusTracker, sseHttpClient, telemetryRuntimeProducer), - splitsWorker, + + return new PushManagerImp(new AuthApiClientImp(authUrl, splitAPI.getHttpClient(), telemetryRuntimeProducer), + EventSourceClientImp.build(streamingUrl, featureFlagsWorker, segmentWorker, pushStatusTracker, splitAPI.getSseHttpClient(), + telemetryRuntimeProducer, threadFactory, splitAPI.getRequestDecorator()), + featureFlagsWorker, segmentWorker, - pushStatusTracker, telemetryRuntimeProducer); + pushStatusTracker, + telemetryRuntimeProducer, + threadFactory); } @Override - public synchronized void start() { - AuthenticationResponse response = _authApiClient.Authenticate(); - _log.debug(String.format("Auth service response pushEnabled: %s", response.isPushEnabled())); - if (response.isPushEnabled() && startSse(response.getToken(), response.getChannels())) { - _expirationTime.set(response.getExpiration()); - _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.TOKEN_REFRESH.getType(), response.getExpiration(), System.currentTimeMillis())); - return; - } - - stop(); - if (response.isRetry()) { - _pushStatusTracker.handleSseStatus(SSEClient.StatusMessage.RETRYABLE_ERROR); - } else { - _pushStatusTracker.forcePushDisable(); + public void start() { + try { + lock.lock(); + AuthenticationResponse response = _authApiClient.Authenticate(); + _log.debug(String.format("Auth service 
response pushEnabled: %s", response.isPushEnabled())); + if (response.isPushEnabled() && startSse(response.getToken(), response.getChannels())) { + _expirationTime.set(response.getExpiration()); + _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.TOKEN_REFRESH.getType(), + response.getExpiration(), System.currentTimeMillis())); + return; + } + + cleanUpResources(); + if (response.isRetry()) { + _pushStatusTracker.handleSseStatus(SSEClient.StatusMessage.RETRYABLE_ERROR); + } else { + _pushStatusTracker.forcePushDisable(); + } + } catch (Exception e) { + _log.debug("Exception in PushManager start: " + e.getMessage()); + } finally { + lock.unlock(); } } @Override - public synchronized void stop() { - _eventSourceClient.stop(); - stopWorkers(); - if (_nextTokenRefreshTask != null) { - _log.debug("Cancel nextTokenRefreshTask"); - _nextTokenRefreshTask.cancel(false); + public void stop() { + try { + lock.lock(); + _log.debug("Stopping PushManagerImp"); + cleanUpResources(); + } catch (Exception e) { + _log.debug("Exception in stopping push manager: " + e.getMessage()); + } finally { + lock.unlock(); } } @Override - public synchronized void scheduleConnectionReset() { + public void scheduleConnectionReset() { _log.debug(String.format("scheduleNextTokenRefresh in %s SECONDS", _expirationTime)); _nextTokenRefreshTask = _scheduledExecutorService.schedule(() -> { _log.debug("Starting scheduleNextTokenRefresh ..."); @@ -133,14 +159,31 @@ private boolean startSse(String token, String channels) { } @Override - public synchronized void startWorkers() { - _splitsWorker.start(); - _segmentWorker.start(); + public void startWorkers() { + try { + _featureFlagsWorker.start(); + _segmentWorker.start(); + } catch (Exception e) { + _log.debug("Exception in starting workers: " + e.getMessage()); + } } @Override - public synchronized void stopWorkers() { - _splitsWorker.stop(); - _segmentWorker.stop(); + public void stopWorkers() { + try { + 
_featureFlagsWorker.stop(); + _segmentWorker.stop(); + } catch (Exception e) { + _log.debug("Exception in stopping workers: " + e.getMessage()); + } + } + + private void cleanUpResources() { + _eventSourceClient.stop(); + stopWorkers(); + if (_nextTokenRefreshTask != null) { + _log.debug("Cancel nextTokenRefreshTask"); + _nextTokenRefreshTask.cancel(false); + } } -} \ No newline at end of file +} diff --git a/client/src/main/java/io/split/engine/common/SplitAPI.java b/client/src/main/java/io/split/engine/common/SplitAPI.java new file mode 100644 index 000000000..adb0500de --- /dev/null +++ b/client/src/main/java/io/split/engine/common/SplitAPI.java @@ -0,0 +1,51 @@ +package io.split.engine.common; + +import io.split.client.RequestDecorator; +import io.split.service.SplitHttpClient; +import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class SplitAPI { + + private final SplitHttpClient _httpClient; + private final CloseableHttpClient _sseHttpClient; + private final RequestDecorator _requestDecorator; + private static final Logger _log = LoggerFactory.getLogger(SplitAPI.class); + + private SplitAPI(SplitHttpClient httpClient, CloseableHttpClient sseHttpClient, RequestDecorator requestDecorator) { + _httpClient = httpClient; + _sseHttpClient = sseHttpClient; + _requestDecorator = requestDecorator; + } + + public static SplitAPI build(SplitHttpClient httpClient, CloseableHttpClient sseHttpClient, + RequestDecorator requestDecorator) { + return new SplitAPI(httpClient, sseHttpClient, requestDecorator); + } + + public SplitHttpClient getHttpClient() { + return _httpClient; + } + + public CloseableHttpClient getSseHttpClient() { + return _sseHttpClient; + } + + public RequestDecorator getRequestDecorator() { + return _requestDecorator; + } + + public void close() { + try { + _httpClient.close(); + } catch (Exception e) { + _log.error("Error trying to close regular http client", e); + } + 
try { + _sseHttpClient.close(); + } catch (Exception e) { + _log.error("Error trying to close sseHttpClient", e); + } + } +} diff --git a/client/src/main/java/io/split/engine/common/SplitTasks.java b/client/src/main/java/io/split/engine/common/SplitTasks.java new file mode 100644 index 000000000..d2960d824 --- /dev/null +++ b/client/src/main/java/io/split/engine/common/SplitTasks.java @@ -0,0 +1,69 @@ +package io.split.engine.common; + +import io.split.client.events.EventsTask; +import io.split.client.impressions.ImpressionsManager; +import io.split.client.impressions.UniqueKeysTracker; +import io.split.engine.experiments.SplitSynchronizationTask; +import io.split.engine.segments.SegmentSynchronizationTask; +import io.split.telemetry.synchronizer.TelemetrySyncTask; + +public class SplitTasks { + private final SplitSynchronizationTask _splitSynchronizationTask; + private final SegmentSynchronizationTask _segmentSynchronizationTask; + private final ImpressionsManager _impressionManager; + private final EventsTask _eventsTask; + private final TelemetrySyncTask _telemetrySyncTask; + private final UniqueKeysTracker _uniqueKeysTracker; + + private SplitTasks (SplitSynchronizationTask splitSynchronizationTask, + SegmentSynchronizationTask segmentSynchronizationTask, + ImpressionsManager impressionsManager, + EventsTask eventsTask, + TelemetrySyncTask telemetrySyncTask, + UniqueKeysTracker uniqueKeysTracker){ + _splitSynchronizationTask = splitSynchronizationTask; + _segmentSynchronizationTask = segmentSynchronizationTask; + _impressionManager = impressionsManager; + _eventsTask = eventsTask; + _uniqueKeysTracker = uniqueKeysTracker; + _telemetrySyncTask = telemetrySyncTask; + } + + public static SplitTasks build (SplitSynchronizationTask splitSynchronizationTask, + SegmentSynchronizationTask segmentSynchronizationTask, + ImpressionsManager impressionsManager, + EventsTask eventsTask, + TelemetrySyncTask telemetrySyncTask, + UniqueKeysTracker uniqueKeysTracker) { + return 
new SplitTasks ( splitSynchronizationTask, + segmentSynchronizationTask, + impressionsManager, + eventsTask, + telemetrySyncTask, + uniqueKeysTracker); + } + + public SplitSynchronizationTask getSplitSynchronizationTask() { + return _splitSynchronizationTask; + } + + public SegmentSynchronizationTask getSegmentSynchronizationTask() { + return _segmentSynchronizationTask; + } + + public ImpressionsManager getImpressionManager() { + return _impressionManager; + } + + public EventsTask getEventsTask() { + return _eventsTask; + } + + public TelemetrySyncTask getTelemetrySyncTask() { + return _telemetrySyncTask; + } + + public UniqueKeysTracker getUniqueKeysTracker() { + return _uniqueKeysTracker; + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/common/SyncManager.java b/client/src/main/java/io/split/engine/common/SyncManager.java index 147641748..f955e4c45 100644 --- a/client/src/main/java/io/split/engine/common/SyncManager.java +++ b/client/src/main/java/io/split/engine/common/SyncManager.java @@ -1,6 +1,8 @@ package io.split.engine.common; +import java.io.IOException; + public interface SyncManager { void start(); - void shutdown(); + void shutdown() throws IOException; } diff --git a/client/src/main/java/io/split/engine/common/SyncManagerImp.java b/client/src/main/java/io/split/engine/common/SyncManagerImp.java index a3e4ebb78..57faa07b3 100644 --- a/client/src/main/java/io/split/engine/common/SyncManagerImp.java +++ b/client/src/main/java/io/split/engine/common/SyncManagerImp.java @@ -1,169 +1,202 @@ package io.split.engine.common; import com.google.common.annotations.VisibleForTesting; -import com.google.common.util.concurrent.ThreadFactoryBuilder; import io.split.client.ApiKeyCounter; import io.split.client.SplitClientConfig; +import io.split.client.interceptors.FlagSetsFilter; import io.split.engine.SDKReadinessGates; +import io.split.engine.experiments.RuleBasedSegmentParser; import io.split.engine.experiments.SplitFetcher; 
+import io.split.engine.experiments.SplitParser; import io.split.engine.experiments.SplitSynchronizationTask; -import io.split.engine.segments.SegmentSynchronizationTaskImp; +import io.split.engine.segments.SegmentSynchronizationTask; +import io.split.storages.RuleBasedSegmentCache; import io.split.storages.SegmentCacheProducer; import io.split.storages.SplitCacheProducer; import io.split.telemetry.domain.StreamingEvent; import io.split.telemetry.domain.enums.StreamEventsEnum; import io.split.telemetry.storage.TelemetryRuntimeProducer; import io.split.telemetry.synchronizer.TelemetrySynchronizer; -import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; import java.util.ArrayList; import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.atomic.AtomicBoolean; import static com.google.common.base.Preconditions.checkNotNull; +import static io.split.client.utils.SplitExecutorFactory.buildExecutorService; public class SyncManagerImp implements SyncManager { - private static final Logger _log = LoggerFactory.getLogger(SyncManager.class); + private static final Logger _log = LoggerFactory.getLogger(SyncManagerImp.class); private final AtomicBoolean _streamingEnabledConfig; private final Synchronizer _synchronizer; private final PushManager _pushManager; - private final AtomicBoolean _shutdown; + private final AtomicBoolean _shuttedDown; private final LinkedBlockingQueue _incomingPushStatus; - private final ExecutorService _executorService; - private final ExecutorService _startExecutorService; + private final ExecutorService _pushMonitorExecutorService; + private final ExecutorService _initializationtExecutorService; private final SDKReadinessGates _gates; private Future _pushStatusMonitorTask; private Backoff _backoff; private final 
TelemetryRuntimeProducer _telemetryRuntimeProducer; private final TelemetrySynchronizer _telemetrySynchronizer; private final SplitClientConfig _config; + private final long _startingSyncCallBackoffBaseMs; + private final SegmentSynchronizationTask _segmentSynchronizationTaskImp; + private final SplitSynchronizationTask _splitSynchronizationTask; + private static final long STARTING_SYNC_ALL_BACKOFF_MAX_WAIT_MS = 10000; // 10 seconds max wait + private final SplitAPI _splitAPI; @VisibleForTesting - /* package private */ SyncManagerImp(boolean streamingEnabledConfig, + /* package private */ SyncManagerImp(SplitTasks splitTasks, + boolean streamingEnabledConfig, Synchronizer synchronizer, PushManager pushManager, LinkedBlockingQueue pushMessages, - int authRetryBackOffBase, - SDKReadinessGates gates, TelemetryRuntimeProducer telemetryRuntimeProducer, + SDKReadinessGates gates, + TelemetryRuntimeProducer telemetryRuntimeProducer, TelemetrySynchronizer telemetrySynchronizer, - SplitClientConfig config) { + SplitClientConfig config, + SplitAPI splitAPI) { _streamingEnabledConfig = new AtomicBoolean(streamingEnabledConfig); _synchronizer = checkNotNull(synchronizer); _pushManager = checkNotNull(pushManager); - _shutdown = new AtomicBoolean(false); + _shuttedDown = new AtomicBoolean(false); _incomingPushStatus = pushMessages; - _executorService = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder() - .setNameFormat("SPLIT-PushStatusMonitor-%d") - .setDaemon(true) - .build()); - _startExecutorService = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder() - .setNameFormat("SPLIT-PollingMode-%d") - .setDaemon(true) - .build()); - _backoff = new Backoff(authRetryBackOffBase); + _pushMonitorExecutorService = buildExecutorService(config.getThreadFactory(), "SPLIT-PushStatusMonitor-%d"); + _initializationtExecutorService = buildExecutorService(config.getThreadFactory(), "SPLIT-Initialization-%d"); + _backoff = new Backoff(config.authRetryBackoffBase()); _gates = 
checkNotNull(gates); _telemetryRuntimeProducer = checkNotNull(telemetryRuntimeProducer); _telemetrySynchronizer = checkNotNull(telemetrySynchronizer); _config = checkNotNull(config); + _startingSyncCallBackoffBaseMs = config.startingSyncCallBackoffBaseMs(); + _segmentSynchronizationTaskImp = checkNotNull(splitTasks.getSegmentSynchronizationTask()); + _splitSynchronizationTask = checkNotNull(splitTasks.getSplitSynchronizationTask()); + _splitAPI = splitAPI; } - public static SyncManagerImp build(boolean streamingEnabledConfig, - SplitSynchronizationTask splitSynchronizationTask, + public static SyncManagerImp build(SplitTasks splitTasks, SplitFetcher splitFetcher, - SegmentSynchronizationTaskImp segmentSynchronizationTaskImp, SplitCacheProducer splitCacheProducer, - String authUrl, - CloseableHttpClient httpClient, - String streamingServiceUrl, - int authRetryBackOffBase, - CloseableHttpClient sseHttpClient, + SplitAPI splitAPI, SegmentCacheProducer segmentCacheProducer, - int streamingRetryDelay, - int maxOnDemandFetchRetries, - int failedAttemptsBeforeLogging, - boolean cdnDebugLogging, SDKReadinessGates gates, TelemetryRuntimeProducer telemetryRuntimeProducer, TelemetrySynchronizer telemetrySynchronizer, - SplitClientConfig config) { + SplitClientConfig config, + SplitParser splitParser, + RuleBasedSegmentParser ruleBasedSegmentParser, + FlagSetsFilter flagSetsFilter, + RuleBasedSegmentCache ruleBasedSegmentCache) { LinkedBlockingQueue pushMessages = new LinkedBlockingQueue<>(); - Synchronizer synchronizer = new SynchronizerImp(splitSynchronizationTask, + Synchronizer synchronizer = new SynchronizerImp(splitTasks, splitFetcher, - segmentSynchronizationTaskImp, splitCacheProducer, segmentCacheProducer, - streamingRetryDelay, - maxOnDemandFetchRetries, - failedAttemptsBeforeLogging, - cdnDebugLogging, - gates); + ruleBasedSegmentCache, + config.streamingRetryDelay(), + config.streamingFetchMaxRetries(), + config.failedAttemptsBeforeLogging(), + 
config.getSetsFilter()); PushManager pushManager = PushManagerImp.build(synchronizer, - streamingServiceUrl, - authUrl, - httpClient, + config.streamingServiceURL(), + config.authServiceURL(), + splitAPI, pushMessages, - sseHttpClient, - telemetryRuntimeProducer); + telemetryRuntimeProducer, + config.getThreadFactory(), + splitParser, + splitCacheProducer, + flagSetsFilter, + ruleBasedSegmentCache, + ruleBasedSegmentParser); - return new SyncManagerImp(streamingEnabledConfig, + return new SyncManagerImp(splitTasks, + config.streamingEnabled(), synchronizer, pushManager, pushMessages, - authRetryBackOffBase, gates, telemetryRuntimeProducer, telemetrySynchronizer, - config); + config, + splitAPI); } @Override public void start() { - _startExecutorService.submit(() -> { + _initializationtExecutorService.submit(() -> { + Backoff startBackoff = new Backoff(_startingSyncCallBackoffBaseMs, STARTING_SYNC_ALL_BACKOFF_MAX_WAIT_MS); while(!_synchronizer.syncAll()) { - try { - Thread.currentThread().sleep(1000); + try{ + long howLong = startBackoff.interval(); + Thread.currentThread().sleep(howLong); } catch (InterruptedException e) { - _log.warn("Sdk Initializer thread interrupted"); Thread.currentThread().interrupt(); + break; } } + if (_shuttedDown.get()) { + return; + } + if (_log.isDebugEnabled()) { + _log.debug("SyncAll Ready"); + } _gates.sdkInternalReady(); - _telemetrySynchronizer.synchronizeConfig(_config, System.currentTimeMillis(), ApiKeyCounter.getApiKeyCounterInstance().getFactoryInstances(), new ArrayList<>()); if (_streamingEnabledConfig.get()) { startStreamingMode(); } else { startPollingMode(); } + _synchronizer.startPeriodicDataRecording(); + _telemetrySynchronizer.synchronizeConfig(_config, System.currentTimeMillis(), ApiKeyCounter.getApiKeyCounterInstance(). 
+ getFactoryInstances(), new ArrayList<>()); }); } @Override - public void shutdown() { - _shutdown.set(true); + public void shutdown() throws IOException { + _log.info("Shutting down SyncManagerImp"); + if(_shuttedDown.get()) { + return; + } + _shuttedDown.set(true); + _initializationtExecutorService.shutdownNow(); _synchronizer.stopPeriodicFetching(); - _pushManager.stop(); + if (_streamingEnabledConfig.get()) { + _pushManager.stop(); + _pushMonitorExecutorService.shutdownNow(); + } + _segmentSynchronizationTaskImp.close(); + _log.info("Successful shutdown of segment fetchers"); + _splitSynchronizationTask.close(); + _log.info("Successful shutdown of splits"); + _synchronizer.stopPeriodicDataRecording(); + _splitAPI.close(); } private void startStreamingMode() { _log.debug("Starting in streaming mode ..."); if (null == _pushStatusMonitorTask) { - _pushStatusMonitorTask = _executorService.submit(this::incomingPushStatusHandler); + _pushStatusMonitorTask = _pushMonitorExecutorService.submit(this::incomingPushStatusHandler); } _pushManager.start(); - _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.SYNC_MODE_UPDATE.getType(), StreamEventsEnum.SyncModeUpdateValues.STREAMING_EVENT.getValue(), System.currentTimeMillis())); + _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.SYNC_MODE_UPDATE.getType(), + StreamEventsEnum.SyncModeUpdateValues.STREAMING_EVENT.getValue(), System.currentTimeMillis())); } private void startPollingMode() { _log.debug("Starting in polling mode ..."); _synchronizer.startPeriodicFetching(); - _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.SYNC_MODE_UPDATE.getType(), StreamEventsEnum.SyncModeUpdateValues.POLLING_EVENT.getValue(), System.currentTimeMillis())); + _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.SYNC_MODE_UPDATE.getType(), + StreamEventsEnum.SyncModeUpdateValues.POLLING_EVENT.getValue(), 
System.currentTimeMillis())); } @VisibleForTesting @@ -179,21 +212,26 @@ private void startPollingMode() { _pushManager.startWorkers(); _pushManager.scheduleConnectionReset(); _backoff.reset(); - _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.STREAMING_STATUS.getType(), StreamEventsEnum.StreamingStatusValues.STREAMING_ENABLED.getValue(), System.currentTimeMillis())); + _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.STREAMING_STATUS.getType(), + StreamEventsEnum.StreamingStatusValues.STREAMING_ENABLED.getValue(), System.currentTimeMillis())); _log.info("Streaming up and running."); break; case STREAMING_DOWN: _log.info("Streaming service temporarily unavailable, working in polling mode."); _pushManager.stopWorkers(); + // if the whole SDK is being shutdown, don't start polling, + // in case the polling threads are not terminated and a graceful shutdown will fail. + if(_shuttedDown.get()) { + break; + } _synchronizer.startPeriodicFetching(); break; case STREAMING_BACKOFF: - long howLong = _backoff.interval() * 1000; - _log.info(String.format("Retryable error in streaming subsystem. Switching to polling and retrying in %d seconds", howLong/1000)); + long howLong = _backoff.interval(); + _log.info(String.format("Retryable error in streaming subsystem. 
Switching to polling and retrying in %d seconds", howLong)); _synchronizer.startPeriodicFetching(); - _pushManager.stopWorkers(); _pushManager.stop(); - Thread.sleep(howLong); + Thread.sleep(howLong * 1000); _incomingPushStatus.clear(); _pushManager.start(); break; @@ -212,4 +250,4 @@ private void startPollingMode() { } } } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/common/Synchronizer.java b/client/src/main/java/io/split/engine/common/Synchronizer.java index 9197baacf..d685a3ed7 100644 --- a/client/src/main/java/io/split/engine/common/Synchronizer.java +++ b/client/src/main/java/io/split/engine/common/Synchronizer.java @@ -1,10 +1,15 @@ package io.split.engine.common; +import io.split.engine.sse.dtos.SplitKillNotification; + public interface Synchronizer { boolean syncAll(); void startPeriodicFetching(); void stopPeriodicFetching(); - void refreshSplits(long targetChangeNumber); - void localKillSplit(String splitName, String defaultTreatment, long newChangeNumber); - void refreshSegment(String segmentName, long targetChangeNumber); + void refreshSplits(Long targetChangeNumber, Long ruleBasedSegmentChangeNumber); + void localKillSplit(SplitKillNotification splitKillNotification); + void refreshSegment(String segmentName, Long targetChangeNumber); + void startPeriodicDataRecording(); + void stopPeriodicDataRecording(); + void forceRefreshSegment(String segmentName); } diff --git a/client/src/main/java/io/split/engine/common/SynchronizerImp.java b/client/src/main/java/io/split/engine/common/SynchronizerImp.java index 8d26c966b..d9210578c 100644 --- a/client/src/main/java/io/split/engine/common/SynchronizerImp.java +++ b/client/src/main/java/io/split/engine/common/SynchronizerImp.java @@ -1,22 +1,24 @@ package io.split.engine.common; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import io.split.engine.SDKReadinessGates; +import io.split.client.events.EventsTask; +import 
io.split.client.impressions.ImpressionsManager; +import io.split.client.impressions.UniqueKeysTracker; import io.split.engine.experiments.FetchResult; import io.split.engine.experiments.SplitFetcher; import io.split.engine.experiments.SplitSynchronizationTask; import io.split.engine.segments.SegmentFetcher; import io.split.engine.segments.SegmentSynchronizationTask; +import io.split.engine.sse.dtos.SplitKillNotification; +import io.split.storages.RuleBasedSegmentCacheProducer; import io.split.storages.SegmentCacheProducer; import io.split.storages.SplitCacheProducer; +import io.split.telemetry.synchronizer.TelemetrySyncTask; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.HashSet; -import java.util.List; -import java.util.Map; import java.util.function.Function; +import java.util.stream.Collectors; import static com.google.common.base.Preconditions.checkNotNull; @@ -33,47 +35,53 @@ public class SynchronizerImp implements Synchronizer { private final SplitFetcher _splitFetcher; private final SegmentSynchronizationTask _segmentSynchronizationTaskImp; private final SplitCacheProducer _splitCacheProducer; + private final RuleBasedSegmentCacheProducer _ruleBasedSegmentCacheProducer; private final SegmentCacheProducer segmentCacheProducer; + private final ImpressionsManager _impressionManager; + private final EventsTask _eventsTask; + private final TelemetrySyncTask _telemetrySyncTask; + private final UniqueKeysTracker _uniqueKeysTracker; private final int _onDemandFetchRetryDelayMs; private final int _onDemandFetchMaxRetries; private final int _failedAttemptsBeforeLogging; - private final boolean _cdnResponseHeadersLogging; + private final String _sets; - private final Gson gson = new GsonBuilder().create(); - - public SynchronizerImp(SplitSynchronizationTask splitSynchronizationTask, + public SynchronizerImp(SplitTasks splitTasks, SplitFetcher splitFetcher, - SegmentSynchronizationTask segmentSynchronizationTaskImp, SplitCacheProducer 
splitCacheProducer, SegmentCacheProducer segmentCacheProducer, + RuleBasedSegmentCacheProducer ruleBasedSegmentCacheProducer, int onDemandFetchRetryDelayMs, int onDemandFetchMaxRetries, int failedAttemptsBeforeLogging, - boolean cdnResponseHeadersLogging, - SDKReadinessGates gates) { - _splitSynchronizationTask = checkNotNull(splitSynchronizationTask); + HashSet sets) { + _splitSynchronizationTask = checkNotNull(splitTasks.getSplitSynchronizationTask()); _splitFetcher = checkNotNull(splitFetcher); - _segmentSynchronizationTaskImp = checkNotNull(segmentSynchronizationTaskImp); + _segmentSynchronizationTaskImp = checkNotNull(splitTasks.getSegmentSynchronizationTask()); _splitCacheProducer = checkNotNull(splitCacheProducer); + _ruleBasedSegmentCacheProducer = checkNotNull(ruleBasedSegmentCacheProducer); this.segmentCacheProducer = checkNotNull(segmentCacheProducer); _onDemandFetchRetryDelayMs = checkNotNull(onDemandFetchRetryDelayMs); - _cdnResponseHeadersLogging = cdnResponseHeadersLogging; _onDemandFetchMaxRetries = onDemandFetchMaxRetries; _failedAttemptsBeforeLogging = failedAttemptsBeforeLogging; - + _impressionManager = splitTasks.getImpressionManager(); + _eventsTask = splitTasks.getEventsTask(); + _telemetrySyncTask = splitTasks.getTelemetrySyncTask(); + _uniqueKeysTracker = splitTasks.getUniqueKeysTracker(); + _sets = sets.stream().collect(Collectors.joining(",")); } @Override public boolean syncAll() { - FetchResult fetchResult = _splitFetcher.forceRefresh(new FetchOptions.Builder().cacheControlHeaders(true).build()); + FetchResult fetchResult = _splitFetcher.forceRefresh(new FetchOptions.Builder().flagSetsFilter(_sets).cacheControlHeaders(true).build()); return fetchResult.isSuccess() && _segmentSynchronizationTaskImp.fetchAllSynchronous(); } @Override public void startPeriodicFetching() { _log.debug("Starting Periodic Fetching ..."); - _splitSynchronizationTask.startPeriodicFetching(); - _segmentSynchronizationTaskImp.startPeriodicFetching(); + 
_splitSynchronizationTask.start(); + _segmentSynchronizationTaskImp.start(); } @Override @@ -99,7 +107,7 @@ private static class SyncResult { private final FetchResult _fetchResult; } - private SyncResult attemptSplitsSync(long targetChangeNumber, + private SyncResult attemptSplitsSync(long targetChangeNumber, long ruleBasedSegmentChangeNumber, FetchOptions opts, Function nextWaitMs, int maxRetries) { @@ -107,7 +115,11 @@ private SyncResult attemptSplitsSync(long targetChangeNumber, while(true) { remainingAttempts--; FetchResult fetchResult = _splitFetcher.forceRefresh(opts); - if (targetChangeNumber <= _splitCacheProducer.getChangeNumber()) { + if (fetchResult != null && !fetchResult.retry() && !fetchResult.isSuccess()) { + return new SyncResult(false, remainingAttempts, fetchResult); + } + if (targetChangeNumber <= _splitCacheProducer.getChangeNumber() + && ruleBasedSegmentChangeNumber <= _ruleBasedSegmentCacheProducer.getChangeNumber()) { return new SyncResult(true, remainingAttempts, fetchResult); } else if (remainingAttempts <= 0) { return new SyncResult(false, remainingAttempts, fetchResult); @@ -122,66 +134,61 @@ private SyncResult attemptSplitsSync(long targetChangeNumber, } } - private void logCdnHeaders(String prefix, int maxRetries, int remainingAttempts, List> headers) { - if (maxRetries - remainingAttempts > _failedAttemptsBeforeLogging) { - _log.info(String.format("%s: CDN Debug headers: %s", prefix, gson.toJson(headers))); - } - } - @Override - public void refreshSplits(long targetChangeNumber) { + public void refreshSplits(Long targetChangeNumber, Long ruleBasedSegmentChangeNumber) { - if (targetChangeNumber <= _splitCacheProducer.getChangeNumber()) { + if (targetChangeNumber == null || targetChangeNumber == 0) { + targetChangeNumber = _splitCacheProducer.getChangeNumber(); + } + if (ruleBasedSegmentChangeNumber == null || ruleBasedSegmentChangeNumber == 0) { + ruleBasedSegmentChangeNumber = _ruleBasedSegmentCacheProducer.getChangeNumber(); + } + + 
if (targetChangeNumber <= _splitCacheProducer.getChangeNumber() + && ruleBasedSegmentChangeNumber <= _ruleBasedSegmentCacheProducer.getChangeNumber()) { return; } FastlyHeadersCaptor captor = new FastlyHeadersCaptor(); FetchOptions opts = new FetchOptions.Builder() .cacheControlHeaders(true) - .fastlyDebugHeader(_cdnResponseHeadersLogging) - .responseHeadersCallback(_cdnResponseHeadersLogging ? captor::handle : null) + .flagSetsFilter(_sets) .build(); - SyncResult regularResult = attemptSplitsSync(targetChangeNumber, opts, + SyncResult regularResult = attemptSplitsSync(targetChangeNumber, ruleBasedSegmentChangeNumber, opts, (discard) -> (long) _onDemandFetchRetryDelayMs, _onDemandFetchMaxRetries); int attempts = _onDemandFetchMaxRetries - regularResult.remainingAttempts(); if (regularResult.success()) { _log.debug(String.format("Refresh completed in %s attempts.", attempts)); - if (_cdnResponseHeadersLogging) { - logCdnHeaders("[splits]", _onDemandFetchMaxRetries , regularResult.remainingAttempts(), captor.get()); - } + regularResult._fetchResult.getSegments().stream() - .forEach(segmentName -> _segmentSynchronizationTaskImp.initializeSegment(segmentName)); + .forEach(segmentName -> forceRefreshSegment(segmentName)); return; } _log.info(String.format("No changes fetched after %s attempts. 
Will retry bypassing CDN.", attempts)); FetchOptions withCdnBypass = new FetchOptions.Builder(opts).targetChangeNumber(targetChangeNumber).build(); Backoff backoff = new Backoff(ON_DEMAND_FETCH_BACKOFF_BASE_MS, ON_DEMAND_FETCH_BACKOFF_MAX_WAIT_MS); - SyncResult withCDNBypassed = attemptSplitsSync(targetChangeNumber, withCdnBypass, + SyncResult withCDNBypassed = attemptSplitsSync(targetChangeNumber, ruleBasedSegmentChangeNumber, withCdnBypass, (discard) -> backoff.interval(), ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES); int withoutCDNAttempts = ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - withCDNBypassed._remainingAttempts; if (withCDNBypassed.success()) { _log.debug(String.format("Refresh completed bypassing the CDN in %s attempts.", withoutCDNAttempts)); withCDNBypassed._fetchResult.getSegments().stream() - .forEach(segmentName -> _segmentSynchronizationTaskImp.initializeSegment(segmentName)); + .forEach(segmentName -> forceRefreshSegment(segmentName)); } else { _log.debug(String.format("No changes fetched after %s attempts with CDN bypassed.", withoutCDNAttempts)); } - - if (_cdnResponseHeadersLogging) { - logCdnHeaders("[splits]", _onDemandFetchMaxRetries + ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES, - withCDNBypassed.remainingAttempts(), captor.get()); - } } @Override - public void localKillSplit(String splitName, String defaultTreatment, long newChangeNumber) { - if (newChangeNumber > _splitCacheProducer.getChangeNumber()) { - _splitCacheProducer.kill(splitName, defaultTreatment, newChangeNumber); - refreshSplits(newChangeNumber); + public void localKillSplit(SplitKillNotification splitKillNotification) { + if (splitKillNotification.getChangeNumber() > _splitCacheProducer.getChangeNumber()) { + _splitCacheProducer.kill(splitKillNotification.getSplitName(), splitKillNotification.getDefaultTreatment(), + splitKillNotification.getChangeNumber()); + refreshSplits(splitKillNotification.getChangeNumber(), 0L); } } @@ -199,9 +206,9 @@ public SyncResult attemptSegmentSync(String 
segmentName, remainingAttempts--; fetcher.fetch(opts); if (targetChangeNumber <= segmentCacheProducer.getChangeNumber(segmentName)) { - return new SyncResult(true, remainingAttempts, new FetchResult(false, new HashSet<>())); + return new SyncResult(true, remainingAttempts, new FetchResult(false, true, new HashSet<>())); } else if (remainingAttempts <= 0) { - return new SyncResult(false, remainingAttempts, new FetchResult(false, new HashSet<>())); + return new SyncResult(false, remainingAttempts, new FetchResult(false, true, new HashSet<>())); } try { long howLong = nextWaitMs.apply(null); @@ -214,7 +221,7 @@ public SyncResult attemptSegmentSync(String segmentName, } @Override - public void refreshSegment(String segmentName, long targetChangeNumber) { + public void refreshSegment(String segmentName, Long targetChangeNumber) { if (targetChangeNumber <= segmentCacheProducer.getChangeNumber(segmentName)) { return; @@ -223,8 +230,6 @@ public void refreshSegment(String segmentName, long targetChangeNumber) { FastlyHeadersCaptor captor = new FastlyHeadersCaptor(); FetchOptions opts = new FetchOptions.Builder() .cacheControlHeaders(true) - .fastlyDebugHeader(_cdnResponseHeadersLogging) - .responseHeadersCallback(_cdnResponseHeadersLogging ? 
captor::handle : null) .build(); SyncResult regularResult = attemptSegmentSync(segmentName, targetChangeNumber, opts, @@ -233,9 +238,7 @@ public void refreshSegment(String segmentName, long targetChangeNumber) { int attempts = _onDemandFetchMaxRetries - regularResult.remainingAttempts(); if (regularResult.success()) { _log.debug(String.format("Segment %s refresh completed in %s attempts.", segmentName, attempts)); - if (_cdnResponseHeadersLogging) { - logCdnHeaders(String.format("[segment/%s]", segmentName), _onDemandFetchMaxRetries , regularResult.remainingAttempts(), captor.get()); - } + return; } @@ -251,10 +254,51 @@ public void refreshSegment(String segmentName, long targetChangeNumber) { } else { _log.debug(String.format("No changes fetched for segment %s after %s attempts with CDN bypassed.", segmentName, withoutCDNAttempts)); } + } + + @Override + public void startPeriodicDataRecording() { + try { + _impressionManager.start(); + } catch (Exception e) { + _log.error("Error trying to init Impression Manager synchronizer task.", e); + } + if (_uniqueKeysTracker != null){ + try { + _uniqueKeysTracker.start(); + } catch (Exception e) { + _log.error("Error trying to init Unique Keys Tracker synchronizer task.", e); + } + } + try { + _eventsTask.start(); + } catch (Exception e) { + _log.error("Error trying to init Events synchronizer task.", e); + } + try { + _telemetrySyncTask.startScheduledTask(); + } catch (Exception e) { + _log.error("Error trying to Telemetry synchronizer task.", e); + } + } - if (_cdnResponseHeadersLogging) { - logCdnHeaders(String.format("[segment/%s]", segmentName), _onDemandFetchMaxRetries + ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES, - withCDNBypassed.remainingAttempts(), captor.get()); + @Override + public void stopPeriodicDataRecording() { + _impressionManager.close(); + _log.info("Successful shutdown of impressions manager"); + if (_uniqueKeysTracker != null){ + _uniqueKeysTracker.stop(); + _log.info("Successful stop of UniqueKeysTracker"); 
} + _eventsTask.close(); + _log.info("Successful shutdown of eventsTask"); + _telemetrySyncTask.stopScheduledTask(); + _log.info("Successful shutdown of telemetry sync task"); + } + + @Override + public void forceRefreshSegment(String segmentName){ + SegmentFetcher segmentFetcher = _segmentSynchronizationTaskImp.getFetcher(segmentName); + segmentFetcher.fetch(new FetchOptions.Builder().build()); } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/evaluator/EvaluationContext.java b/client/src/main/java/io/split/engine/evaluator/EvaluationContext.java index 7aab69578..540acc5d3 100644 --- a/client/src/main/java/io/split/engine/evaluator/EvaluationContext.java +++ b/client/src/main/java/io/split/engine/evaluator/EvaluationContext.java @@ -1,5 +1,6 @@ package io.split.engine.evaluator; +import io.split.storages.RuleBasedSegmentCacheConsumer; import io.split.storages.SegmentCacheConsumer; import static com.google.common.base.Preconditions.checkNotNull; @@ -7,10 +8,13 @@ public class EvaluationContext { private final Evaluator _evaluator; private final SegmentCacheConsumer _segmentCacheConsumer; + private final RuleBasedSegmentCacheConsumer _ruleBasedSegmentCacheConsumer; - public EvaluationContext(Evaluator evaluator, SegmentCacheConsumer segmentCacheConsumer) { + public EvaluationContext(Evaluator evaluator, SegmentCacheConsumer segmentCacheConsumer, + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer) { _evaluator = checkNotNull(evaluator); _segmentCacheConsumer = checkNotNull(segmentCacheConsumer); + _ruleBasedSegmentCacheConsumer = checkNotNull(ruleBasedSegmentCacheConsumer); } public Evaluator getEvaluator() { @@ -20,4 +24,8 @@ public Evaluator getEvaluator() { public SegmentCacheConsumer getSegmentCache() { return _segmentCacheConsumer; } + + public RuleBasedSegmentCacheConsumer getRuleBasedSegmentCache() { + return _ruleBasedSegmentCacheConsumer; + } } diff --git 
a/client/src/main/java/io/split/engine/evaluator/Evaluator.java b/client/src/main/java/io/split/engine/evaluator/Evaluator.java index b1f4e2aba..f745b8d77 100644 --- a/client/src/main/java/io/split/engine/evaluator/Evaluator.java +++ b/client/src/main/java/io/split/engine/evaluator/Evaluator.java @@ -4,6 +4,10 @@ import java.util.Map; public interface Evaluator { - EvaluatorImp.TreatmentLabelAndChangeNumber evaluateFeature(String matchingKey, String bucketingKey, String split, Map attributes); - Map evaluateFeatures(String matchingKey, String bucketingKey, List splits, Map attributes); -} + EvaluatorImp.TreatmentLabelAndChangeNumber evaluateFeature(String matchingKey, String bucketingKey, String featureFlag, + Map attributes); + Map evaluateFeatures(String matchingKey, String bucketingKey, + List featureFlags, Map attributes); + Map evaluateFeaturesByFlagSets(String key, String bucketingKey, + List flagSets, Map attributes); +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/evaluator/EvaluatorImp.java b/client/src/main/java/io/split/engine/evaluator/EvaluatorImp.java index d404b5301..8d7147aa6 100644 --- a/client/src/main/java/io/split/engine/evaluator/EvaluatorImp.java +++ b/client/src/main/java/io/split/engine/evaluator/EvaluatorImp.java @@ -1,55 +1,105 @@ package io.split.engine.evaluator; - import io.split.client.dtos.ConditionType; +import io.split.client.dtos.FallbackTreatment; +import io.split.client.dtos.FallbackTreatmentCalculator; import io.split.client.exceptions.ChangeNumberExceptionWrapper; import io.split.engine.experiments.ParsedCondition; import io.split.engine.experiments.ParsedSplit; import io.split.engine.splitter.Splitter; -import io.split.grammar.Treatments; +import io.split.storages.RuleBasedSegmentCacheConsumer; import io.split.storages.SegmentCacheConsumer; import io.split.storages.SplitCacheConsumer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.ArrayList; import java.util.HashMap; 
+import java.util.HashSet; import java.util.List; import java.util.Map; import static com.google.common.base.Preconditions.checkNotNull; public class EvaluatorImp implements Evaluator { - - private static final Logger _log = LoggerFactory.getLogger(EvaluatorImp.class); private final SegmentCacheConsumer _segmentCacheConsumer; private final EvaluationContext _evaluationContext; private final SplitCacheConsumer _splitCacheConsumer; + private final FallbackTreatmentCalculator _fallbackTreatmentCalculator; + private final String _evaluatorException = "Evaluator Exception"; - public EvaluatorImp(SplitCacheConsumer splitCacheConsumer, SegmentCacheConsumer segmentCache) { + public EvaluatorImp(SplitCacheConsumer splitCacheConsumer, SegmentCacheConsumer segmentCache, + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer, + FallbackTreatmentCalculator fallbackTreatmentCalculator) { _splitCacheConsumer = checkNotNull(splitCacheConsumer); _segmentCacheConsumer = checkNotNull(segmentCache); - _evaluationContext = new EvaluationContext(this, _segmentCacheConsumer); + _evaluationContext = new EvaluationContext(this, _segmentCacheConsumer, ruleBasedSegmentCacheConsumer); + _fallbackTreatmentCalculator = fallbackTreatmentCalculator; } @Override - public TreatmentLabelAndChangeNumber evaluateFeature(String matchingKey, String bucketingKey, String split, Map attributes) { - ParsedSplit parsedSplit = _splitCacheConsumer.get(split); - return evaluateParsedSplit(matchingKey, bucketingKey, split, attributes, parsedSplit); + public TreatmentLabelAndChangeNumber evaluateFeature(String matchingKey, String bucketingKey, String featureFlag, Map attributes) { + ParsedSplit parsedSplit = _splitCacheConsumer.get(featureFlag); + return evaluateParsedSplit(matchingKey, bucketingKey, attributes, parsedSplit, featureFlag); } @Override - public Map evaluateFeatures(String matchingKey, String bucketingKey, List splits, Map attributes) { + public Map evaluateFeatures(String matchingKey, String 
bucketingKey, List featureFlags, + Map attributes) { Map results = new HashMap<>(); - Map parsedSplits = _splitCacheConsumer.fetchMany(splits); - if(parsedSplits == null) { + Map parsedSplits = _splitCacheConsumer.fetchMany(featureFlags); + if (parsedSplits == null) { return results; } - splits.forEach(s -> results.put(s, evaluateParsedSplit(matchingKey, bucketingKey, s, attributes, parsedSplits.get(s)))); + featureFlags.forEach(s -> results.put(s, evaluateParsedSplit(matchingKey, bucketingKey, attributes, parsedSplits.get(s), s))); return results; } + @Override + public Map evaluateFeaturesByFlagSets(String key, String bucketingKey, + List flagSets, Map attributes) { + List flagSetsWithNames = getFeatureFlagNamesByFlagSets(flagSets); + try { + return evaluateFeatures(key, bucketingKey, flagSetsWithNames, attributes); + } catch (Exception e) { + _log.error(_evaluatorException, e); + return createMapControl(flagSetsWithNames, io.split.engine.evaluator.Labels.EXCEPTION); + } + } + + private Map createMapControl(List featureFlags, String label) { + Map result = new HashMap<>(); + featureFlags.forEach(s -> result.put(s, checkFallbackTreatment(s, label))); + return result; + } + + private EvaluatorImp.TreatmentLabelAndChangeNumber checkFallbackTreatment(String featureName, String label) { + FallbackTreatment fallbackTreatment = _fallbackTreatmentCalculator.resolve(featureName, label); + return new EvaluatorImp.TreatmentLabelAndChangeNumber(fallbackTreatment.getTreatment(), + fallbackTreatment.getLabel(), + null, + getFallbackConfig(fallbackTreatment), + false); + } + + private List getFeatureFlagNamesByFlagSets(List flagSets) { + HashSet ffNamesToReturn = new HashSet<>(); + Map> namesByFlagSets = _splitCacheConsumer.getNamesByFlagSets(flagSets); + for (String set: flagSets) { + HashSet flags = namesByFlagSets.get(set); + if (flags == null || flags.isEmpty()) { + _log.warn(String.format("You passed %s Flag Set that does not contain cached feature flag names, please 
double check " + + "what Flag Sets are in use in the Split user interface.", set)); + continue; + } + ffNamesToReturn.addAll(flags); + } + return new ArrayList<>(ffNamesToReturn); + } + /** * @param matchingKey MUST NOT be null * @param bucketingKey @@ -58,26 +108,41 @@ public Map evaluateFeatures(String matchi * @return * @throws ChangeNumberExceptionWrapper */ - private TreatmentLabelAndChangeNumber getTreatment(String matchingKey, String bucketingKey, ParsedSplit parsedSplit, Map attributes) throws ChangeNumberExceptionWrapper { + private TreatmentLabelAndChangeNumber getTreatment(String matchingKey, String bucketingKey, ParsedSplit parsedSplit, Map attributes) throws ChangeNumberExceptionWrapper { try { + String config = getConfig(parsedSplit, parsedSplit.defaultTreatment()); if (parsedSplit.killed()) { - String config = parsedSplit.configurations() != null ? parsedSplit.configurations().get(parsedSplit.defaultTreatment()) : null; - return new TreatmentLabelAndChangeNumber(parsedSplit.defaultTreatment(), Labels.KILLED, parsedSplit.changeNumber(), config); + return new TreatmentLabelAndChangeNumber( + parsedSplit.defaultTreatment(), + Labels.KILLED, + parsedSplit.changeNumber(), + config, + parsedSplit.impressionsDisabled()); + } + + String bk = getBucketingKey(bucketingKey, matchingKey); + + if (!parsedSplit.prerequisitesMatcher().match(matchingKey, bk, attributes, _evaluationContext)) { + return new TreatmentLabelAndChangeNumber( + parsedSplit.defaultTreatment(), + Labels.PREREQUISITES_NOT_MET, + parsedSplit.changeNumber(), + config, + parsedSplit.impressionsDisabled()); } /* - * There are three parts to a single Split: 1) Whitelists 2) Traffic Allocation + * There are three parts to a single Feature flag: 1) Whitelists 2) Traffic Allocation * 3) Rollout. The flag inRollout is there to understand when we move into the Rollout * section. 
This is because we need to make sure that the Traffic Allocation * computation happens after the whitelist but before the rollout. */ boolean inRollout = false; - String bk = (bucketingKey == null) ? matchingKey : bucketingKey; - for (ParsedCondition parsedCondition : parsedSplit.parsedConditions()) { - if (!inRollout && parsedCondition.conditionType() == ConditionType.ROLLOUT) { + if (checkRollout(inRollout, parsedCondition)) { if (parsedSplit.trafficAllocation() < 100) { // if the traffic allocation is 100%, no need to do anything special. @@ -85,8 +150,9 @@ private TreatmentLabelAndChangeNumber getTreatment(String matchingKey, String bu if (bucket > parsedSplit.trafficAllocation()) { // out of split - String config = parsedSplit.configurations() != null ? parsedSplit.configurations().get(parsedSplit.defaultTreatment()) : null; - return new TreatmentLabelAndChangeNumber(parsedSplit.defaultTreatment(), Labels.NOT_IN_SPLIT, parsedSplit.changeNumber(), config); + config = getConfig(parsedSplit, parsedSplit.defaultTreatment()); + return new TreatmentLabelAndChangeNumber(parsedSplit.defaultTreatment(), Labels.NOT_IN_SPLIT, + parsedSplit.changeNumber(), config, parsedSplit.impressionsDisabled()); } } @@ -95,32 +161,69 @@ private TreatmentLabelAndChangeNumber getTreatment(String matchingKey, String bu if (parsedCondition.matcher().match(matchingKey, bucketingKey, attributes, _evaluationContext)) { String treatment = Splitter.getTreatment(bk, parsedSplit.seed(), parsedCondition.partitions(), parsedSplit.algo()); - String config = parsedSplit.configurations() != null ? 
parsedSplit.configurations().get(treatment) : null; - return new TreatmentLabelAndChangeNumber(treatment, parsedCondition.label(), parsedSplit.changeNumber(), config); + config = getConfig(parsedSplit, treatment); + return new TreatmentLabelAndChangeNumber( + treatment, + parsedCondition.label(), + parsedSplit.changeNumber(), + config, + parsedSplit.impressionsDisabled()); } } - String config = parsedSplit.configurations() != null ? parsedSplit.configurations().get(parsedSplit.defaultTreatment()) : null; - return new TreatmentLabelAndChangeNumber(parsedSplit.defaultTreatment(), Labels.DEFAULT_RULE, parsedSplit.changeNumber(), config); + config = getConfig(parsedSplit, parsedSplit.defaultTreatment()); + + return new TreatmentLabelAndChangeNumber( + parsedSplit.defaultTreatment(), + Labels.DEFAULT_RULE, + parsedSplit.changeNumber(), + config, + parsedSplit.impressionsDisabled()); } catch (Exception e) { throw new ChangeNumberExceptionWrapper(e, parsedSplit.changeNumber()); } } - private TreatmentLabelAndChangeNumber evaluateParsedSplit(String matchingKey, String bucketingKey, String split, Map attributes, ParsedSplit parsedSplit) { + private boolean checkRollout(boolean inRollout, ParsedCondition parsedCondition) { + return (!inRollout && parsedCondition.conditionType() == ConditionType.ROLLOUT); + } + + private String getBucketingKey(String bucketingKey, String matchingKey) { + return (bucketingKey == null) ? matchingKey : bucketingKey; + } + + private String getConfig(ParsedSplit parsedSplit, String returnedTreatment) { + return parsedSplit.configurations() != null ? 
parsedSplit.configurations().get(returnedTreatment) : null; + } + + private String getFallbackConfig(FallbackTreatment fallbackTreatment) { + if (fallbackTreatment.getConfig() != null) { + return fallbackTreatment.getConfig(); + } + + return null; + } + + private TreatmentLabelAndChangeNumber evaluateParsedSplit(String matchingKey, String bucketingKey, Map attributes, + ParsedSplit parsedSplit, String featureName) { try { if (parsedSplit == null) { - return new TreatmentLabelAndChangeNumber(Treatments.CONTROL, Labels.DEFINITION_NOT_FOUND); + FallbackTreatment fallbackTreatment = _fallbackTreatmentCalculator.resolve(featureName, Labels.DEFINITION_NOT_FOUND); + return new TreatmentLabelAndChangeNumber(fallbackTreatment.getTreatment(), + fallbackTreatment.getLabel(), + null, + getFallbackConfig(fallbackTreatment), + false); } - return getTreatment(matchingKey, bucketingKey, parsedSplit, attributes); - } - catch (ChangeNumberExceptionWrapper e) { - _log.error("Evaluator Exception", e.wrappedException()); - return new EvaluatorImp.TreatmentLabelAndChangeNumber(Treatments.CONTROL, Labels.EXCEPTION, e.changeNumber()); + } catch (ChangeNumberExceptionWrapper e) { + _log.error(_evaluatorException, e.wrappedException()); + FallbackTreatment fallbackTreatment = _fallbackTreatmentCalculator.resolve(featureName, Labels.EXCEPTION); + return new TreatmentLabelAndChangeNumber(fallbackTreatment.getTreatment(), fallbackTreatment.getLabel(), e.changeNumber()); } catch (Exception e) { - _log.error("Evaluator Exception", e); - return new EvaluatorImp.TreatmentLabelAndChangeNumber(Treatments.CONTROL, Labels.EXCEPTION); + _log.error(_evaluatorException, e); + FallbackTreatment fallbackTreatment = _fallbackTreatmentCalculator.resolve(featureName, Labels.EXCEPTION); + return new TreatmentLabelAndChangeNumber(fallbackTreatment.getTreatment(), fallbackTreatment.getLabel()); } } @@ -129,20 +232,22 @@ public static final class TreatmentLabelAndChangeNumber { public final String label; public 
final Long changeNumber; public final String configurations; + public final boolean track; public TreatmentLabelAndChangeNumber(String treatment, String label) { - this(treatment, label, null, null); + this(treatment, label, null, null, true); } public TreatmentLabelAndChangeNumber(String treatment, String label, Long changeNumber) { - this(treatment, label, changeNumber, null); + this(treatment, label, changeNumber, null, true); } - public TreatmentLabelAndChangeNumber(String treatment, String label, Long changeNumber, String configurations) { + public TreatmentLabelAndChangeNumber(String treatment, String label, Long changeNumber, String configurations, boolean track) { this.treatment = treatment; this.label = label; this.changeNumber = changeNumber; this.configurations = configurations; + this.track = track; } } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/evaluator/Labels.java b/client/src/main/java/io/split/engine/evaluator/Labels.java index 97e486b91..28966d51e 100644 --- a/client/src/main/java/io/split/engine/evaluator/Labels.java +++ b/client/src/main/java/io/split/engine/evaluator/Labels.java @@ -6,4 +6,7 @@ public class Labels { public static final String KILLED = "killed"; public static final String DEFINITION_NOT_FOUND = "definition not found"; public static final String EXCEPTION = "exception"; -} + public static final String UNSUPPORTED_MATCHER = "targeting rule type unsupported by sdk"; + public static final String PREREQUISITES_NOT_MET = "prerequisites not met"; + public static final String NOT_READY = "not ready"; +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/experiments/FetchResult.java b/client/src/main/java/io/split/engine/experiments/FetchResult.java index 7e49ed762..c0d678c0b 100644 --- a/client/src/main/java/io/split/engine/experiments/FetchResult.java +++ b/client/src/main/java/io/split/engine/experiments/FetchResult.java @@ -4,16 +4,21 @@ public class FetchResult { 
private boolean _success; + private boolean _retry; private Set _segments; - public FetchResult(boolean success, Set segments) { + public FetchResult(boolean success, boolean retry, Set segments) { _success = success; + _retry = retry; _segments = segments; } public boolean isSuccess() { return _success; } + public boolean retry() { + return _retry; + } public Set getSegments() { return _segments; diff --git a/client/src/main/java/io/split/engine/experiments/ParsedCondition.java b/client/src/main/java/io/split/engine/experiments/ParsedCondition.java index 5c2b06b61..ad2e32a50 100644 --- a/client/src/main/java/io/split/engine/experiments/ParsedCondition.java +++ b/client/src/main/java/io/split/engine/experiments/ParsedCondition.java @@ -53,11 +53,12 @@ public int hashCode() { result = 31 * result + _matcher.hashCode(); int partitionsHashCode = 17; - for (Partition p : _partitions) { - partitionsHashCode = 31 * partitionsHashCode + p.treatment.hashCode(); - partitionsHashCode = 31 * partitionsHashCode + p.size; + if (_partitions != null) { + for (Partition p : _partitions) { + partitionsHashCode = 31 * partitionsHashCode + p.treatment.hashCode(); + partitionsHashCode = 31 * partitionsHashCode + p.size; + } } - result = 31 * result + partitionsHashCode; return result; } @@ -75,7 +76,9 @@ public boolean equals(Object obj) { if (!result) { return result; } - + if (_partitions == null) { + return result & (_partitions == other._partitions); + } if (_partitions.size() != other._partitions.size()) { return result; } @@ -97,6 +100,9 @@ public String toString() { bldr.append(_matcher); bldr.append(" then split "); boolean first = true; + if (_partitions == null) { + return bldr.toString(); + } for (Partition partition : _partitions) { if (!first) { bldr.append(','); diff --git a/client/src/main/java/io/split/engine/experiments/ParsedRuleBasedSegment.java b/client/src/main/java/io/split/engine/experiments/ParsedRuleBasedSegment.java new file mode 100644 index 
000000000..c00439700 --- /dev/null +++ b/client/src/main/java/io/split/engine/experiments/ParsedRuleBasedSegment.java @@ -0,0 +1,134 @@ +package io.split.engine.experiments; + +import com.google.common.collect.ImmutableList; +import io.split.client.dtos.ExcludedSegments; +import io.split.engine.matchers.AttributeMatcher; +import io.split.engine.matchers.UserDefinedSegmentMatcher; + +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +public class ParsedRuleBasedSegment { + + private final String _ruleBasedSegment; + private final ImmutableList _parsedCondition; + private final String _trafficTypeName; + private final long _changeNumber; + private final List _excludedKeys; + private final List _excludedSegments; + + public static ParsedRuleBasedSegment createParsedRuleBasedSegmentForTests( + String ruleBasedSegment, + List matcherAndSplits, + String trafficTypeName, + long changeNumber, + List excludedKeys, + List excludedSegments + ) { + return new ParsedRuleBasedSegment( + ruleBasedSegment, + matcherAndSplits, + trafficTypeName, + changeNumber, + excludedKeys, + excludedSegments + ); + } + + public ParsedRuleBasedSegment( + String ruleBasedSegment, + List matcherAndSplits, + String trafficTypeName, + long changeNumber, + List excludedKeys, + List excludedSegments + ) { + _ruleBasedSegment = ruleBasedSegment; + _parsedCondition = ImmutableList.copyOf(matcherAndSplits); + _trafficTypeName = trafficTypeName; + _changeNumber = changeNumber; + _excludedKeys = excludedKeys; + _excludedSegments = excludedSegments; + } + + public String ruleBasedSegment() { + return _ruleBasedSegment; + } + + public List parsedConditions() { + return _parsedCondition; + } + + public String trafficTypeName() {return _trafficTypeName;} + + public long changeNumber() {return _changeNumber;} + + public List excludedKeys() {return _excludedKeys;} + public List excludedSegments() {return _excludedSegments;} + + @Override + public int hashCode() { + int result = 
17; + result = 31 * result + _ruleBasedSegment.hashCode(); + result = 31 * result + _parsedCondition.hashCode(); + result = 31 * result + (_trafficTypeName == null ? 0 : _trafficTypeName.hashCode()); + result = 31 * result + (int)(_changeNumber ^ (_changeNumber >>> 32)); + return result; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) return false; + if (this == obj) return true; + if (!(obj instanceof ParsedRuleBasedSegment)) return false; + + ParsedRuleBasedSegment other = (ParsedRuleBasedSegment) obj; + + return _ruleBasedSegment.equals(other._ruleBasedSegment) + && _parsedCondition.equals(other._parsedCondition) + && _trafficTypeName == null ? other._trafficTypeName == null : _trafficTypeName.equals(other._trafficTypeName) + && _changeNumber == other._changeNumber; + } + + @Override + public String toString() { + StringBuilder bldr = new StringBuilder(); + bldr.append("name:"); + bldr.append(_ruleBasedSegment); + bldr.append(", parsedConditions:"); + bldr.append(_parsedCondition); + bldr.append(", trafficTypeName:"); + bldr.append(_trafficTypeName); + bldr.append(", changeNumber:"); + bldr.append(_changeNumber); + return bldr.toString(); + + } + + public Set getSegmentsNames() { + Set segmentNames = excludedSegments() + .stream() + .filter(ExcludedSegments::isStandard) + .map(ExcludedSegments::getSegmentName) + .collect(Collectors.toSet()); + + segmentNames.addAll(parsedConditions().stream() + .flatMap(parsedCondition -> parsedCondition.matcher().attributeMatchers().stream()) + .filter(ParsedRuleBasedSegment::isSegmentMatcher) + .map(ParsedRuleBasedSegment::asSegmentMatcherForEach) + .map(UserDefinedSegmentMatcher::getSegmentName) + .collect(Collectors.toSet())); + + return segmentNames; + } + + private static boolean isSegmentMatcher(AttributeMatcher attributeMatcher) { + return ((AttributeMatcher.NegatableMatcher) attributeMatcher.matcher()).delegate() instanceof UserDefinedSegmentMatcher; + } + + private static 
UserDefinedSegmentMatcher asSegmentMatcherForEach(AttributeMatcher attributeMatcher) { + return (UserDefinedSegmentMatcher) ((AttributeMatcher.NegatableMatcher) attributeMatcher.matcher()).delegate(); + } + +} diff --git a/client/src/main/java/io/split/engine/experiments/ParsedSplit.java b/client/src/main/java/io/split/engine/experiments/ParsedSplit.java index 4edaa9024..e202474f0 100644 --- a/client/src/main/java/io/split/engine/experiments/ParsedSplit.java +++ b/client/src/main/java/io/split/engine/experiments/ParsedSplit.java @@ -2,7 +2,8 @@ import com.google.common.collect.ImmutableList; import io.split.engine.matchers.AttributeMatcher; -import io.split.engine.matchers.Matcher; +import io.split.engine.matchers.PrerequisitesMatcher; +import io.split.engine.matchers.RuleBasedSegmentMatcher; import io.split.engine.matchers.UserDefinedSegmentMatcher; import java.util.HashSet; @@ -32,6 +33,9 @@ public class ParsedSplit { private final int _trafficAllocationSeed; private final int _algo; private final Map _configurations; + private final HashSet _flagSets; + private final boolean _impressionsDisabled; + private PrerequisitesMatcher _prerequisitesMatcher; public static ParsedSplit createParsedSplitForTests( String feature, @@ -41,7 +45,10 @@ public static ParsedSplit createParsedSplitForTests( List matcherAndSplits, String trafficTypeName, long changeNumber, - int algo + int algo, + HashSet flagSets, + boolean impressionsDisabled, + PrerequisitesMatcher prerequisitesMatcher ) { return new ParsedSplit( feature, @@ -54,7 +61,10 @@ public static ParsedSplit createParsedSplitForTests( 100, seed, algo, - null + null, + flagSets, + impressionsDisabled, + prerequisitesMatcher ); } @@ -67,7 +77,10 @@ public static ParsedSplit createParsedSplitForTests( String trafficTypeName, long changeNumber, int algo, - Map configurations + Map configurations, + HashSet flagSets, + boolean impressionsDisabled, + PrerequisitesMatcher prerequisitesMatcher ) { return new ParsedSplit( feature, 
@@ -80,7 +93,10 @@ public static ParsedSplit createParsedSplitForTests( 100, seed, algo, - configurations + configurations, + flagSets, + impressionsDisabled, + prerequisitesMatcher ); } @@ -95,7 +111,10 @@ public ParsedSplit( int trafficAllocation, int trafficAllocationSeed, int algo, - Map configurations + Map configurations, + HashSet flagSets, + boolean impressionsDisabled, + PrerequisitesMatcher prerequisitesMatcher ) { _split = feature; _seed = seed; @@ -111,10 +130,11 @@ public ParsedSplit( _trafficAllocation = trafficAllocation; _trafficAllocationSeed = trafficAllocationSeed; _configurations = configurations; + _flagSets = flagSets; + _impressionsDisabled = impressionsDisabled; + _prerequisitesMatcher = prerequisitesMatcher; } - - public String feature() { return _split; } @@ -148,11 +168,19 @@ public List parsedConditions() { public long changeNumber() {return _changeNumber;} public int algo() {return _algo;} + public HashSet flagSets() { + return _flagSets; + } public Map configurations() { return _configurations; } + public boolean impressionsDisabled() { + return _impressionsDisabled; + } + public PrerequisitesMatcher prerequisitesMatcher() { return _prerequisitesMatcher; } + @Override public int hashCode() { int result = 17; @@ -165,6 +193,7 @@ public int hashCode() { result = 31 * result + (int)(_changeNumber ^ (_changeNumber >>> 32)); result = 31 * result + (_algo ^ (_algo >>> 32)); result = 31 * result + (_configurations == null? 0 : _configurations.hashCode()); + result = 31 * result + (_impressionsDisabled ? 1 : 0); return result; } @@ -175,16 +204,20 @@ public boolean equals(Object obj) { if (!(obj instanceof ParsedSplit)) return false; ParsedSplit other = (ParsedSplit) obj; + boolean trafficTypeCond = _trafficTypeName == null ? other._trafficTypeName == null : _trafficTypeName.equals(other._trafficTypeName); + boolean configCond = _configurations == null ? 
other._configurations == null : _configurations.equals(other._configurations); return _split.equals(other._split) && _seed == other._seed && _killed == other._killed && _defaultTreatment.equals(other._defaultTreatment) && _parsedCondition.equals(other._parsedCondition) - && _trafficTypeName == null ? other._trafficTypeName == null : _trafficTypeName.equals(other._trafficTypeName) + && trafficTypeCond && _changeNumber == other._changeNumber && _algo == other._algo - && _configurations == null ? other._configurations == null : _configurations.equals(other._configurations); + && configCond + && _impressionsDisabled == other._impressionsDisabled + && _prerequisitesMatcher == other._prerequisitesMatcher; } @Override @@ -208,6 +241,11 @@ public String toString() { bldr.append(_algo); bldr.append(", config:"); bldr.append(_configurations); + bldr.append(", impressionsDisabled:"); + bldr.append(_impressionsDisabled); + bldr.append(", prerequisites:"); + bldr.append(_prerequisitesMatcher); + return bldr.toString(); } @@ -221,6 +259,15 @@ public Set getSegmentsNames() { .collect(Collectors.toSet()); } + public Set getRuleBasedSegmentsNames() { + return parsedConditions().stream() + .flatMap(parsedCondition -> parsedCondition.matcher().attributeMatchers().stream()) + .filter(ParsedSplit::isRuleBasedSegmentMatcher) + .map(ParsedSplit::asRuleBasedSegmentMatcherForEach) + .map(RuleBasedSegmentMatcher::getSegmentName) + .collect(Collectors.toSet()); + } + private static boolean isSegmentMatcher(AttributeMatcher attributeMatcher) { return ((AttributeMatcher.NegatableMatcher) attributeMatcher.matcher()).delegate() instanceof UserDefinedSegmentMatcher; } @@ -229,4 +276,11 @@ private static UserDefinedSegmentMatcher asSegmentMatcherForEach(AttributeMatche return (UserDefinedSegmentMatcher) ((AttributeMatcher.NegatableMatcher) attributeMatcher.matcher()).delegate(); } + private static boolean isRuleBasedSegmentMatcher(AttributeMatcher attributeMatcher) { + return 
((AttributeMatcher.NegatableMatcher) attributeMatcher.matcher()).delegate() instanceof RuleBasedSegmentMatcher; + } + + private static RuleBasedSegmentMatcher asRuleBasedSegmentMatcherForEach(AttributeMatcher attributeMatcher) { + return (RuleBasedSegmentMatcher) ((AttributeMatcher.NegatableMatcher) attributeMatcher.matcher()).delegate(); + } } diff --git a/client/src/main/java/io/split/engine/experiments/ParserUtils.java b/client/src/main/java/io/split/engine/experiments/ParserUtils.java new file mode 100644 index 000000000..3b1355123 --- /dev/null +++ b/client/src/main/java/io/split/engine/experiments/ParserUtils.java @@ -0,0 +1,204 @@ +package io.split.engine.experiments; + +import com.google.common.collect.Lists; +import io.split.client.dtos.MatcherType; +import io.split.client.dtos.Partition; +import io.split.client.dtos.MatcherGroup; +import io.split.client.dtos.ConditionType; +import io.split.client.dtos.Matcher; +import io.split.engine.evaluator.Labels; +import io.split.engine.matchers.CombiningMatcher; +import io.split.engine.matchers.AllKeysMatcher; +import io.split.engine.matchers.AttributeMatcher; +import io.split.engine.matchers.UserDefinedSegmentMatcher; +import io.split.engine.matchers.EqualToMatcher; +import io.split.engine.matchers.GreaterThanOrEqualToMatcher; +import io.split.engine.matchers.LessThanOrEqualToMatcher; +import io.split.engine.matchers.BetweenMatcher; +import io.split.engine.matchers.DependencyMatcher; +import io.split.engine.matchers.BooleanMatcher; +import io.split.engine.matchers.EqualToSemverMatcher; +import io.split.engine.matchers.GreaterThanOrEqualToSemverMatcher; +import io.split.engine.matchers.LessThanOrEqualToSemverMatcher; +import io.split.engine.matchers.InListSemverMatcher; +import io.split.engine.matchers.BetweenSemverMatcher; +import io.split.engine.matchers.RuleBasedSegmentMatcher; +import io.split.engine.matchers.collections.ContainsAllOfSetMatcher; +import 
io.split.engine.matchers.collections.ContainsAnyOfSetMatcher; +import io.split.engine.matchers.collections.EqualToSetMatcher; +import io.split.engine.matchers.collections.PartOfSetMatcher; +import io.split.engine.matchers.strings.WhitelistMatcher; +import io.split.engine.matchers.strings.StartsWithAnyOfMatcher; +import io.split.engine.matchers.strings.EndsWithAnyOfMatcher; +import io.split.engine.matchers.strings.ContainsAnyOfMatcher; +import io.split.engine.matchers.strings.RegularExpressionMatcher; + +import java.util.List; + +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkNotNull; + +public final class ParserUtils { + + private ParserUtils() { + throw new IllegalStateException("Utility class"); + } + + public static boolean checkUnsupportedMatcherExist(List matchers) { + MatcherType typeCheck = null; + for (Matcher matcher : matchers) { + typeCheck = null; + try { + typeCheck = matcher.matcherType; + } catch (NullPointerException e) { + // If the exception is caught, it means unsupported matcher + break; + } + } + return (typeCheck == null); + } + + public static ParsedCondition getTemplateCondition() { + List templatePartitions = Lists.newArrayList(); + Partition partition = new Partition(); + partition.treatment = "control"; + partition.size = 100; + templatePartitions.add(partition); + return new ParsedCondition( + ConditionType.ROLLOUT, + CombiningMatcher.of(new AllKeysMatcher()), + templatePartitions, + Labels.UNSUPPORTED_MATCHER); + } + + public static CombiningMatcher toMatcher(MatcherGroup matcherGroup) { + List matchers = matcherGroup.matchers; + checkArgument(!matchers.isEmpty()); + + List toCombine = Lists.newArrayList(); + + for (Matcher matcher : matchers) { + toCombine.add(toMatcher(matcher)); + } + + return new CombiningMatcher(matcherGroup.combiner, toCombine); + } + + + public static AttributeMatcher toMatcher(Matcher matcher) { + io.split.engine.matchers.Matcher 
delegate = null; + switch (matcher.matcherType) { + case ALL_KEYS: + delegate = new AllKeysMatcher(); + break; + case IN_SEGMENT: + checkNotNull(matcher.userDefinedSegmentMatcherData); + String segmentName = matcher.userDefinedSegmentMatcherData.segmentName; + delegate = new UserDefinedSegmentMatcher(segmentName); + break; + case WHITELIST: + checkNotNull(matcher.whitelistMatcherData); + delegate = new WhitelistMatcher(matcher.whitelistMatcherData.whitelist); + break; + case EQUAL_TO: + checkNotNull(matcher.unaryNumericMatcherData); + delegate = new EqualToMatcher(matcher.unaryNumericMatcherData.value, matcher.unaryNumericMatcherData.dataType); + break; + case GREATER_THAN_OR_EQUAL_TO: + checkNotNull(matcher.unaryNumericMatcherData); + delegate = new GreaterThanOrEqualToMatcher(matcher.unaryNumericMatcherData.value, matcher.unaryNumericMatcherData.dataType); + break; + case LESS_THAN_OR_EQUAL_TO: + checkNotNull(matcher.unaryNumericMatcherData); + delegate = new LessThanOrEqualToMatcher(matcher.unaryNumericMatcherData.value, matcher.unaryNumericMatcherData.dataType); + break; + case BETWEEN: + checkNotNull(matcher.betweenMatcherData); + delegate = new BetweenMatcher(matcher.betweenMatcherData.start, matcher.betweenMatcherData.end, matcher.betweenMatcherData.dataType); + break; + case EQUAL_TO_SET: + checkNotNull(matcher.whitelistMatcherData); + delegate = new EqualToSetMatcher(matcher.whitelistMatcherData.whitelist); + break; + case PART_OF_SET: + checkNotNull(matcher.whitelistMatcherData); + delegate = new PartOfSetMatcher(matcher.whitelistMatcherData.whitelist); + break; + case CONTAINS_ALL_OF_SET: + checkNotNull(matcher.whitelistMatcherData); + delegate = new ContainsAllOfSetMatcher(matcher.whitelistMatcherData.whitelist); + break; + case CONTAINS_ANY_OF_SET: + checkNotNull(matcher.whitelistMatcherData); + delegate = new ContainsAnyOfSetMatcher(matcher.whitelistMatcherData.whitelist); + break; + case STARTS_WITH: + checkNotNull(matcher.whitelistMatcherData); + 
delegate = new StartsWithAnyOfMatcher(matcher.whitelistMatcherData.whitelist); + break; + case ENDS_WITH: + checkNotNull(matcher.whitelistMatcherData); + delegate = new EndsWithAnyOfMatcher(matcher.whitelistMatcherData.whitelist); + break; + case CONTAINS_STRING: + checkNotNull(matcher.whitelistMatcherData); + delegate = new ContainsAnyOfMatcher(matcher.whitelistMatcherData.whitelist); + break; + case MATCHES_STRING: + checkNotNull(matcher.stringMatcherData); + delegate = new RegularExpressionMatcher(matcher.stringMatcherData); + break; + case IN_SPLIT_TREATMENT: + checkNotNull(matcher.dependencyMatcherData, + "MatcherType is " + matcher.matcherType + + ". matcher.dependencyMatcherData() MUST NOT BE null"); + delegate = new DependencyMatcher(matcher.dependencyMatcherData.split, matcher.dependencyMatcherData.treatments); + break; + case EQUAL_TO_BOOLEAN: + checkNotNull(matcher.booleanMatcherData, + "MatcherType is " + matcher.matcherType + + ". matcher.booleanMatcherData() MUST NOT BE null"); + delegate = new BooleanMatcher(matcher.booleanMatcherData); + break; + case EQUAL_TO_SEMVER: + checkNotNull(matcher.stringMatcherData, "stringMatcherData is required for EQUAL_TO_SEMVER matcher type"); + delegate = new EqualToSemverMatcher(matcher.stringMatcherData); + break; + case GREATER_THAN_OR_EQUAL_TO_SEMVER: + checkNotNull(matcher.stringMatcherData, "stringMatcherData is required for GREATER_THAN_OR_EQUAL_TO_SEMVER matcher type"); + delegate = new GreaterThanOrEqualToSemverMatcher(matcher.stringMatcherData); + break; + case LESS_THAN_OR_EQUAL_TO_SEMVER: + checkNotNull(matcher.stringMatcherData, "stringMatcherData is required for LESS_THAN_OR_EQUAL_SEMVER matcher type"); + delegate = new LessThanOrEqualToSemverMatcher(matcher.stringMatcherData); + break; + case IN_LIST_SEMVER: + checkNotNull(matcher.whitelistMatcherData, "whitelistMatcherData is required for IN_LIST_SEMVER matcher type"); + delegate = new InListSemverMatcher(matcher.whitelistMatcherData.whitelist); + 
break; + case BETWEEN_SEMVER: + checkNotNull(matcher.betweenStringMatcherData, "betweenStringMatcherData is required for BETWEEN_SEMVER matcher type"); + delegate = new BetweenSemverMatcher(matcher.betweenStringMatcherData.start, matcher.betweenStringMatcherData.end); + break; + case IN_RULE_BASED_SEGMENT: + checkNotNull(matcher.userDefinedSegmentMatcherData); + String ruleBasedSegmentName = matcher.userDefinedSegmentMatcherData.segmentName; + delegate = new RuleBasedSegmentMatcher(ruleBasedSegmentName); + break; + default: + throw new IllegalArgumentException("Unknown matcher type: " + matcher.matcherType); + } + + checkNotNull(delegate, "We were not able to create a matcher for: " + matcher.matcherType); + + String attribute = null; + if (matcher.keySelector != null && matcher.keySelector.attribute != null) { + attribute = matcher.keySelector.attribute; + } + + boolean negate = matcher.negate; + + + return new AttributeMatcher(attribute, delegate, negate); + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/experiments/RuleBasedSegmentParser.java b/client/src/main/java/io/split/engine/experiments/RuleBasedSegmentParser.java new file mode 100644 index 000000000..b67c5e354 --- /dev/null +++ b/client/src/main/java/io/split/engine/experiments/RuleBasedSegmentParser.java @@ -0,0 +1,54 @@ +package io.split.engine.experiments; + +import com.google.common.collect.Lists; +import io.split.client.dtos.Condition; +import io.split.client.dtos.RuleBasedSegment; +import io.split.engine.matchers.CombiningMatcher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; + +import static io.split.engine.experiments.ParserUtils.checkUnsupportedMatcherExist; +import static io.split.engine.experiments.ParserUtils.getTemplateCondition; +import static io.split.engine.experiments.ParserUtils.toMatcher; + +public final class RuleBasedSegmentParser { + + private static final Logger _log = 
LoggerFactory.getLogger(RuleBasedSegmentParser.class); + + public RuleBasedSegmentParser() { + } + + public ParsedRuleBasedSegment parse(RuleBasedSegment ruleBasedSegment) { + try { + return parseWithoutExceptionHandling(ruleBasedSegment); + } catch (Throwable t) { + _log.error("Could not parse rule based segment: " + ruleBasedSegment, t); + return null; + } + } + + private ParsedRuleBasedSegment parseWithoutExceptionHandling(RuleBasedSegment ruleBasedSegment) { + List parsedConditionList = Lists.newArrayList(); + for (Condition condition : ruleBasedSegment.conditions) { + if (checkUnsupportedMatcherExist(condition.matcherGroup.matchers)) { + _log.error("Unsupported matcher type found for rule based segment: " + ruleBasedSegment.name + + " , will revert to default template matcher."); + parsedConditionList.clear(); + parsedConditionList.add(getTemplateCondition()); + break; + } + CombiningMatcher matcher = toMatcher(condition.matcherGroup); + parsedConditionList.add(new ParsedCondition(condition.conditionType, matcher, null, condition.label)); + } + + return new ParsedRuleBasedSegment( + ruleBasedSegment.name, + parsedConditionList, + ruleBasedSegment.trafficTypeName, + ruleBasedSegment.changeNumber, + ruleBasedSegment.excluded.keys, + ruleBasedSegment.excluded.segments); + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/experiments/SplitChangeFetcher.java b/client/src/main/java/io/split/engine/experiments/SplitChangeFetcher.java index 7c5fbe76e..da6e185fa 100644 --- a/client/src/main/java/io/split/engine/experiments/SplitChangeFetcher.java +++ b/client/src/main/java/io/split/engine/experiments/SplitChangeFetcher.java @@ -32,5 +32,5 @@ public interface SplitChangeFetcher { * @return SegmentChange * @throws java.lang.RuntimeException if there was a problem computing split changes */ - SplitChange fetch(long since, FetchOptions options); + SplitChange fetch(long since, long sinceRBS, FetchOptions options); } diff --git 
a/client/src/main/java/io/split/engine/experiments/SplitFetcherImp.java b/client/src/main/java/io/split/engine/experiments/SplitFetcherImp.java index 366997f43..30d374cdf 100644 --- a/client/src/main/java/io/split/engine/experiments/SplitFetcherImp.java +++ b/client/src/main/java/io/split/engine/experiments/SplitFetcherImp.java @@ -1,28 +1,25 @@ package io.split.engine.experiments; -import io.split.client.dtos.Split; import io.split.client.dtos.SplitChange; -import io.split.client.dtos.Status; -import io.split.storages.SplitCacheConsumer; +import io.split.client.exceptions.UriTooLongException; +import io.split.client.interceptors.FlagSetsFilter; +import io.split.client.utils.FeatureFlagsToUpdate; +import io.split.client.utils.RuleBasedSegmentsToUpdate; +import io.split.storages.RuleBasedSegmentCacheProducer; import io.split.storages.SplitCacheProducer; -import io.split.engine.SDKReadinessGates; -import io.split.engine.matchers.AttributeMatcher; -import io.split.engine.matchers.UserDefinedSegmentMatcher; -import io.split.telemetry.domain.enums.HTTPLatenciesEnum; import io.split.telemetry.domain.enums.LastSynchronizationRecordsEnum; import io.split.telemetry.storage.TelemetryRuntimeProducer; import io.split.engine.common.FetchOptions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayList; import java.util.HashSet; -import java.util.List; import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; import static com.google.common.base.Preconditions.checkNotNull; +import static io.split.client.utils.FeatureFlagProcessor.processFeatureFlagChanges; +import static io.split.client.utils.RuleBasedSegmentProcessor.processRuleBasedSegmentChanges; +import static io.split.client.utils.Utils.checkExitConditions; /** * An ExperimentFetcher that refreshes experiment definitions periodically. 
@@ -35,10 +32,12 @@ public class SplitFetcherImp implements SplitFetcher { private final SplitParser _parser; private final SplitChangeFetcher _splitChangeFetcher; - private final SplitCacheConsumer _splitCacheConsumer; private final SplitCacheProducer _splitCacheProducer; private final Object _lock = new Object(); private final TelemetryRuntimeProducer _telemetryRuntimeProducer; + private final FlagSetsFilter _flagSetsFilter; + private final RuleBasedSegmentCacheProducer _ruleBasedSegmentCacheProducer; + private final RuleBasedSegmentParser _parserRBS; /** * Contains all the traffic types that are currently being used by the splits and also the count @@ -50,43 +49,62 @@ public class SplitFetcherImp implements SplitFetcher { * an ARCHIVED split is received, we know if we need to remove a traffic type from the multiset. */ - public SplitFetcherImp(SplitChangeFetcher splitChangeFetcher, SplitParser parser, SplitCacheConsumer splitCacheConsumer, SplitCacheProducer splitCacheProducer, TelemetryRuntimeProducer telemetryRuntimeProducer) { + public SplitFetcherImp(SplitChangeFetcher splitChangeFetcher, SplitParser parser, SplitCacheProducer splitCacheProducer, + TelemetryRuntimeProducer telemetryRuntimeProducer, FlagSetsFilter flagSetsFilter, + RuleBasedSegmentParser parserRBS, RuleBasedSegmentCacheProducer ruleBasedSegmentCacheProducer) { _splitChangeFetcher = checkNotNull(splitChangeFetcher); _parser = checkNotNull(parser); - _splitCacheConsumer = checkNotNull(splitCacheConsumer); + _parserRBS = checkNotNull(parserRBS); _splitCacheProducer = checkNotNull(splitCacheProducer); + _ruleBasedSegmentCacheProducer = checkNotNull(ruleBasedSegmentCacheProducer); _telemetryRuntimeProducer = checkNotNull(telemetryRuntimeProducer); + _flagSetsFilter = flagSetsFilter; } @Override public FetchResult forceRefresh(FetchOptions options) { - _log.debug("Force Refresh splits starting ..."); + _log.debug("Force Refresh feature flags starting ..."); final long INITIAL_CN = 
_splitCacheProducer.getChangeNumber(); + final long RBS_INITIAL_CN = _ruleBasedSegmentCacheProducer.getChangeNumber(); Set segments = new HashSet<>(); try { while (true) { long start = _splitCacheProducer.getChangeNumber(); + long startRBS = _ruleBasedSegmentCacheProducer.getChangeNumber(); segments.addAll(runWithoutExceptionHandling(options)); long end = _splitCacheProducer.getChangeNumber(); + long endRBS = _ruleBasedSegmentCacheProducer.getChangeNumber(); // If the previous execution was the first one, clear the `cdnBypass` flag // for the next fetches. (This will clear a local copy of the fetch options, // not the original object that was passed to this method). + FetchOptions.Builder optionsBuilder = new FetchOptions.Builder(options); if (INITIAL_CN == start) { - options = new FetchOptions.Builder(options).targetChangeNumber(FetchOptions.DEFAULT_TARGET_CHANGENUMBER).build(); + optionsBuilder.targetChangeNumber(FetchOptions.DEFAULT_TARGET_CHANGENUMBER); } - if (start >= end) { - return new FetchResult(true, segments); + if (RBS_INITIAL_CN == startRBS) { + optionsBuilder.targetChangeNumberRBS(FetchOptions.DEFAULT_TARGET_CHANGENUMBER); + } + + options = optionsBuilder.build(); + + if (start >= end && startRBS >= endRBS) { + return new FetchResult(true, false, segments); } } + } catch (UriTooLongException u) { + return new FetchResult(false, false, new HashSet<>()); } catch (InterruptedException e) { _log.warn("Interrupting split fetcher task"); Thread.currentThread().interrupt(); - return new FetchResult(false, new HashSet<>()); - } catch (Throwable t) { - _log.error("RefreshableSplitFetcher failed: " + t.getMessage()); - return new FetchResult(false, new HashSet<>()); + return new FetchResult(false, true, new HashSet<>()); + } catch (Exception e) { + _log.error("SplitFetcherImp failed: " + e.getMessage()); + if (_log.isDebugEnabled()) { + _log.debug("Reason:", e); + } + return new FetchResult(false, true, new HashSet<>()); } } @@ -95,80 +113,57 @@ public void 
run() { this.forceRefresh(new FetchOptions.Builder().cacheControlHeaders(false).build()); } - private Set runWithoutExceptionHandling(FetchOptions options) throws InterruptedException { - SplitChange change = _splitChangeFetcher.fetch(_splitCacheProducer.getChangeNumber(), options); + private Set runWithoutExceptionHandling(FetchOptions options) throws InterruptedException, UriTooLongException { + SplitChange change = _splitChangeFetcher.fetch(_splitCacheProducer.getChangeNumber(), + _ruleBasedSegmentCacheProducer.getChangeNumber(), options); Set segments = new HashSet<>(); if (change == null) { throw new IllegalStateException("SplitChange was null"); } - if (change.till == _splitCacheProducer.getChangeNumber()) { - // no change. - return segments; + if (change.clearCache) { + _splitCacheProducer.clear(); + _ruleBasedSegmentCacheProducer.clear(); } - if (change.since != _splitCacheProducer.getChangeNumber() || change.till < _splitCacheProducer.getChangeNumber()) { - // some other thread may have updated the shared state. exit + if (checkExitConditions(change.featureFlags, _splitCacheProducer.getChangeNumber()) || + checkExitConditions(change.ruleBasedSegments, _ruleBasedSegmentCacheProducer.getChangeNumber())) { return segments; } - if (change.splits.isEmpty()) { - // there are no changes. weird! - _splitCacheProducer.setChangeNumber(change.till); + if (change.featureFlags.d.isEmpty()) { + _splitCacheProducer.setChangeNumber(change.featureFlags.t); + } + + if (change.ruleBasedSegments.d.isEmpty()) { + _ruleBasedSegmentCacheProducer.setChangeNumber(change.ruleBasedSegments.t); + } + + if (change.featureFlags.d.isEmpty() && change.ruleBasedSegments.d.isEmpty()) { return segments; } + synchronized (_lock) { // check state one more time. 
- if (change.since != _splitCacheProducer.getChangeNumber() - || change.till < _splitCacheProducer.getChangeNumber()) { + if (checkExitConditions(change.featureFlags, _splitCacheProducer.getChangeNumber()) || + checkExitConditions(change.ruleBasedSegments, _ruleBasedSegmentCacheProducer.getChangeNumber())) { // some other thread may have updated the shared state. exit return segments; } - - List parsedSplits = new ArrayList<>(); - for (Split split : change.splits) { - if (Thread.currentThread().isInterrupted()) { - throw new InterruptedException(); - } - - if (split.status != Status.ACTIVE) { - // archive. - _splitCacheProducer.remove(split.name); - continue; - } - - ParsedSplit parsedSplit = _parser.parse(split); - if (parsedSplit == null) { - _log.info("We could not parse the experiment definition for: " + split.name + " so we are removing it completely to be careful"); - - _splitCacheProducer.remove(split.name); - _log.debug("Deleted feature: " + split.name); - - continue; - } - segments.addAll(parsedSplit.getSegmentsNames()); - - // If the split already exists, this is either an update, or the split has been - // deleted and recreated (possibly with a different traffic type). - // If it's an update, the traffic type should NOT be increased. - // If it's deleted & recreated, the old one should be decreased and the new one increased. - // To handle both cases, we simply delete the old one if the split is present. - // The new one is always increased. 
- ParsedSplit current = _splitCacheConsumer.get(split.name); // TODO (lecheverz): implement UPDATE method at Split Cache - if (current != null) { - _splitCacheProducer.remove(split.name); - } - - parsedSplits.add(parsedSplit); - _log.debug("Updated feature: " + parsedSplit.feature()); - } - - _splitCacheProducer.putMany(parsedSplits); - _splitCacheProducer.setChangeNumber(change.till); + FeatureFlagsToUpdate featureFlagsToUpdate = processFeatureFlagChanges(_parser, change.featureFlags.d, _flagSetsFilter); + segments = featureFlagsToUpdate.getSegments(); + _splitCacheProducer.update(featureFlagsToUpdate.getToAdd(), featureFlagsToUpdate.getToRemove(), change.featureFlags.t); + + RuleBasedSegmentsToUpdate ruleBasedSegmentsToUpdate = processRuleBasedSegmentChanges(_parserRBS, + change.ruleBasedSegments.d); + segments.addAll(ruleBasedSegmentsToUpdate.getSegments()); + _ruleBasedSegmentCacheProducer.update(ruleBasedSegmentsToUpdate.getToAdd(), + ruleBasedSegmentsToUpdate.getToRemove(), change.ruleBasedSegments.t); _telemetryRuntimeProducer.recordSuccessfulSync(LastSynchronizationRecordsEnum.SPLITS, System.currentTimeMillis()); } + return segments; } } diff --git a/client/src/main/java/io/split/engine/experiments/SplitParser.java b/client/src/main/java/io/split/engine/experiments/SplitParser.java index 374ef101c..5771c9ae4 100644 --- a/client/src/main/java/io/split/engine/experiments/SplitParser.java +++ b/client/src/main/java/io/split/engine/experiments/SplitParser.java @@ -1,40 +1,21 @@ package io.split.engine.experiments; import com.google.common.collect.Lists; + import io.split.client.dtos.Condition; -import io.split.client.dtos.Matcher; -import io.split.client.dtos.MatcherGroup; import io.split.client.dtos.Partition; import io.split.client.dtos.Split; -import io.split.client.dtos.Status; -import io.split.engine.matchers.AllKeysMatcher; -import io.split.engine.matchers.AttributeMatcher; -import io.split.engine.matchers.BetweenMatcher; -import 
io.split.engine.matchers.BooleanMatcher; import io.split.engine.matchers.CombiningMatcher; -import io.split.engine.matchers.DependencyMatcher; -import io.split.engine.matchers.EqualToMatcher; -import io.split.engine.matchers.GreaterThanOrEqualToMatcher; -import io.split.engine.matchers.LessThanOrEqualToMatcher; -import io.split.engine.matchers.UserDefinedSegmentMatcher; -import io.split.engine.matchers.collections.ContainsAllOfSetMatcher; -import io.split.engine.matchers.collections.ContainsAnyOfSetMatcher; -import io.split.engine.matchers.collections.EqualToSetMatcher; -import io.split.engine.matchers.collections.PartOfSetMatcher; -import io.split.engine.matchers.strings.ContainsAnyOfMatcher; -import io.split.engine.matchers.strings.EndsWithAnyOfMatcher; -import io.split.engine.matchers.strings.RegularExpressionMatcher; -import io.split.engine.matchers.strings.StartsWithAnyOfMatcher; -import io.split.engine.matchers.strings.WhitelistMatcher; -import io.split.engine.segments.SegmentSynchronizationTask; -import io.split.storages.SegmentCacheConsumer; +import io.split.engine.matchers.PrerequisitesMatcher; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.List; +import java.util.Objects; -import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkNotNull; +import static io.split.engine.experiments.ParserUtils.checkUnsupportedMatcherExist; +import static io.split.engine.experiments.ParserUtils.getTemplateCondition; +import static io.split.engine.experiments.ParserUtils.toMatcher; /** * Converts io.codigo.dtos.Experiment to io.codigo.engine.splits.ParsedExperiment. 
@@ -58,126 +39,37 @@ public ParsedSplit parse(Split split) { } private ParsedSplit parseWithoutExceptionHandling(Split split) { - if (split.status != Status.ACTIVE) { - return null; - } - List parsedConditionList = Lists.newArrayList(); - + if (Objects.isNull(split.impressionsDisabled)) { + _log.debug("impressionsDisabled field not detected for Feature flag `" + split.name + "`, setting it to `false`."); + split.impressionsDisabled = false; + } for (Condition condition : split.conditions) { List partitions = condition.partitions; + if (checkUnsupportedMatcherExist(condition.matcherGroup.matchers)) { + _log.error("Unsupported matcher type found for feature flag: " + split.name + " , will revert to default template matcher."); + parsedConditionList.clear(); + parsedConditionList.add(getTemplateCondition()); + break; + } CombiningMatcher matcher = toMatcher(condition.matcherGroup); parsedConditionList.add(new ParsedCondition(condition.conditionType, matcher, partitions, condition.label)); } - return new ParsedSplit(split.name, split.seed, split.killed, split.defaultTreatment, parsedConditionList, split.trafficTypeName, split.changeNumber, split.trafficAllocation, split.trafficAllocationSeed, split.algo, split.configurations); - } - - private CombiningMatcher toMatcher(MatcherGroup matcherGroup) { - List matchers = matcherGroup.matchers; - checkArgument(!matchers.isEmpty()); - - List toCombine = Lists.newArrayList(); - - for (io.split.client.dtos.Matcher matcher : matchers) { - toCombine.add(toMatcher(matcher)); - } - - return new CombiningMatcher(matcherGroup.combiner, toCombine); - } - - - private AttributeMatcher toMatcher(Matcher matcher) { - io.split.engine.matchers.Matcher delegate = null; - switch (matcher.matcherType) { - case ALL_KEYS: - delegate = new AllKeysMatcher(); - break; - case IN_SEGMENT: - checkNotNull(matcher.userDefinedSegmentMatcherData); - String segmentName = matcher.userDefinedSegmentMatcherData.segmentName; - delegate = new 
UserDefinedSegmentMatcher(segmentName); - break; - case WHITELIST: - checkNotNull(matcher.whitelistMatcherData); - delegate = new WhitelistMatcher(matcher.whitelistMatcherData.whitelist); - break; - case EQUAL_TO: - checkNotNull(matcher.unaryNumericMatcherData); - delegate = new EqualToMatcher(matcher.unaryNumericMatcherData.value, matcher.unaryNumericMatcherData.dataType); - break; - case GREATER_THAN_OR_EQUAL_TO: - checkNotNull(matcher.unaryNumericMatcherData); - delegate = new GreaterThanOrEqualToMatcher(matcher.unaryNumericMatcherData.value, matcher.unaryNumericMatcherData.dataType); - break; - case LESS_THAN_OR_EQUAL_TO: - checkNotNull(matcher.unaryNumericMatcherData); - delegate = new LessThanOrEqualToMatcher(matcher.unaryNumericMatcherData.value, matcher.unaryNumericMatcherData.dataType); - break; - case BETWEEN: - checkNotNull(matcher.betweenMatcherData); - delegate = new BetweenMatcher(matcher.betweenMatcherData.start, matcher.betweenMatcherData.end, matcher.betweenMatcherData.dataType); - break; - case EQUAL_TO_SET: - checkNotNull(matcher.whitelistMatcherData); - delegate = new EqualToSetMatcher(matcher.whitelistMatcherData.whitelist); - break; - case PART_OF_SET: - checkNotNull(matcher.whitelistMatcherData); - delegate = new PartOfSetMatcher(matcher.whitelistMatcherData.whitelist); - break; - case CONTAINS_ALL_OF_SET: - checkNotNull(matcher.whitelistMatcherData); - delegate = new ContainsAllOfSetMatcher(matcher.whitelistMatcherData.whitelist); - break; - case CONTAINS_ANY_OF_SET: - checkNotNull(matcher.whitelistMatcherData); - delegate = new ContainsAnyOfSetMatcher(matcher.whitelistMatcherData.whitelist); - break; - case STARTS_WITH: - checkNotNull(matcher.whitelistMatcherData); - delegate = new StartsWithAnyOfMatcher(matcher.whitelistMatcherData.whitelist); - break; - case ENDS_WITH: - checkNotNull(matcher.whitelistMatcherData); - delegate = new EndsWithAnyOfMatcher(matcher.whitelistMatcherData.whitelist); - break; - case CONTAINS_STRING: - 
checkNotNull(matcher.whitelistMatcherData); - delegate = new ContainsAnyOfMatcher(matcher.whitelistMatcherData.whitelist); - break; - case MATCHES_STRING: - checkNotNull(matcher.stringMatcherData); - delegate = new RegularExpressionMatcher(matcher.stringMatcherData); - break; - case IN_SPLIT_TREATMENT: - checkNotNull(matcher.dependencyMatcherData, - "MatcherType is " + matcher.matcherType - + ". matcher.dependencyMatcherData() MUST NOT BE null"); - delegate = new DependencyMatcher(matcher.dependencyMatcherData.split, matcher.dependencyMatcherData.treatments); - break; - case EQUAL_TO_BOOLEAN: - checkNotNull(matcher.booleanMatcherData, - "MatcherType is " + matcher.matcherType - + ". matcher.booleanMatcherData() MUST NOT BE null"); - delegate = new BooleanMatcher(matcher.booleanMatcherData); - break; - default: - throw new IllegalArgumentException("Unknown matcher type: " + matcher.matcherType); - } - - checkNotNull(delegate, "We were not able to create a matcher for: " + matcher.matcherType); - - String attribute = null; - if (matcher.keySelector != null && matcher.keySelector.attribute != null) { - attribute = matcher.keySelector.attribute; - } - - boolean negate = matcher.negate; - - - return new AttributeMatcher(attribute, delegate, negate); + return new ParsedSplit( + split.name, + split.seed, + split.killed, + split.defaultTreatment, + parsedConditionList, + split.trafficTypeName, + split.changeNumber, + split.trafficAllocation, + split.trafficAllocationSeed, + split.algo, + split.configurations, + split.sets, + split.impressionsDisabled, + new PrerequisitesMatcher(split.prerequisites)); } - - -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/experiments/SplitSynchronizationTask.java b/client/src/main/java/io/split/engine/experiments/SplitSynchronizationTask.java index f42beda07..6f4f99ab7 100644 --- a/client/src/main/java/io/split/engine/experiments/SplitSynchronizationTask.java +++ 
b/client/src/main/java/io/split/engine/experiments/SplitSynchronizationTask.java @@ -1,30 +1,30 @@ package io.split.engine.experiments; -import com.google.common.util.concurrent.ThreadFactoryBuilder; import io.split.storages.SplitCacheProducer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.Closeable; import java.util.List; -import java.util.concurrent.Executors; + import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; -import java.util.concurrent.TimeUnit; import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import static io.split.client.utils.SplitExecutorFactory.buildSingleThreadScheduledExecutor; /** * Provides an instance of RefreshableExperimentFetcher that is guaranteed to be a singleton. 
* * @author adil */ -public class SplitSynchronizationTask implements Closeable { +public class SplitSynchronizationTask implements SyncTask, Closeable { private static final Logger _log = LoggerFactory.getLogger(SplitSynchronizationTask.class); private final AtomicReference _splitFetcher = new AtomicReference<>(); @@ -36,30 +36,26 @@ public class SplitSynchronizationTask implements Closeable { private ScheduledFuture _scheduledFuture; - public SplitSynchronizationTask(SplitFetcher splitFetcher, SplitCacheProducer splitCachesplitCacheProducer, long refreshEveryNSeconds) { + public SplitSynchronizationTask(SplitFetcher splitFetcher, SplitCacheProducer splitCachesplitCacheProducer, long refreshEveryNSeconds, + ThreadFactory threadFactory) { _splitFetcher.set(checkNotNull(splitFetcher)); _splitCacheProducer.set(checkNotNull(splitCachesplitCacheProducer)); checkArgument(refreshEveryNSeconds >= 0L); _refreshEveryNSeconds = new AtomicLong(refreshEveryNSeconds); - ThreadFactory threadFactory = new ThreadFactoryBuilder() - .setDaemon(true) - .setNameFormat("split-splitFetcher-%d") - .build(); - - _scheduledExecutorService = Executors.newSingleThreadScheduledExecutor(threadFactory); + _scheduledExecutorService = buildSingleThreadScheduledExecutor(threadFactory, "split-splitFetcher-%d"); _executorService.set(_scheduledExecutorService); _running = new AtomicBoolean(); } - public void startPeriodicFetching() { + public void start() { if (_running.getAndSet(true)) { _log.debug("Splits PeriodicFetching is running..."); return; } - _log.debug("Starting PeriodicFetching Splits ..."); + _log.debug("Starting PeriodicFetching Feature flags ..."); _scheduledFuture = _scheduledExecutorService.scheduleWithFixedDelay(_splitFetcher.get(), 0L, _refreshEveryNSeconds.get(), TimeUnit.SECONDS); } @@ -70,7 +66,7 @@ public void stop() { } _scheduledFuture.cancel(false); - _log.debug("Stopped PeriodicFetching Splits ..."); + _log.debug("Stopped PeriodicFetching Feature flags ..."); } @Override @@ 
-99,8 +95,13 @@ public void close() { } } catch (InterruptedException e) { // reset the interrupt. - _log.warn("Shutdown hook for split fetchers has been interrupted"); + _log.warn("Shutdown hook for feature flag fetchers has been interrupted"); Thread.currentThread().interrupt(); } } -} + + @Override + public boolean isRunning() { + return _running.get(); + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/experiments/SyncTask.java b/client/src/main/java/io/split/engine/experiments/SyncTask.java new file mode 100644 index 000000000..622eb58f1 --- /dev/null +++ b/client/src/main/java/io/split/engine/experiments/SyncTask.java @@ -0,0 +1,9 @@ +package io.split.engine.experiments; + +public interface SyncTask { + + void start(); + void stop(); + void close(); + boolean isRunning(); +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/matchers/AllKeysMatcher.java b/client/src/main/java/io/split/engine/matchers/AllKeysMatcher.java index 54de94ce0..790224ab1 100644 --- a/client/src/main/java/io/split/engine/matchers/AllKeysMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/AllKeysMatcher.java @@ -1,7 +1,6 @@ package io.split.engine.matchers; import io.split.engine.evaluator.EvaluationContext; -import io.split.engine.evaluator.Evaluator; import java.util.Map; diff --git a/client/src/main/java/io/split/engine/matchers/AttributeMatcher.java b/client/src/main/java/io/split/engine/matchers/AttributeMatcher.java index 487a5d3de..92deb0140 100644 --- a/client/src/main/java/io/split/engine/matchers/AttributeMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/AttributeMatcher.java @@ -1,7 +1,6 @@ package io.split.engine.matchers; import io.split.engine.evaluator.EvaluationContext; -import io.split.engine.evaluator.Evaluator; import java.util.Map; import java.util.Objects; diff --git a/client/src/main/java/io/split/engine/matchers/BetweenMatcher.java 
b/client/src/main/java/io/split/engine/matchers/BetweenMatcher.java index 79ccd676c..a0ccfc1b7 100644 --- a/client/src/main/java/io/split/engine/matchers/BetweenMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/BetweenMatcher.java @@ -2,7 +2,6 @@ import io.split.client.dtos.DataType; import io.split.engine.evaluator.EvaluationContext; -import io.split.engine.evaluator.Evaluator; import java.util.Map; diff --git a/client/src/main/java/io/split/engine/matchers/BetweenSemverMatcher.java b/client/src/main/java/io/split/engine/matchers/BetweenSemverMatcher.java new file mode 100644 index 000000000..326e21830 --- /dev/null +++ b/client/src/main/java/io/split/engine/matchers/BetweenSemverMatcher.java @@ -0,0 +1,58 @@ +package io.split.engine.matchers; + +import io.split.engine.evaluator.EvaluationContext; + +import java.util.Map; + +public class BetweenSemverMatcher implements Matcher { + + private final Semver _semverStart; + private final Semver _semverEnd; + + public BetweenSemverMatcher(String semverStart, String semverEnd) { + _semverStart = Semver.build(semverStart); + _semverEnd = Semver.build(semverEnd); + } + + @Override + public boolean match(Object matchValue, String bucketingKey, Map attributes, EvaluationContext evaluationContext) { + if (!(matchValue instanceof String) || _semverStart == null || _semverEnd == null) { + return false; + } + Semver matchSemver = Semver.build(matchValue.toString()); + if (matchSemver == null) { + return false; + } + + return matchSemver.compare(_semverStart) >= 0 && matchSemver.compare(_semverEnd) <= 0; + } + + @Override + public String toString() { + StringBuilder bldr = new StringBuilder(); + bldr.append("between semver "); + bldr.append(_semverStart.version()); + bldr.append(" and "); + bldr.append(_semverEnd.version()); + return bldr.toString(); + } + + @Override + public int hashCode() { + int result = 17; + result = 31 * result + _semverStart.hashCode() + _semverEnd.hashCode(); + return result; + } + + 
@Override + public boolean equals(Object obj) { + if (obj == null) return false; + if (this == obj) return true; + if (!(obj instanceof BetweenSemverMatcher)) return false; + + BetweenSemverMatcher other = (BetweenSemverMatcher) obj; + + return _semverStart == other._semverStart && _semverEnd == other._semverEnd; + } + +} diff --git a/client/src/main/java/io/split/engine/matchers/BooleanMatcher.java b/client/src/main/java/io/split/engine/matchers/BooleanMatcher.java index 0a7418bb7..79d5a303f 100644 --- a/client/src/main/java/io/split/engine/matchers/BooleanMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/BooleanMatcher.java @@ -1,7 +1,6 @@ package io.split.engine.matchers; import io.split.engine.evaluator.EvaluationContext; -import io.split.engine.evaluator.Evaluator; import java.util.Map; diff --git a/client/src/main/java/io/split/engine/matchers/CombiningMatcher.java b/client/src/main/java/io/split/engine/matchers/CombiningMatcher.java index da75c53f3..4097ef851 100644 --- a/client/src/main/java/io/split/engine/matchers/CombiningMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/CombiningMatcher.java @@ -2,10 +2,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import io.split.client.SplitClientImpl; import io.split.client.dtos.MatcherCombiner; import io.split.engine.evaluator.EvaluationContext; -import io.split.engine.evaluator.Evaluator; import java.util.List; import java.util.Map; diff --git a/client/src/main/java/io/split/engine/matchers/DependencyMatcher.java b/client/src/main/java/io/split/engine/matchers/DependencyMatcher.java index 11d6f2b33..a3c3c4640 100644 --- a/client/src/main/java/io/split/engine/matchers/DependencyMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/DependencyMatcher.java @@ -1,7 +1,6 @@ package io.split.engine.matchers; import io.split.engine.evaluator.EvaluationContext; -import io.split.engine.evaluator.Evaluator; import java.util.List; import 
java.util.Map; @@ -11,11 +10,11 @@ * Supports the logic: if user is in split "feature" treatments ["on","off"] */ public class DependencyMatcher implements Matcher { - private String _split; + private String _featureFlag; private List _treatments; - public DependencyMatcher(String split, List treatments) { - _split = split; + public DependencyMatcher(String featureFlag, List treatments) { + _featureFlag = featureFlag; _treatments = treatments; } @@ -29,7 +28,7 @@ public boolean match(Object matchValue, String bucketingKey, Map return false; } - String result = evaluationContext.getEvaluator().evaluateFeature((String) matchValue, bucketingKey, _split, attributes).treatment; + String result = evaluationContext.getEvaluator().evaluateFeature((String) matchValue, bucketingKey, _featureFlag, attributes).treatment; return _treatments.contains(result); } @@ -38,7 +37,7 @@ public boolean match(Object matchValue, String bucketingKey, Map public String toString() { StringBuilder bldr = new StringBuilder(); bldr.append("in split \""); - bldr.append(this._split); + bldr.append(this._featureFlag); bldr.append("\" treatment "); bldr.append(this._treatments); return bldr.toString(); @@ -51,13 +50,13 @@ public boolean equals(Object o) { DependencyMatcher that = (DependencyMatcher) o; - if (!Objects.equals(_split, that._split)) return false; + if (!Objects.equals(_featureFlag, that._featureFlag)) return false; return Objects.equals(_treatments, that._treatments); } @Override public int hashCode() { - int result = _split != null ? _split.hashCode() : 0; + int result = _featureFlag != null ? _featureFlag.hashCode() : 0; result = 31 * result + (_treatments != null ? 
_treatments.hashCode() : 0); return result; } diff --git a/client/src/main/java/io/split/engine/matchers/EqualToMatcher.java b/client/src/main/java/io/split/engine/matchers/EqualToMatcher.java index dece1e539..9a1e32f37 100644 --- a/client/src/main/java/io/split/engine/matchers/EqualToMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/EqualToMatcher.java @@ -2,7 +2,6 @@ import io.split.client.dtos.DataType; import io.split.engine.evaluator.EvaluationContext; -import io.split.engine.evaluator.Evaluator; import java.util.Map; diff --git a/client/src/main/java/io/split/engine/matchers/EqualToSemverMatcher.java b/client/src/main/java/io/split/engine/matchers/EqualToSemverMatcher.java new file mode 100644 index 000000000..64d9135d2 --- /dev/null +++ b/client/src/main/java/io/split/engine/matchers/EqualToSemverMatcher.java @@ -0,0 +1,54 @@ +package io.split.engine.matchers; + +import io.split.engine.evaluator.EvaluationContext; + +import java.util.Map; + +public class EqualToSemverMatcher implements Matcher { + + private final Semver _semVer; + + public EqualToSemverMatcher(String semVer) { + _semVer = Semver.build(semVer); + } + + @Override + public boolean match(Object matchValue, String bucketingKey, Map attributes, EvaluationContext evaluationContext) { + if (!(matchValue instanceof String) || _semVer == null) { + return false; + } + Semver matchSemver = Semver.build(matchValue.toString()); + if (matchSemver == null) { + return false; + } + + return matchSemver.version().equals(_semVer.version()); + } + + @Override + public String toString() { + StringBuilder bldr = new StringBuilder(); + bldr.append("== semver "); + bldr.append(_semVer.version()); + return bldr.toString(); + } + + @Override + public int hashCode() { + int result = 17; + result = 31 * result + _semVer.hashCode(); + return result; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) return false; + if (this == obj) return true; + if (!(obj instanceof 
EqualToSemverMatcher)) return false; + + EqualToSemverMatcher other = (EqualToSemverMatcher) obj; + + return _semVer == other._semVer; + } + +} diff --git a/client/src/main/java/io/split/engine/matchers/GreaterThanOrEqualToMatcher.java b/client/src/main/java/io/split/engine/matchers/GreaterThanOrEqualToMatcher.java index 21620b920..1b83dc2c3 100644 --- a/client/src/main/java/io/split/engine/matchers/GreaterThanOrEqualToMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/GreaterThanOrEqualToMatcher.java @@ -2,7 +2,6 @@ import io.split.client.dtos.DataType; import io.split.engine.evaluator.EvaluationContext; -import io.split.engine.evaluator.Evaluator; import java.util.Map; diff --git a/client/src/main/java/io/split/engine/matchers/GreaterThanOrEqualToSemverMatcher.java b/client/src/main/java/io/split/engine/matchers/GreaterThanOrEqualToSemverMatcher.java new file mode 100644 index 000000000..ffc714cca --- /dev/null +++ b/client/src/main/java/io/split/engine/matchers/GreaterThanOrEqualToSemverMatcher.java @@ -0,0 +1,54 @@ +package io.split.engine.matchers; + +import io.split.engine.evaluator.EvaluationContext; + +import java.util.Map; + +public class GreaterThanOrEqualToSemverMatcher implements Matcher { + + private final Semver _semVer; + + public GreaterThanOrEqualToSemverMatcher(String semVer) { + _semVer = Semver.build(semVer); + } + + @Override + public boolean match(Object matchValue, String bucketingKey, Map attributes, EvaluationContext evaluationContext) { + if (!(matchValue instanceof String)|| _semVer == null) { + return false; + } + Semver matchSemver = Semver.build(matchValue.toString()); + if (matchSemver == null) { + return false; + } + + return matchSemver.compare(_semVer) >= 0; + } + + @Override + public String toString() { + StringBuilder bldr = new StringBuilder(); + bldr.append(">= semver "); + bldr.append(_semVer.version()); + return bldr.toString(); + } + + @Override + public int hashCode() { + int result = 17; + result = 31 * 
result + _semVer.hashCode(); + return result; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) return false; + if (this == obj) return true; + if (!(obj instanceof GreaterThanOrEqualToSemverMatcher)) return false; + + GreaterThanOrEqualToSemverMatcher other = (GreaterThanOrEqualToSemverMatcher) obj; + + return _semVer == other._semVer; + } + +} diff --git a/client/src/main/java/io/split/engine/matchers/InListSemverMatcher.java b/client/src/main/java/io/split/engine/matchers/InListSemverMatcher.java new file mode 100644 index 000000000..69fd1ea45 --- /dev/null +++ b/client/src/main/java/io/split/engine/matchers/InListSemverMatcher.java @@ -0,0 +1,77 @@ +package io.split.engine.matchers; + +import io.split.engine.evaluator.EvaluationContext; + +import java.util.Collection; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +public class InListSemverMatcher implements Matcher { + + private final Set _semverlist = new HashSet<>(); + + public InListSemverMatcher(Collection whitelist) { + for (String item : whitelist) { + Semver semver = Semver.build(item); + if (semver == null) continue; + + _semverlist.add(semver); + } + } + + @Override + public boolean match(Object matchValue, String bucketingKey, Map attributes, EvaluationContext evaluationContext) { + if (!(matchValue instanceof String) || _semverlist.isEmpty()) { + return false; + } + Semver matchSemver = Semver.build(matchValue.toString()); + if (matchSemver == null) { + return false; + } + + for (Semver semverItem : _semverlist) { + if (semverItem.version().equals(matchSemver.version())) return true; + } + return false; + } + + @Override + public String toString() { + StringBuilder bldr = new StringBuilder(); + bldr.append("in semver list ["); + boolean first = true; + + for (Semver item : _semverlist) { + if (!first) { + bldr.append(','); + } + bldr.append('"'); + bldr.append(item.version()); + bldr.append('"'); + first = false; + } + + bldr.append("]"); + return 
bldr.toString(); + } + + @Override + public int hashCode() { + int result = 17; + result = 31 * result + _semverlist.hashCode(); + return result; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) return false; + if (this == obj) return true; + if (!(obj instanceof InListSemverMatcher)) return false; + + InListSemverMatcher other = (InListSemverMatcher) obj; + + return _semverlist == other._semverlist; + } + +} diff --git a/client/src/main/java/io/split/engine/matchers/LessThanOrEqualToMatcher.java b/client/src/main/java/io/split/engine/matchers/LessThanOrEqualToMatcher.java index bd4e779f2..24a74aaba 100644 --- a/client/src/main/java/io/split/engine/matchers/LessThanOrEqualToMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/LessThanOrEqualToMatcher.java @@ -2,7 +2,6 @@ import io.split.client.dtos.DataType; import io.split.engine.evaluator.EvaluationContext; -import io.split.engine.evaluator.Evaluator; import java.util.Map; diff --git a/client/src/main/java/io/split/engine/matchers/LessThanOrEqualToSemverMatcher.java b/client/src/main/java/io/split/engine/matchers/LessThanOrEqualToSemverMatcher.java new file mode 100644 index 000000000..dd05f8c4d --- /dev/null +++ b/client/src/main/java/io/split/engine/matchers/LessThanOrEqualToSemverMatcher.java @@ -0,0 +1,54 @@ +package io.split.engine.matchers; + +import io.split.engine.evaluator.EvaluationContext; + +import java.util.Map; + +public class LessThanOrEqualToSemverMatcher implements Matcher { + + private final Semver _semVer; + + public LessThanOrEqualToSemverMatcher(String semVer) { + _semVer = Semver.build(semVer); + } + + @Override + public boolean match(Object matchValue, String bucketingKey, Map attributes, EvaluationContext evaluationContext) { + if (!(matchValue instanceof String) || _semVer == null) { + return false; + } + Semver matchSemver = Semver.build(matchValue.toString()); + if (matchSemver == null) { + return false; + } + + return matchSemver.compare(_semVer) 
<= 0; + } + + @Override + public String toString() { + StringBuilder bldr = new StringBuilder(); + bldr.append("<= semver "); + bldr.append(_semVer.version()); + return bldr.toString(); + } + + @Override + public int hashCode() { + int result = 17; + result = 31 * result + _semVer.hashCode(); + return result; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) return false; + if (this == obj) return true; + if (!(obj instanceof LessThanOrEqualToSemverMatcher)) return false; + + LessThanOrEqualToSemverMatcher other = (LessThanOrEqualToSemverMatcher) obj; + + return _semVer == other._semVer; + } + +} diff --git a/client/src/main/java/io/split/engine/matchers/PrerequisitesMatcher.java b/client/src/main/java/io/split/engine/matchers/PrerequisitesMatcher.java new file mode 100644 index 000000000..122784498 --- /dev/null +++ b/client/src/main/java/io/split/engine/matchers/PrerequisitesMatcher.java @@ -0,0 +1,71 @@ +package io.split.engine.matchers; + +import io.split.client.dtos.Prerequisites; +import io.split.engine.evaluator.EvaluationContext; + +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + +public class PrerequisitesMatcher implements Matcher { + private List _prerequisites; + + public PrerequisitesMatcher(List prerequisites) { + _prerequisites = prerequisites; + } + + public List getPrerequisites() { return _prerequisites; } + + @Override + public boolean match(Object matchValue, String bucketingKey, Map attributes, EvaluationContext evaluationContext) { + if (matchValue == null) { + return false; + } + + if (!(matchValue instanceof String)) { + return false; + } + + if (_prerequisites == null) { + return true; + } + + for (Prerequisites prerequisites : _prerequisites) { + String treatment = evaluationContext.getEvaluator().evaluateFeature((String) matchValue, bucketingKey, + prerequisites.featureFlagName, attributes). 
treatment; + if (!prerequisites.treatments.contains(treatment)) { + return false; + } + } + return true; + } + + @Override + public String toString() { + StringBuilder bldr = new StringBuilder(); + bldr.append("prerequisites: "); + if (this._prerequisites != null) { + bldr.append(this._prerequisites.stream().map(pr -> pr.featureFlagName + " " + + pr.treatments.toString()).map(Object::toString).collect(Collectors.joining(", "))); + } + return bldr.toString(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + PrerequisitesMatcher that = (PrerequisitesMatcher) o; + + return Objects.equals(_prerequisites, that._prerequisites); + } + + @Override + public int hashCode() { + int result = _prerequisites != null ? _prerequisites.hashCode() : 0; + result = 31 * result + (_prerequisites != null ? _prerequisites.hashCode() : 0); + return result; + } +} diff --git a/client/src/main/java/io/split/engine/matchers/RuleBasedSegmentMatcher.java b/client/src/main/java/io/split/engine/matchers/RuleBasedSegmentMatcher.java new file mode 100644 index 000000000..4c74527be --- /dev/null +++ b/client/src/main/java/io/split/engine/matchers/RuleBasedSegmentMatcher.java @@ -0,0 +1,105 @@ +package io.split.engine.matchers; + +import io.split.client.dtos.ExcludedSegments; +import io.split.engine.evaluator.EvaluationContext; +import io.split.engine.experiments.ParsedCondition; +import io.split.engine.experiments.ParsedRuleBasedSegment; + +import java.util.List; +import java.util.Map; + +import static com.google.common.base.Preconditions.checkNotNull; + +/** + * A matcher that checks if the key is part of a user defined segment. This class + * assumes that the logic for refreshing what keys are part of a segment is delegated + * to SegmentFetcher. 
+ * + * @author adil + */ +public class RuleBasedSegmentMatcher implements Matcher { + private final String _segmentName; + + public RuleBasedSegmentMatcher(String segmentName) { + _segmentName = checkNotNull(segmentName); + } + + @Override + public boolean match(Object matchValue, String bucketingKey, Map attributes, EvaluationContext evaluationContext) { + if (!(matchValue instanceof String)) { + return false; + } + ParsedRuleBasedSegment parsedRuleBasedSegment = evaluationContext.getRuleBasedSegmentCache().get(_segmentName); + if (parsedRuleBasedSegment == null) { + return false; + } + + if (parsedRuleBasedSegment.excludedKeys().contains(matchValue)) { + return false; + } + + if (matchExcludedSegments(parsedRuleBasedSegment.excludedSegments(), matchValue, bucketingKey, attributes, evaluationContext)) { + return false; + } + + return matchConditions(parsedRuleBasedSegment.parsedConditions(), matchValue, bucketingKey, attributes, evaluationContext); + } + + private boolean matchExcludedSegments(List excludedSegments, Object matchValue, String bucketingKey, + Map attributes, EvaluationContext evaluationContext) { + for (ExcludedSegments excludedSegment: excludedSegments) { + if (excludedSegment.isStandard() && evaluationContext.getSegmentCache().isInSegment(excludedSegment.name, (String) matchValue)) { + return true; + } + + if (excludedSegment.isRuleBased()) { + RuleBasedSegmentMatcher excludedRbsMatcher = new RuleBasedSegmentMatcher(excludedSegment.name); + if (excludedRbsMatcher.match(matchValue, bucketingKey, attributes, evaluationContext)) { + return true; + } + } + } + + return false; + } + + private boolean matchConditions(List conditions, Object matchValue, String bucketingKey, + Map attributes, EvaluationContext evaluationContext) { + for (ParsedCondition parsedCondition : conditions) { + if (parsedCondition.matcher().match((String) matchValue, bucketingKey, attributes, evaluationContext)) { + return true; + } + } + return false; + } + + @Override + public 
int hashCode() { + int result = 17; + result = 31 * result + _segmentName.hashCode(); + return result; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) return false; + if (this == obj) return true; + if (!(obj instanceof RuleBasedSegmentMatcher)) return false; + + RuleBasedSegmentMatcher other = (RuleBasedSegmentMatcher) obj; + + return _segmentName.equals(other._segmentName); + } + + @Override + public String toString() { + StringBuilder bldr = new StringBuilder(); + bldr.append("in segment "); + bldr.append(_segmentName); + return bldr.toString(); + } + + public String getSegmentName() { + return _segmentName; + } +} diff --git a/client/src/main/java/io/split/engine/matchers/Semver.java b/client/src/main/java/io/split/engine/matchers/Semver.java new file mode 100644 index 000000000..7a85a0d72 --- /dev/null +++ b/client/src/main/java/io/split/engine/matchers/Semver.java @@ -0,0 +1,176 @@ +package io.split.engine.matchers; + +import io.split.client.exceptions.SemverParseException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Arrays; + +public class Semver { + private static final String METADATA_DELIMITER = "+"; + private static final String PRERELEASE_DELIMITER = "-"; + private static final String VALUE_DELIMITER = "\\."; + private static final Logger _log = LoggerFactory.getLogger(Semver.class); + + private Long _major; + private Long _minor; + private Long _patch; + private String[] _preRelease = new String[] {}; + private boolean _isStable; + private String _metadata; + private String _version; + + public static Semver build(String version) { + if (version.isEmpty()) return null; + try { + return new Semver(version); + } catch (Exception ex) { + _log.error("An error occurred during the creation of a Semver instance:", ex.getMessage()); + return null; + } + } + + public String version() { + return _version; + } + + public Long major() { + return _major; + } + + public Long minor() { + return _minor; + } 
+ + public Long patch() { + return _patch; + } + + public String[] prerelease() { + return _preRelease; + } + + public String metadata() { + return _metadata; + } + + public boolean isStable() { + return _isStable; + } + + /** + * Precedence comparision between 2 Semver objects. + * + * @return the value {@code 0} if {@code this == toCompare}; + * a value less than {@code 0} if {@code this < toCompare}; and + * a value greater than {@code 0} if {@code this > toCompare} + */ + public int compare(Semver toCompare) { + if (_version.equals(toCompare.version())) { + return 0; + } + // Compare major, minor, and patch versions numerically + int result = Long.compare(_major, toCompare.major()); + if (result != 0) { + return result; + } + result = Long.compare(_minor, toCompare.minor()); + if (result != 0) { + return result; + } + result = Long.compare(_patch, toCompare.patch()); + if (result != 0) { + return result; + } + if (!_isStable && toCompare.isStable()) { + return -1; + } else if (_isStable && !toCompare.isStable()) { + return 1; + } + // Compare pre-release versions lexically + int minLength = Math.min(_preRelease.length, toCompare.prerelease().length); + for (int i = 0; i < minLength; i++) { + if (_preRelease[i].equals(toCompare.prerelease()[i])) { + continue; + } + if ( isNumeric(_preRelease[i]) && isNumeric(toCompare._preRelease[i])) { + return Long.compare(Integer.parseInt(_preRelease[i]), Long.parseLong(toCompare._preRelease[i])); + } + return adjustNumber(_preRelease[i].compareTo(toCompare._preRelease[i])); + } + // Compare lengths of pre-release versions + return Integer.compare(_preRelease.length, toCompare._preRelease.length); + } + + private int adjustNumber(int number) { + if (number > 0) return 1; + if (number < 0) return -1; + return 0; + } + private Semver(String version) throws SemverParseException { + String vWithoutMetadata = setAndRemoveMetadataIfExists(version); + String vWithoutPreRelease = setAndRemovePreReleaseIfExists(vWithoutMetadata); + 
setMajorMinorAndPatch(vWithoutPreRelease); + _version = setVersion(); + } + private String setAndRemoveMetadataIfExists(String version) throws SemverParseException { + int index = version.indexOf(METADATA_DELIMITER); + if (index == -1) { + return version; + } + _metadata = version.substring(index+1); + if (_metadata == null || _metadata.isEmpty()) { + throw new SemverParseException("Unable to convert to Semver, incorrect pre release data"); + } + return version.substring(0, index); + } + private String setAndRemovePreReleaseIfExists(String vWithoutMetadata) throws SemverParseException { + int index = vWithoutMetadata.indexOf(PRERELEASE_DELIMITER); + if (index == -1) { + _isStable = true; + return vWithoutMetadata; + } + String preReleaseData = vWithoutMetadata.substring(index+1); + _preRelease = preReleaseData.split(VALUE_DELIMITER); + if (_preRelease == null || Arrays.stream(_preRelease).allMatch(pr -> pr == null || pr.isEmpty())) { + throw new SemverParseException("Unable to convert to Semver, incorrect pre release data"); + } + return vWithoutMetadata.substring(0, index); + } + private void setMajorMinorAndPatch(String version) throws SemverParseException { + String[] vParts = version.split(VALUE_DELIMITER); + if (vParts.length != 3) + throw new SemverParseException("Unable to convert to Semver, incorrect format: " + version); + _major = Long.parseLong(vParts[0]); + _minor = Long.parseLong(vParts[1]); + _patch = Long.parseLong(vParts[2]); + } + + private String setVersion() { + String toReturn = _major + VALUE_DELIMITER + _minor + VALUE_DELIMITER + _patch; + if (_preRelease != null && _preRelease.length != 0) + { + for (int i = 0; i < _preRelease.length; i++) + { + if (isNumeric(_preRelease[i])) + { + _preRelease[i] = Long.toString(Long.parseLong(_preRelease[i])); + } + } + toReturn = toReturn + PRERELEASE_DELIMITER + String.join(VALUE_DELIMITER, _preRelease); + } + if (_metadata != null && !_metadata.isEmpty()) { + toReturn = toReturn + METADATA_DELIMITER + 
_metadata; + } + return toReturn; + } + + private static boolean isNumeric(String str) { + try { + Double.parseDouble(str); + return true; + } catch(NumberFormatException e){ + return false; + } + } +} diff --git a/client/src/main/java/io/split/engine/matchers/collections/ContainsAllOfSetMatcher.java b/client/src/main/java/io/split/engine/matchers/collections/ContainsAllOfSetMatcher.java index a2e477df7..5f4f9433a 100644 --- a/client/src/main/java/io/split/engine/matchers/collections/ContainsAllOfSetMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/collections/ContainsAllOfSetMatcher.java @@ -1,7 +1,6 @@ package io.split.engine.matchers.collections; import io.split.engine.evaluator.EvaluationContext; -import io.split.engine.evaluator.Evaluator; import io.split.engine.matchers.Matcher; import java.util.Collection; diff --git a/client/src/main/java/io/split/engine/matchers/collections/ContainsAnyOfSetMatcher.java b/client/src/main/java/io/split/engine/matchers/collections/ContainsAnyOfSetMatcher.java index 93c7c2815..3a2514401 100644 --- a/client/src/main/java/io/split/engine/matchers/collections/ContainsAnyOfSetMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/collections/ContainsAnyOfSetMatcher.java @@ -1,7 +1,6 @@ package io.split.engine.matchers.collections; import io.split.engine.evaluator.EvaluationContext; -import io.split.engine.evaluator.Evaluator; import io.split.engine.matchers.Matcher; import java.util.Collection; diff --git a/client/src/main/java/io/split/engine/matchers/collections/EqualToSetMatcher.java b/client/src/main/java/io/split/engine/matchers/collections/EqualToSetMatcher.java index bf811c70a..4a09c9efc 100644 --- a/client/src/main/java/io/split/engine/matchers/collections/EqualToSetMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/collections/EqualToSetMatcher.java @@ -1,7 +1,6 @@ package io.split.engine.matchers.collections; import io.split.engine.evaluator.EvaluationContext; -import 
io.split.engine.evaluator.Evaluator; import io.split.engine.matchers.Matcher; import java.util.Collection; diff --git a/client/src/main/java/io/split/engine/matchers/collections/PartOfSetMatcher.java b/client/src/main/java/io/split/engine/matchers/collections/PartOfSetMatcher.java index 88974cb58..8bb5f1399 100644 --- a/client/src/main/java/io/split/engine/matchers/collections/PartOfSetMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/collections/PartOfSetMatcher.java @@ -1,7 +1,6 @@ package io.split.engine.matchers.collections; import io.split.engine.evaluator.EvaluationContext; -import io.split.engine.evaluator.Evaluator; import io.split.engine.matchers.Matcher; import java.util.Collection; diff --git a/client/src/main/java/io/split/engine/matchers/strings/ContainsAnyOfMatcher.java b/client/src/main/java/io/split/engine/matchers/strings/ContainsAnyOfMatcher.java index 40f950cbe..b8cbe8fca 100644 --- a/client/src/main/java/io/split/engine/matchers/strings/ContainsAnyOfMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/strings/ContainsAnyOfMatcher.java @@ -1,7 +1,6 @@ package io.split.engine.matchers.strings; import io.split.engine.evaluator.EvaluationContext; -import io.split.engine.evaluator.Evaluator; import io.split.engine.matchers.Matcher; import java.util.Collection; diff --git a/client/src/main/java/io/split/engine/matchers/strings/EndsWithAnyOfMatcher.java b/client/src/main/java/io/split/engine/matchers/strings/EndsWithAnyOfMatcher.java index 971fe01cc..32ac9f7f3 100644 --- a/client/src/main/java/io/split/engine/matchers/strings/EndsWithAnyOfMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/strings/EndsWithAnyOfMatcher.java @@ -1,7 +1,6 @@ package io.split.engine.matchers.strings; import io.split.engine.evaluator.EvaluationContext; -import io.split.engine.evaluator.Evaluator; import io.split.engine.matchers.Matcher; import java.util.Collection; diff --git 
a/client/src/main/java/io/split/engine/matchers/strings/RegularExpressionMatcher.java b/client/src/main/java/io/split/engine/matchers/strings/RegularExpressionMatcher.java index f63dbcca8..f64b3264b 100644 --- a/client/src/main/java/io/split/engine/matchers/strings/RegularExpressionMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/strings/RegularExpressionMatcher.java @@ -1,7 +1,6 @@ package io.split.engine.matchers.strings; import io.split.engine.evaluator.EvaluationContext; -import io.split.engine.evaluator.Evaluator; import io.split.engine.matchers.Matcher; import java.util.Map; diff --git a/client/src/main/java/io/split/engine/matchers/strings/StartsWithAnyOfMatcher.java b/client/src/main/java/io/split/engine/matchers/strings/StartsWithAnyOfMatcher.java index bb74ee185..7f1ed2cad 100644 --- a/client/src/main/java/io/split/engine/matchers/strings/StartsWithAnyOfMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/strings/StartsWithAnyOfMatcher.java @@ -1,7 +1,6 @@ package io.split.engine.matchers.strings; import io.split.engine.evaluator.EvaluationContext; -import io.split.engine.evaluator.Evaluator; import io.split.engine.matchers.Matcher; import java.util.Collection; diff --git a/client/src/main/java/io/split/engine/matchers/strings/WhitelistMatcher.java b/client/src/main/java/io/split/engine/matchers/strings/WhitelistMatcher.java index d41123e53..5068c1437 100644 --- a/client/src/main/java/io/split/engine/matchers/strings/WhitelistMatcher.java +++ b/client/src/main/java/io/split/engine/matchers/strings/WhitelistMatcher.java @@ -1,7 +1,6 @@ package io.split.engine.matchers.strings; import io.split.engine.evaluator.EvaluationContext; -import io.split.engine.evaluator.Evaluator; import io.split.engine.matchers.Matcher; import java.util.Collection; diff --git a/client/src/main/java/io/split/engine/segments/SegmentFetcher.java b/client/src/main/java/io/split/engine/segments/SegmentFetcher.java index 05cb511b2..3d5c3a48e 100644 --- 
a/client/src/main/java/io/split/engine/segments/SegmentFetcher.java +++ b/client/src/main/java/io/split/engine/segments/SegmentFetcher.java @@ -9,9 +9,7 @@ public interface SegmentFetcher { /** * fetch */ - void fetch(FetchOptions opts); + boolean fetch(FetchOptions opts); boolean runWhitCacheHeader(); - - void fetchAll(); } diff --git a/client/src/main/java/io/split/engine/segments/SegmentFetcherImp.java b/client/src/main/java/io/split/engine/segments/SegmentFetcherImp.java index bd710bd0a..526ca02d1 100644 --- a/client/src/main/java/io/split/engine/segments/SegmentFetcherImp.java +++ b/client/src/main/java/io/split/engine/segments/SegmentFetcherImp.java @@ -1,8 +1,6 @@ package io.split.engine.segments; -import com.google.common.annotations.VisibleForTesting; import io.split.client.dtos.SegmentChange; -import io.split.engine.SDKReadinessGates; import io.split.storages.SegmentCacheProducer; import io.split.telemetry.domain.enums.LastSynchronizationRecordsEnum; import io.split.telemetry.storage.TelemetryRuntimeProducer; @@ -21,52 +19,61 @@ public class SegmentFetcherImp implements SegmentFetcher { private final String _segmentName; private final SegmentChangeFetcher _segmentChangeFetcher; private final SegmentCacheProducer _segmentCacheProducer; - private final SDKReadinessGates _gates; private final TelemetryRuntimeProducer _telemetryRuntimeProducer; private final Object _lock = new Object(); - public SegmentFetcherImp(String segmentName, SegmentChangeFetcher segmentChangeFetcher, SDKReadinessGates gates, SegmentCacheProducer segmentCacheProducer, TelemetryRuntimeProducer telemetryRuntimeProducer) { + public SegmentFetcherImp(String segmentName, SegmentChangeFetcher segmentChangeFetcher, SegmentCacheProducer segmentCacheProducer, + TelemetryRuntimeProducer telemetryRuntimeProducer) { _segmentName = checkNotNull(segmentName); _segmentChangeFetcher = checkNotNull(segmentChangeFetcher); _segmentCacheProducer = checkNotNull(segmentCacheProducer); - _gates = 
checkNotNull(gates); _telemetryRuntimeProducer = checkNotNull(telemetryRuntimeProducer); _segmentCacheProducer.updateSegment(segmentName, new ArrayList<>(), new ArrayList<>(), -1L); } @Override - public void fetch(FetchOptions opts){ + public boolean fetch(FetchOptions opts){ try { - callLoopRun(opts); - } catch (Throwable t) { - _log.error("RefreshableSegmentFetcher failed: " + t.getMessage()); + final long INITIAL_CN = _segmentCacheProducer.getChangeNumber(_segmentName); + while (true) { + long start = _segmentCacheProducer.getChangeNumber(_segmentName); + runWithoutExceptionHandling(opts); + if (INITIAL_CN == start) { + opts = new FetchOptions.Builder(opts).targetChangeNumber(FetchOptions.DEFAULT_TARGET_CHANGENUMBER).build(); + } + long end = _segmentCacheProducer.getChangeNumber(_segmentName); + if (start >= end) { + break; + } + } + return true; + } catch (Exception e){ + _log.error("RefreshableSegmentFetcher failed: " + e.getMessage()); if (_log.isDebugEnabled()) { - _log.debug("Reason:", t); + _log.debug("Reason:", e); } + return false; } } private void runWithoutExceptionHandling(FetchOptions options) { + if (_log.isDebugEnabled()) { + _log.debug(String.format("Synchronizing segment %s", _segmentName)); + } SegmentChange change = _segmentChangeFetcher.fetch(_segmentName, _segmentCacheProducer.getChangeNumber(_segmentName), options); if (change == null) { throw new IllegalStateException("SegmentChange was null"); } - if (change.till == _segmentCacheProducer.getChangeNumber(_segmentName)) { - // no change. - return; - } - if (change.since != _segmentCacheProducer.getChangeNumber(_segmentName) || change.since < _segmentCacheProducer.getChangeNumber(_segmentName)) { // some other thread may have updated the shared state. exit return; } - if (change.added.isEmpty() && change.removed.isEmpty()) { // there are no changes. weird! 
_segmentCacheProducer.setChangeNumber(_segmentName,change.till); @@ -114,49 +121,8 @@ private String summarize(List changes) { return bldr.toString(); } - @VisibleForTesting - void callLoopRun(FetchOptions opts){ - final long INITIAL_CN = _segmentCacheProducer.getChangeNumber(_segmentName); - while (true) { - long start = _segmentCacheProducer.getChangeNumber(_segmentName); - runWithoutExceptionHandling(opts); - if (INITIAL_CN == start) { - opts = new FetchOptions.Builder(opts).targetChangeNumber(FetchOptions.DEFAULT_TARGET_CHANGENUMBER).build(); - } - long end = _segmentCacheProducer.getChangeNumber(_segmentName); - if (start >= end) { - break; - } - } - } - @Override public boolean runWhitCacheHeader(){ - return this.fetchAndUpdate(new FetchOptions.Builder().cacheControlHeaders(true).build()); - } - - /** - * Calls callLoopRun and after fetchs segment. - * @param opts contains all soft of options used when issuing the fetch request - */ - @VisibleForTesting - boolean fetchAndUpdate(FetchOptions opts) { - try { - // Do this again in case the previous call errored out. 
- callLoopRun(opts); - return true; - - } catch (Throwable t) { - _log.error("RefreshableSegmentFetcher failed: " + t.getMessage()); - if (_log.isDebugEnabled()) { - _log.debug("Reason:", t); - } - return false; - } - } - - @Override - public void fetchAll() { - this.fetchAndUpdate(new FetchOptions.Builder().build()); + return this.fetch(new FetchOptions.Builder().cacheControlHeaders(true).build()); } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/segments/SegmentSynchronizationTask.java b/client/src/main/java/io/split/engine/segments/SegmentSynchronizationTask.java index 1a1764ed9..85a10a20e 100644 --- a/client/src/main/java/io/split/engine/segments/SegmentSynchronizationTask.java +++ b/client/src/main/java/io/split/engine/segments/SegmentSynchronizationTask.java @@ -1,6 +1,8 @@ package io.split.engine.segments; -public interface SegmentSynchronizationTask extends Runnable { +import io.split.engine.experiments.SyncTask; + +public interface SegmentSynchronizationTask extends SyncTask { /** * initializes the segment * @param segmentName @@ -17,7 +19,7 @@ public interface SegmentSynchronizationTask extends Runnable { /** * starts the fetching */ - void startPeriodicFetching(); + void start(); /** * stops the thread @@ -34,4 +36,5 @@ public interface SegmentSynchronizationTask extends Runnable { * fetch every Segment Synchronous */ boolean fetchAllSynchronous(); + void close(); } diff --git a/client/src/main/java/io/split/engine/segments/SegmentSynchronizationTaskImp.java b/client/src/main/java/io/split/engine/segments/SegmentSynchronizationTaskImp.java index a12b96f55..857493087 100644 --- a/client/src/main/java/io/split/engine/segments/SegmentSynchronizationTaskImp.java +++ b/client/src/main/java/io/split/engine/segments/SegmentSynchronizationTaskImp.java @@ -1,25 +1,22 @@ package io.split.engine.segments; import com.google.common.collect.Maps; -import com.google.common.util.concurrent.ThreadFactoryBuilder; -import 
io.split.engine.SDKReadinessGates; -import io.split.engine.experiments.ParsedSplit; -import io.split.engine.matchers.UserDefinedSegmentMatcher; +import io.split.client.utils.SplitExecutorFactory; +import io.split.engine.common.FetchOptions; +import io.split.storages.RuleBasedSegmentCacheConsumer; import io.split.storages.SegmentCacheProducer; import io.split.storages.SplitCacheConsumer; -import io.split.storages.memory.InMemoryCacheImp; import io.split.telemetry.storage.TelemetryRuntimeProducer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.Closeable; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; @@ -41,42 +38,30 @@ public class SegmentSynchronizationTaskImp implements SegmentSynchronizationTask private final Object _lock = new Object(); private final ConcurrentMap _segmentFetchers = Maps.newConcurrentMap(); private final SegmentCacheProducer _segmentCacheProducer; - private final SDKReadinessGates _gates; private final ScheduledExecutorService _scheduledExecutorService; private final TelemetryRuntimeProducer _telemetryRuntimeProducer; private final SplitCacheConsumer _splitCacheConsumer; + private final RuleBasedSegmentCacheConsumer _ruleBasedSegmentCacheConsumer; private ScheduledFuture _scheduledFuture; - public SegmentSynchronizationTaskImp(SegmentChangeFetcher segmentChangeFetcher, long refreshEveryNSeconds, int numThreads, SDKReadinessGates gates, SegmentCacheProducer segmentCacheProducer, - TelemetryRuntimeProducer telemetryRuntimeProducer, SplitCacheConsumer splitCacheConsumer) { + public SegmentSynchronizationTaskImp(SegmentChangeFetcher segmentChangeFetcher, long 
refreshEveryNSeconds, int numThreads, + SegmentCacheProducer segmentCacheProducer, TelemetryRuntimeProducer telemetryRuntimeProducer, + SplitCacheConsumer splitCacheConsumer, ThreadFactory threadFactory, + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer) { _segmentChangeFetcher = checkNotNull(segmentChangeFetcher); checkArgument(refreshEveryNSeconds >= 0L); _refreshEveryNSeconds = new AtomicLong(refreshEveryNSeconds); - - _gates = checkNotNull(gates); - - ThreadFactory threadFactory = new ThreadFactoryBuilder() - .setDaemon(true) - .setNameFormat("split-segmentFetcher-" + "%d") - .build(); - - _scheduledExecutorService = Executors.newScheduledThreadPool(numThreads, threadFactory); - + _scheduledExecutorService = SplitExecutorFactory.buildScheduledExecutorService(threadFactory, "split-segmentFetcher-" + "%d", numThreads); _running = new AtomicBoolean(false); _segmentCacheProducer = checkNotNull(segmentCacheProducer); _telemetryRuntimeProducer = checkNotNull(telemetryRuntimeProducer); _splitCacheConsumer = checkNotNull(splitCacheConsumer); + _ruleBasedSegmentCacheConsumer = checkNotNull(ruleBasedSegmentCacheConsumer); } - @Override - public void run() { - this.fetchAll(false); - } - - @Override public void initializeSegment(String segmentName) { SegmentFetcher segment = _segmentFetchers.get(segmentName); if (segment != null) { @@ -92,17 +77,16 @@ public void initializeSegment(String segmentName) { return; } - segment = new SegmentFetcherImp(segmentName, _segmentChangeFetcher, _gates, _segmentCacheProducer, _telemetryRuntimeProducer); + SegmentFetcher newSegment = new SegmentFetcherImp(segmentName, _segmentChangeFetcher, _segmentCacheProducer, _telemetryRuntimeProducer); if (_running.get()) { - _scheduledExecutorService.submit(segment::fetchAll); + _scheduledExecutorService.submit(() -> newSegment.fetch(new FetchOptions.Builder().build())); } - _segmentFetchers.putIfAbsent(segmentName, segment); + _segmentFetchers.putIfAbsent(segmentName, newSegment); } } 
- @Override public SegmentFetcher getFetcher(String segmentName) { initializeSegment(segmentName); @@ -110,14 +94,16 @@ public SegmentFetcher getFetcher(String segmentName) { } @Override - public void startPeriodicFetching() { + public void start() { if (_running.getAndSet(true) ) { _log.debug("Segments PeriodicFetching is running..."); return; } _log.debug("Starting PeriodicFetching Segments ..."); - _scheduledFuture = _scheduledExecutorService.scheduleWithFixedDelay(this, 0L, _refreshEveryNSeconds.get(), TimeUnit.SECONDS); + _scheduledFuture = _scheduledExecutorService.scheduleWithFixedDelay(() -> { + fetchAll(false); + }, 0L, _refreshEveryNSeconds.get(), TimeUnit.SECONDS); } @Override @@ -151,8 +137,13 @@ public void close() { } @Override + public boolean isRunning() { + return _running.get(); + } + public void fetchAll(boolean addCacheHeader) { - _splitCacheConsumer.getSegments().forEach(this::initialize); + Set names = getSegmentNames(); + names.forEach(this::initialize); for (Map.Entry entry : _segmentFetchers.entrySet()) { SegmentFetcher fetcher = entry.getValue(); @@ -165,13 +156,13 @@ public void fetchAll(boolean addCacheHeader) { continue; } - _scheduledExecutorService.submit(fetcher::fetchAll); + _scheduledExecutorService.submit(() -> fetcher.fetch(new FetchOptions.Builder().build())); } } - @Override public boolean fetchAllSynchronous() { - _splitCacheConsumer.getSegments().forEach(this::initialize); + Set names = getSegmentNames(); + names.forEach(this::initialize); List> segmentFetchExecutions = _segmentFetchers.entrySet() .stream().map(e -> _scheduledExecutorService.submit(e.getValue()::runWhitCacheHeader)) .collect(Collectors.toList()); @@ -204,9 +195,16 @@ private void initialize(String segmentName) { return; } - segment = new SegmentFetcherImp(segmentName, _segmentChangeFetcher, _gates, _segmentCacheProducer, _telemetryRuntimeProducer); + segment = new SegmentFetcherImp(segmentName, _segmentChangeFetcher, _segmentCacheProducer, 
_telemetryRuntimeProducer); _segmentFetchers.putIfAbsent(segmentName, segment); } } -} + + private Set getSegmentNames() { + Set names = new HashSet<>(_splitCacheConsumer.getSegments()); + names.addAll(_ruleBasedSegmentCacheConsumer.getSegments()); + + return names; + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/splitter/Splitter.java b/client/src/main/java/io/split/engine/splitter/Splitter.java index 60bb6d740..c867a81db 100644 --- a/client/src/main/java/io/split/engine/splitter/Splitter.java +++ b/client/src/main/java/io/split/engine/splitter/Splitter.java @@ -33,15 +33,15 @@ public static String getTreatment(String key, int seed, List partitio static long hash(String key, int seed, int algo) { switch (algo) { case ALGO_MURMUR: - return murmur_hash(key, seed); + return murmurHash(key, seed); case ALGO_LEGACY: default: - return legacy_hash(key, seed); + return legacyHash(key, seed); } } /*package private*/ - static long murmur_hash(String key, int seed) { + static long murmurHash(String key, int seed) { return MurmurHash3.murmurhash3_x86_32(key, 0, key.length(), seed); } @@ -56,7 +56,7 @@ public static int getBucket(String key, int seed, int algo) { } /*package private*/ - static int legacy_hash(String key, int seed) { + static int legacyHash(String key, int seed) { int h = 0; for (int i = 0; i < key.length(); i++) { h = 31 * h + key.charAt(i); diff --git a/client/src/main/java/io/split/engine/sse/AuthApiClientImp.java b/client/src/main/java/io/split/engine/sse/AuthApiClientImp.java index 6b4971cc4..5c45e1b7f 100644 --- a/client/src/main/java/io/split/engine/sse/AuthApiClientImp.java +++ b/client/src/main/java/io/split/engine/sse/AuthApiClientImp.java @@ -1,34 +1,34 @@ package io.split.engine.sse; import com.google.gson.JsonObject; +import io.split.Spec; +import io.split.client.dtos.SplitHttpResponse; import io.split.client.utils.Json; +import io.split.engine.common.FetchOptions; import 
io.split.engine.sse.dtos.AuthenticationResponse; import io.split.engine.sse.dtos.RawAuthResponse; +import io.split.service.SplitHttpClient; import io.split.telemetry.domain.enums.HTTPLatenciesEnum; import io.split.telemetry.domain.enums.LastSynchronizationRecordsEnum; import io.split.telemetry.storage.TelemetryRuntimeProducer; -import org.apache.hc.client5.http.classic.methods.HttpGet; -import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; -import org.apache.hc.client5.http.impl.classic.CloseableHttpResponse; import org.apache.hc.core5.http.HttpStatus; -import org.apache.hc.core5.http.io.entity.EntityUtils; import org.apache.hc.core5.net.URIBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.URI; -import java.nio.charset.StandardCharsets; import static com.google.common.base.Preconditions.checkNotNull; public class AuthApiClientImp implements AuthApiClient { - private static final Logger _log = LoggerFactory.getLogger(AuthApiClient.class); + private static final Logger _log = LoggerFactory.getLogger(AuthApiClientImp.class); - private final CloseableHttpClient _httpClient; + private static final String SPEC = "s"; + private final SplitHttpClient _httpClient; private final String _target; private final TelemetryRuntimeProducer _telemetryRuntimeProducer; - public AuthApiClientImp(String url, CloseableHttpClient httpClient, TelemetryRuntimeProducer telemetryRuntimeProducer) { + public AuthApiClientImp(String url, SplitHttpClient httpClient, TelemetryRuntimeProducer telemetryRuntimeProducer) { _httpClient = checkNotNull(httpClient); _target = checkNotNull(url); _telemetryRuntimeProducer = checkNotNull(telemetryRuntimeProducer); @@ -38,20 +38,17 @@ public AuthApiClientImp(String url, CloseableHttpClient httpClient, TelemetryRun public AuthenticationResponse Authenticate() { try { long initTime = System.currentTimeMillis(); - URI uri = new URIBuilder(_target).build(); - HttpGet request = new HttpGet(uri); - - 
CloseableHttpResponse response = _httpClient.execute(request); - Integer statusCode = response.getCode(); + URI uri = new URIBuilder(_target).addParameter(SPEC, "" + Spec.SPEC_1_3).build(); + SplitHttpResponse response = _httpClient.get(uri, new FetchOptions.Builder().cacheControlHeaders(false).build(), null); + Integer statusCode = response.statusCode(); if (statusCode == HttpStatus.SC_OK) { _log.debug(String.format("Success connection to: %s", _target)); - String jsonContent = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); _telemetryRuntimeProducer.recordTokenRefreshes(); _telemetryRuntimeProducer.recordSuccessfulSync(LastSynchronizationRecordsEnum.TOKEN, System.currentTimeMillis()); _telemetryRuntimeProducer.recordSyncLatency(HTTPLatenciesEnum.TOKEN, System.currentTimeMillis()-initTime); - return getSuccessResponse(jsonContent); + return getSuccessResponse(response.body()); } _log.error(String.format("Problem to connect to : %s. Response status: %s", _target, statusCode)); diff --git a/client/src/main/java/io/split/engine/sse/EventSourceClientImp.java b/client/src/main/java/io/split/engine/sse/EventSourceClientImp.java index 772ccfb48..212d929f3 100644 --- a/client/src/main/java/io/split/engine/sse/EventSourceClientImp.java +++ b/client/src/main/java/io/split/engine/sse/EventSourceClientImp.java @@ -1,11 +1,13 @@ package io.split.engine.sse; import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Strings; +import io.split.client.RequestDecorator; import io.split.engine.sse.client.RawEvent; import io.split.engine.sse.client.SSEClient; import io.split.engine.sse.dtos.SegmentQueueDto; import io.split.engine.sse.exceptions.EventParsingException; -import io.split.engine.sse.workers.SplitsWorker; +import io.split.engine.sse.workers.FeatureFlagsWorker; import io.split.engine.sse.workers.Worker; import io.split.telemetry.storage.TelemetryRuntimeProducer; import 
org.apache.hc.client5.http.impl.classic.CloseableHttpClient; @@ -15,6 +17,7 @@ import java.net.URI; import java.net.URISyntaxException; +import java.util.concurrent.ThreadFactory; import java.util.concurrent.atomic.AtomicBoolean; import static com.google.common.base.Preconditions.checkNotNull; @@ -37,7 +40,9 @@ public class EventSourceClientImp implements EventSourceClient { NotificationProcessor notificationProcessor, PushStatusTracker pushStatusTracker, CloseableHttpClient sseHttpClient, - TelemetryRuntimeProducer telemetryRuntimeProducer) { + TelemetryRuntimeProducer telemetryRuntimeProducer, + ThreadFactory threadFactory, + RequestDecorator requestDecorator) { _baseStreamingUrl = checkNotNull(baseStreamingUrl); _notificationParser = checkNotNull(notificationParser); _notificationProcessor = checkNotNull(notificationProcessor); @@ -46,21 +51,29 @@ public class EventSourceClientImp implements EventSourceClient { _sseClient = new SSEClient( inboundEvent -> { onMessage(inboundEvent); return null; }, status -> { _pushStatusTracker.handleSseStatus(status); return null; }, - sseHttpClient, telemetryRuntimeProducer); + sseHttpClient, + telemetryRuntimeProducer, + threadFactory, + requestDecorator); _firstEvent = new AtomicBoolean(); } public static EventSourceClientImp build(String baseStreamingUrl, - SplitsWorker splitsWorker, + FeatureFlagsWorker featureFlagsWorker, Worker segmentWorker, PushStatusTracker pushStatusTracker, - CloseableHttpClient sseHttpClient, TelemetryRuntimeProducer telemetryRuntimeProducer) { + CloseableHttpClient sseHttpClient, + TelemetryRuntimeProducer telemetryRuntimeProducer, + ThreadFactory threadFactory, + RequestDecorator requestDecorator) { return new EventSourceClientImp(baseStreamingUrl, new NotificationParserImp(), - NotificationProcessorImp.build(splitsWorker, segmentWorker, pushStatusTracker), + NotificationProcessorImp.build(featureFlagsWorker, segmentWorker, pushStatusTracker), pushStatusTracker, sseHttpClient, - 
telemetryRuntimeProducer); + telemetryRuntimeProducer, + threadFactory, + requestDecorator); } @Override @@ -80,6 +93,7 @@ public boolean start(String channelList, String token) { @Override public void stop() { + _log.info("Stopping EventSourceClientImp"); if (!_sseClient.isOpen()) { _log.info("Event Source Client is closed."); return; @@ -102,7 +116,7 @@ private void onMessage(RawEvent event) { if(_firstEvent.compareAndSet(false, true) && !ERROR.equals(type)){ _pushStatusTracker.handleSseStatus(SSEClient.StatusMessage.FIRST_EVENT); } - if (payload.length() > 0) { + if (!Strings.isNullOrEmpty(payload)) { _log.debug(String.format("Payload received: %s", payload)); switch (type) { case MESSAGE: @@ -118,7 +132,7 @@ private void onMessage(RawEvent event) { } catch (EventParsingException ex) { _log.debug(String.format("Error parsing the event: %s. Payload: %s", ex.getMessage(), ex.getPayload())); } catch (Exception e) { - _log.debug(String.format("Error onMessage: %s", e.getMessage())); + _log.debug(String.format("Error parsing the event id: %s. 
OnMessage: %s", event.id(), e.getMessage()), e); } } } \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/sse/NotificationParserImp.java b/client/src/main/java/io/split/engine/sse/NotificationParserImp.java index 802d94b54..8bfaf886c 100644 --- a/client/src/main/java/io/split/engine/sse/NotificationParserImp.java +++ b/client/src/main/java/io/split/engine/sse/NotificationParserImp.java @@ -1,7 +1,18 @@ package io.split.engine.sse; +import io.split.client.dtos.RuleBasedSegment; +import io.split.client.dtos.Split; import io.split.client.utils.Json; -import io.split.engine.sse.dtos.*; + +import io.split.engine.sse.dtos.ControlNotification; +import io.split.engine.sse.dtos.ErrorNotification; +import io.split.engine.sse.dtos.CommonChangeNotification; +import io.split.engine.sse.dtos.GenericNotificationData; +import io.split.engine.sse.dtos.IncomingNotification; +import io.split.engine.sse.dtos.OccupancyNotification; +import io.split.engine.sse.dtos.RawMessageNotification; +import io.split.engine.sse.dtos.SegmentChangeNotification; +import io.split.engine.sse.dtos.SplitKillNotification; import io.split.engine.sse.exceptions.EventParsingException; public class NotificationParserImp implements NotificationParser { @@ -13,11 +24,9 @@ public IncomingNotification parseMessage(String payload) throws EventParsingExce RawMessageNotification rawMessageNotification = Json.fromJson(payload, RawMessageNotification.class); GenericNotificationData genericNotificationData = Json.fromJson(rawMessageNotification.getData(), GenericNotificationData.class); genericNotificationData.setChannel(rawMessageNotification.getChannel()); - if (rawMessageNotification.getChannel().contains(OCCUPANCY_PREFIX)) { return parseControlChannelMessage(genericNotificationData); } - return parseNotification(genericNotificationData); } catch (Exception ex) { throw new EventParsingException("Error parsing event.", ex, payload); @@ -28,11 +37,9 @@ public IncomingNotification 
parseMessage(String payload) throws EventParsingExce public ErrorNotification parseError(String payload) throws EventParsingException { try { ErrorNotification messageError = Json.fromJson(payload, ErrorNotification.class); - if (messageError.getMessage() == null || messageError.getStatusCode() == null) { throw new Exception("Wrong notification format."); } - return messageError; } catch (Exception ex) { throw new EventParsingException("Error parsing event.", ex, payload); @@ -42,7 +49,9 @@ public ErrorNotification parseError(String payload) throws EventParsingException private IncomingNotification parseNotification(GenericNotificationData genericNotificationData) throws Exception { switch (genericNotificationData.getType()) { case SPLIT_UPDATE: - return new SplitChangeNotification(genericNotificationData); + return new CommonChangeNotification(genericNotificationData, Split.class); + case RB_SEGMENT_UPDATE: + return new CommonChangeNotification(genericNotificationData, RuleBasedSegment.class); case SPLIT_KILL: return new SplitKillNotification(genericNotificationData); case SEGMENT_UPDATE: @@ -59,7 +68,6 @@ private IncomingNotification parseControlChannelMessage(GenericNotificationData if (genericNotificationData.getControlType() != null) { return new ControlNotification(genericNotificationData); } - return new OccupancyNotification(genericNotificationData); } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/sse/NotificationProcessor.java b/client/src/main/java/io/split/engine/sse/NotificationProcessor.java index 20f8af7fa..fce86757c 100644 --- a/client/src/main/java/io/split/engine/sse/NotificationProcessor.java +++ b/client/src/main/java/io/split/engine/sse/NotificationProcessor.java @@ -1,12 +1,13 @@ package io.split.engine.sse; import io.split.engine.sse.dtos.IncomingNotification; +import io.split.engine.sse.dtos.SplitKillNotification; import io.split.engine.sse.dtos.StatusNotification; public interface NotificationProcessor { 
void process(IncomingNotification notification); - void processSplitUpdate(long changeNumber); - void processSplitKill(long changeNumber, String splitName, String defaultTreatment); + void processUpdates(IncomingNotification notification); + void processSplitKill(SplitKillNotification splitKillNotification); void processSegmentUpdate(long changeNumber, String segmentName); void processStatus(StatusNotification statusNotification); -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/sse/NotificationProcessorImp.java b/client/src/main/java/io/split/engine/sse/NotificationProcessorImp.java index c8271c9ec..b833efc31 100644 --- a/client/src/main/java/io/split/engine/sse/NotificationProcessorImp.java +++ b/client/src/main/java/io/split/engine/sse/NotificationProcessorImp.java @@ -1,46 +1,53 @@ package io.split.engine.sse; import com.google.common.annotations.VisibleForTesting; +import io.split.client.dtos.Split; +import io.split.engine.sse.dtos.GenericNotificationData; import io.split.engine.sse.dtos.IncomingNotification; +import io.split.engine.sse.dtos.SplitKillNotification; import io.split.engine.sse.dtos.StatusNotification; import io.split.engine.sse.dtos.SegmentQueueDto; -import io.split.engine.sse.workers.SplitsWorker; +import io.split.engine.sse.dtos.CommonChangeNotification; +import io.split.engine.sse.workers.FeatureFlagsWorker; import io.split.engine.sse.workers.Worker; import static com.google.common.base.Preconditions.checkNotNull; public class NotificationProcessorImp implements NotificationProcessor { - private final SplitsWorker _splitsWorker; + private final FeatureFlagsWorker _featureFlagsWorker; private final Worker _segmentWorker; private final PushStatusTracker _pushStatusTracker; @VisibleForTesting - /* package private */ NotificationProcessorImp(SplitsWorker splitsWorker, + /* package private */ NotificationProcessorImp(FeatureFlagsWorker featureFlagsWorker, Worker segmentWorker, PushStatusTracker pushStatusTracker) 
{ - _splitsWorker = checkNotNull(splitsWorker); + _featureFlagsWorker = checkNotNull(featureFlagsWorker); _segmentWorker = checkNotNull(segmentWorker); _pushStatusTracker = checkNotNull(pushStatusTracker); } - public static NotificationProcessorImp build(SplitsWorker splitsWorker, Worker segmentWorker, PushStatusTracker pushStatusTracker) { - return new NotificationProcessorImp(splitsWorker, segmentWorker, pushStatusTracker); + public static NotificationProcessorImp build(FeatureFlagsWorker featureFlagsWorker, Worker segmentWorker, + PushStatusTracker pushStatusTracker) { + return new NotificationProcessorImp(featureFlagsWorker, segmentWorker, pushStatusTracker); } - @Override - public void process(IncomingNotification notification) { - notification.handler(this); + public void processUpdates(IncomingNotification notification) { + _featureFlagsWorker.addToQueue(notification); } @Override - public void processSplitUpdate(long changeNumber) { - _splitsWorker.addToQueue(changeNumber); + public void process(IncomingNotification notification) { + notification.handler(this); } @Override - public void processSplitKill(long changeNumber, String splitName, String defaultTreatment) { - _splitsWorker.killSplit(changeNumber, splitName, defaultTreatment); - _splitsWorker.addToQueue(changeNumber); + public void processSplitKill(SplitKillNotification splitKillNotification) { + _featureFlagsWorker.kill(splitKillNotification); + _featureFlagsWorker.addToQueue(new CommonChangeNotification<>(GenericNotificationData.builder() + .changeNumber(splitKillNotification.getChangeNumber()) + .channel(splitKillNotification.getChannel()) + .build(), Split.class)); } @Override diff --git a/client/src/main/java/io/split/engine/sse/PushStatusTrackerImp.java b/client/src/main/java/io/split/engine/sse/PushStatusTrackerImp.java index cee18333c..083fc0d37 100644 --- a/client/src/main/java/io/split/engine/sse/PushStatusTrackerImp.java +++ 
b/client/src/main/java/io/split/engine/sse/PushStatusTrackerImp.java @@ -52,7 +52,8 @@ public void handleSseStatus(SSEClient.StatusMessage newStatus) { case FIRST_EVENT: if (SSEClient.StatusMessage.CONNECTED.equals(_sseStatus.get())) { _statusMessages.offer(PushManager.Status.STREAMING_READY); - _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.CONNECTION_ESTABLISHED.getType(),0l, System.currentTimeMillis())); + _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.CONNECTION_ESTABLISHED.getType(), + 0l, System.currentTimeMillis())); } case CONNECTED: _sseStatus.compareAndSet(SSEClient.StatusMessage.INITIALIZATION_IN_PROGRESS, SSEClient.StatusMessage.CONNECTED); @@ -98,14 +99,16 @@ public void handleIncomingControlEvent(ControlNotification controlNotification) } break; case STREAMING_PAUSED: - _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.STREAMING_STATUS.getType(), StreamEventsEnum.StreamingStatusValues.STREAMING_PAUSED.getValue(), System.currentTimeMillis())); + _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.STREAMING_STATUS.getType(), + StreamEventsEnum.StreamingStatusValues.STREAMING_PAUSED.getValue(), System.currentTimeMillis())); if (_backendStatus.compareAndSet(ControlType.STREAMING_RESUMED, ControlType.STREAMING_PAUSED) && _publishersOnline.get()) { // If there are no publishers online, the STREAMING_DOWN message should have already been sent _statusMessages.offer(PushManager.Status.STREAMING_DOWN); } break; case STREAMING_DISABLED: - _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.STREAMING_STATUS.getType(), StreamEventsEnum.StreamingStatusValues.STREAMING_DISABLED.getValue(), System.currentTimeMillis())); + _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.STREAMING_STATUS.getType(), + 
StreamEventsEnum.StreamingStatusValues.STREAMING_DISABLED.getValue(), System.currentTimeMillis())); _backendStatus.set(ControlType.STREAMING_DISABLED); _statusMessages.offer(PushManager.Status.STREAMING_OFF); break; @@ -130,7 +133,8 @@ public void handleIncomingOccupancyEvent(OccupancyNotification occupancyNotifica @Override public void handleIncomingAblyError(ErrorNotification notification) { _log.debug(String.format("handleIncomingAblyError: %s", notification.getMessage())); - _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.ABLY_ERROR.getType(), notification.getCode(), System.currentTimeMillis())); + _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.ABLY_ERROR.getType(), notification.getCode(), + System.currentTimeMillis())); if (_backendStatus.get().equals(ControlType.STREAMING_DISABLED)) { return; // Ignore } @@ -164,10 +168,12 @@ private boolean isPublishers() { private void recordTelemetryOcuppancy(OccupancyNotification occupancyNotification, int publishers) { if (CONTROL_PRI_CHANNEL.equals(occupancyNotification.getChannel())) { - _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.OCCUPANCY_PRI.getType(), publishers, System.currentTimeMillis())); + _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.OCCUPANCY_PRI.getType(), + publishers, System.currentTimeMillis())); } else if (CONTROL_SEC_CHANNEL.equals(occupancyNotification.getChannel())){ - _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.OCCUPANCY_SEC.getType(), publishers, System.currentTimeMillis())); + _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.OCCUPANCY_SEC.getType(), + publishers, System.currentTimeMillis())); } } diff --git a/client/src/main/java/io/split/engine/sse/client/SSEClient.java b/client/src/main/java/io/split/engine/sse/client/SSEClient.java index abb21fee5..4b5114062 100644 --- 
a/client/src/main/java/io/split/engine/sse/client/SSEClient.java +++ b/client/src/main/java/io/split/engine/sse/client/SSEClient.java @@ -1,13 +1,15 @@ package io.split.engine.sse.client; import com.google.common.base.Strings; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import io.split.client.RequestDecorator; +import io.split.client.utils.ApacheRequestDecorator; import io.split.telemetry.domain.StreamingEvent; import io.split.telemetry.domain.enums.StreamEventsEnum; import io.split.telemetry.storage.TelemetryRuntimeProducer; import org.apache.hc.client5.http.classic.methods.HttpGet; import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; import org.apache.hc.client5.http.impl.classic.CloseableHttpResponse; +import org.apache.hc.core5.io.CloseMode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -20,13 +22,16 @@ import java.net.URI; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; +import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; import java.util.function.Function; import static com.google.common.base.Preconditions.checkNotNull; +import static io.split.client.utils.SplitExecutorFactory.buildExecutorService; public class SSEClient { @@ -47,12 +52,9 @@ private enum ConnectionState { private final static String SOCKET_CLOSED_MESSAGE = "Socket closed"; private final static String KEEP_ALIVE_PAYLOAD = ":keepalive\n"; private final static long CONNECT_TIMEOUT = 30000; + private static final Lock lock = new ReentrantLock(); private static final Logger _log = LoggerFactory.getLogger(SSEClient.class); - - private final ExecutorService _connectionExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder() - 
.setDaemon(true) - .setNameFormat("SPLIT-SSEConnection-%d") - .build()); + private final ExecutorService _connectionExecutor; private final CloseableHttpClient _client; private final Function _eventCallback; private final Function _statusCallback; @@ -60,69 +62,87 @@ private enum ConnectionState { private final AtomicReference _ongoingResponse = new AtomicReference<>(); private final AtomicReference _ongoingRequest = new AtomicReference<>(); private AtomicBoolean _forcedStop; - + private final RequestDecorator _requestDecorator; private final TelemetryRuntimeProducer _telemetryRuntimeProducer; public SSEClient(Function eventCallback, - Function statusCallback, - CloseableHttpClient client, - TelemetryRuntimeProducer telemetryRuntimeProducer) { + Function statusCallback, + CloseableHttpClient client, + TelemetryRuntimeProducer telemetryRuntimeProducer, + ThreadFactory threadFactory, + RequestDecorator requestDecorator) { _eventCallback = eventCallback; _statusCallback = statusCallback; _client = client; _forcedStop = new AtomicBoolean(); _telemetryRuntimeProducer = checkNotNull(telemetryRuntimeProducer); + _connectionExecutor = buildExecutorService(threadFactory, "SPLIT-SSEConnection-%d"); + _requestDecorator = requestDecorator; } - public synchronized boolean open(URI uri) { - if (isOpen()) { - _log.info("SSEClient already open."); - return false; - } + public boolean open(URI uri) { + try { + lock.lock(); + if (isOpen()) { + _log.info("SSEClient already open."); + return false; + } - _statusCallback.apply(StatusMessage.INITIALIZATION_IN_PROGRESS); + _statusCallback.apply(StatusMessage.INITIALIZATION_IN_PROGRESS); - CountDownLatch signal = new CountDownLatch(1); - _connectionExecutor.submit(() -> connectAndLoop(uri, signal)); - try { - if (!signal.await(CONNECT_TIMEOUT, TimeUnit.SECONDS)) { + CountDownLatch signal = new CountDownLatch(1); + _connectionExecutor.submit(() -> connectAndLoop(uri, signal)); + try { + if (!signal.await(CONNECT_TIMEOUT, TimeUnit.SECONDS)) 
{ + return false; + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + if (e.getMessage() == null) { + _log.info("The thread was interrupted while opening SSEClient"); + return false; + } + _log.info(e.getMessage()); return false; } - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - _log.info(e.getMessage()); - return false; + return isOpen(); + } finally { + lock.unlock(); } - return isOpen(); } public boolean isOpen() { return (ConnectionState.OPEN.equals(_state.get())); } - public synchronized void close() { - _forcedStop.set(true); - if (_state.compareAndSet(ConnectionState.OPEN, ConnectionState.CLOSED)) { - if (_ongoingResponse.get() != null) { - try { + public void close() { + _log.debug("closing SSE client"); + try { + lock.lock(); + _forcedStop.set(true); + if (_state.compareAndSet(ConnectionState.OPEN, ConnectionState.CLOSED)) { + if (_ongoingResponse.get() != null) { _ongoingRequest.get().abort(); - _ongoingResponse.get().close(); - } catch (IOException e) { - _log.debug(String.format("SSEClient close forced: %s", e.getMessage())); + _ongoingResponse.get().close(CloseMode.IMMEDIATE); } } + } catch (Exception e) { + _log.debug("Exception in closing SSE client: " + e.getMessage()); + } finally { + lock.unlock(); } } private void connectAndLoop(URI uri, CountDownLatch signal) { checkNotNull(uri); checkNotNull(signal); - if (!establishConnection(uri, signal)) { - _statusCallback.apply(StatusMessage.NONRETRYABLE_ERROR); - return; - } try { + if (!establishConnection(uri, signal)) { + _statusCallback.apply(StatusMessage.RETRYABLE_ERROR); + return; + } + final InputStream stream = _ongoingResponse.get().getEntity().getContent(); final BufferedReader reader = new BufferedReader(new InputStreamReader(stream)); @@ -133,36 +153,46 @@ private void connectAndLoop(URI uri, CountDownLatch signal) { _log.debug(exc.getMessage()); if (SOCKET_CLOSED_MESSAGE.equals(exc.getMessage())) { // Connection closed by us 
_statusCallback.apply(StatusMessage.FORCED_STOP); - _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.SSE_CONNECTION_ERROR.getType(), StreamEventsEnum.SseConnectionErrorValues.REQUESTED_CONNECTION_ERROR.getValue(), System.currentTimeMillis())); + _telemetryRuntimeProducer.recordStreamingEvents( + new StreamingEvent(StreamEventsEnum.SSE_CONNECTION_ERROR.getType(), + StreamEventsEnum.SseConnectionErrorValues.REQUESTED_CONNECTION_ERROR.getValue(), + System.currentTimeMillis())); return; } // Connection closed by server _statusCallback.apply(StatusMessage.RETRYABLE_ERROR); - _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.SSE_CONNECTION_ERROR.getType(), StreamEventsEnum.SseConnectionErrorValues.NON_REQUESTED_CONNECTION_ERROR.getValue(), System.currentTimeMillis())); + _telemetryRuntimeProducer + .recordStreamingEvents(new StreamingEvent(StreamEventsEnum.SSE_CONNECTION_ERROR.getType(), + StreamEventsEnum.SseConnectionErrorValues.NON_REQUESTED_CONNECTION_ERROR.getValue(), + System.currentTimeMillis())); return; } catch (IOException exc) { // Other type of connection error - if(!_forcedStop.get()) { + if (!_forcedStop.get()) { _log.debug(String.format("SSE connection ended abruptly: %s. 
Retying", exc.getMessage())); - _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.SSE_CONNECTION_ERROR.getType(), StreamEventsEnum.SseConnectionErrorValues.REQUESTED_CONNECTION_ERROR.getValue(), System.currentTimeMillis())); + _telemetryRuntimeProducer.recordStreamingEvents( + new StreamingEvent(StreamEventsEnum.SSE_CONNECTION_ERROR.getType(), + StreamEventsEnum.SseConnectionErrorValues.REQUESTED_CONNECTION_ERROR.getValue(), + System.currentTimeMillis())); _statusCallback.apply(StatusMessage.RETRYABLE_ERROR); return; } - _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.SSE_CONNECTION_ERROR.getType(), StreamEventsEnum.SseConnectionErrorValues.NON_REQUESTED_CONNECTION_ERROR.getValue(), System.currentTimeMillis())); + _telemetryRuntimeProducer + .recordStreamingEvents(new StreamingEvent(StreamEventsEnum.SSE_CONNECTION_ERROR.getType(), + StreamEventsEnum.SseConnectionErrorValues.NON_REQUESTED_CONNECTION_ERROR.getValue(), + System.currentTimeMillis())); } } } catch (Exception e) { // Any other error non related to the connection disables streaming altogether - - _telemetryRuntimeProducer.recordStreamingEvents(new StreamingEvent(StreamEventsEnum.SSE_CONNECTION_ERROR.getType(), StreamEventsEnum.SseConnectionErrorValues.NON_REQUESTED_CONNECTION_ERROR.getValue(), System.currentTimeMillis())); + _log.debug(String.format("SSE connection exception: %s", e.getMessage())); + _telemetryRuntimeProducer + .recordStreamingEvents(new StreamingEvent(StreamEventsEnum.SSE_CONNECTION_ERROR.getType(), + StreamEventsEnum.SseConnectionErrorValues.NON_REQUESTED_CONNECTION_ERROR.getValue(), + System.currentTimeMillis())); _log.warn(e.getMessage(), e); _statusCallback.apply(StatusMessage.NONRETRYABLE_ERROR); } finally { - try { - _ongoingResponse.get().close(); - } catch (IOException e) { - _log.debug(e.getMessage()); - } - + _ongoingResponse.get().close(CloseMode.IMMEDIATE); _state.set(ConnectionState.CLOSED); 
_log.debug("SSEClient finished."); _forcedStop.set(false); @@ -170,17 +200,20 @@ private void connectAndLoop(URI uri, CountDownLatch signal) { } private boolean establishConnection(URI uri, CountDownLatch signal) { - _ongoingRequest.set(new HttpGet(uri)); - + HttpGet request = new HttpGet(uri); + request = (HttpGet) ApacheRequestDecorator.decorate(request, _requestDecorator); + _ongoingRequest.set(request); try { _ongoingResponse.set(_client.execute(_ongoingRequest.get())); if (_ongoingResponse.get().getCode() != 200) { + _log.error(String.format("Establishing connection, code error: %s. The url is %s", + _ongoingResponse.get().getCode(), uri.toURL())); return false; } _state.set(ConnectionState.OPEN); _statusCallback.apply(StatusMessage.CONNECTED); } catch (IOException exc) { - _log.error(String.format("Error establishConnection: %s", exc)); + _log.error(String.format("Error establishConnection to %s", uri), exc); return false; } finally { signal.countDown(); @@ -211,4 +244,4 @@ private void handleMessage(String message) { RawEvent e = RawEvent.fromString(message); _eventCallback.apply(e); } -} \ No newline at end of file +} diff --git a/client/src/main/java/io/split/engine/sse/dtos/CommonChangeNotification.java b/client/src/main/java/io/split/engine/sse/dtos/CommonChangeNotification.java new file mode 100644 index 000000000..f5d335ae5 --- /dev/null +++ b/client/src/main/java/io/split/engine/sse/dtos/CommonChangeNotification.java @@ -0,0 +1,88 @@ +package io.split.engine.sse.dtos; + +import io.split.client.utils.Json; +import io.split.engine.segments.SegmentSynchronizationTaskImp; +import io.split.engine.sse.NotificationProcessor; +import io.split.engine.sse.enums.CompressType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; +import java.util.Base64; +import java.util.zip.DataFormatException; + +import static 
io.split.engine.sse.utils.DecompressionUtil.gZipDecompress; +import static io.split.engine.sse.utils.DecompressionUtil.zLibDecompress; + +public class CommonChangeNotification extends IncomingNotification { + private static final Logger _log = LoggerFactory.getLogger(SegmentSynchronizationTaskImp.class); + private final long changeNumber; + private long previousChangeNumber; + private CompressType compressType; + private Y definition; + private Class _definitionClass; + + public CommonChangeNotification(GenericNotificationData genericNotificationData, + Class definitionClass) { + super(genericNotificationData.getType(), genericNotificationData.getChannel()); + changeNumber = genericNotificationData.getChangeNumber(); + _definitionClass = definitionClass; + + if(genericNotificationData.getPreviousChangeNumber() != null) { + previousChangeNumber = genericNotificationData.getPreviousChangeNumber(); + } + compressType = CompressType.from(genericNotificationData.getCompressType()); + if (compressType == null || genericNotificationData.getDefinition() == null) { + return; + } + try { + byte[] decodedBytes = Base64.getDecoder().decode(genericNotificationData.getDefinition()); + switch (compressType) { + case GZIP: + decodedBytes = gZipDecompress(decodedBytes); + break; + case ZLIB: + decodedBytes = zLibDecompress(decodedBytes); + break; + } + + updateDefinition(decodedBytes); + } catch (UnsupportedEncodingException | IllegalArgumentException e) { + _log.warn("Could not decode base64 data in definition", e); + } catch (DataFormatException d) { + _log.warn("Could not decompress definition with zlib algorithm", d); + } catch (IOException i) { + _log.warn("Could not decompress definition with gzip algorithm", i); + } + } + + public long getChangeNumber() { + return changeNumber; + } + public long getPreviousChangeNumber() { + return previousChangeNumber; + } + public CompressType getCompressType() { + return compressType; + } + + public Y getDefinition() { + return 
definition; + } + + @Override + public void handler(NotificationProcessor notificationProcessor) { + notificationProcessor.processUpdates(this); + } + + @Override + public String toString() { + return String.format("Type: %s; Channel: %s; ChangeNumber: %s", getType(), getChannel(), getChangeNumber()); + } + + private void updateDefinition(byte[] decodedBytes) { + definition = (Y) Json.fromJson(new String(decodedBytes, StandardCharsets.UTF_8), _definitionClass); + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/sse/dtos/GenericNotificationData.java b/client/src/main/java/io/split/engine/sse/dtos/GenericNotificationData.java index 416ab8375..998434ec9 100644 --- a/client/src/main/java/io/split/engine/sse/dtos/GenericNotificationData.java +++ b/client/src/main/java/io/split/engine/sse/dtos/GenericNotificationData.java @@ -1,5 +1,7 @@ package io.split.engine.sse.dtos; +import com.google.gson.annotations.SerializedName; + public class GenericNotificationData { private final Long changeNumber; private final String defaultTreatment; @@ -9,15 +11,24 @@ public class GenericNotificationData { private final String segmentName; private final IncomingNotification.Type type; private String channel; + @SerializedName("pcn") + private Long previousChangeNumber; + @SerializedName("d") + private String featureFlagDefinition; + @SerializedName("c") + private Integer compressType; - public GenericNotificationData (Long changeNumber, + private GenericNotificationData (Long changeNumber, String defaultTreatment, String splitName, ControlType controlType, OccupancyMetrics occupancyMetrics, String segmentName, IncomingNotification.Type type, - String channel) { + String channel, + Long previousChangeNumber, + String data, + Integer compressType) { this.changeNumber = changeNumber; this.defaultTreatment = defaultTreatment; this.splitName = splitName; @@ -26,6 +37,9 @@ public GenericNotificationData (Long changeNumber, this.segmentName = segmentName; 
this.type = type; this.channel = channel; + this.previousChangeNumber = previousChangeNumber; + this.featureFlagDefinition = data; + this.compressType = compressType; } public long getChangeNumber() { @@ -57,8 +71,100 @@ public IncomingNotification.Type getType() { } public String getChannel() { return channel; } + public Long getPreviousChangeNumber() { + return previousChangeNumber; + } + + public String getDefinition() { + return featureFlagDefinition; + } + + public Integer getCompressType() { + return compressType; + } public void setChannel(String channel) { this.channel = channel; } -} + + public static GenericNotificationData.Builder builder() { + return new GenericNotificationData.Builder(); + } + + public static final class Builder { + private Long changeNumber; + private String defaultTreatment; + private String featureFlagName; + private ControlType controlType; + private OccupancyMetrics metrics; + private String segmentName; + private IncomingNotification.Type type; + private String channel; + private Long previousChangeNumber; + private String featureFlagDefinition; + private Integer compressType; + + public Builder() { + } + + public Builder changeNumber(Long changeNumber) { + this.changeNumber = changeNumber; + return this; + } + + public Builder defaultTreatment(String defaultTreatment) { + this.defaultTreatment = defaultTreatment; + return this; + } + + public Builder featureFlagName(String featureFlagName) { + this.featureFlagName = featureFlagName; + return this; + } + + public Builder controlType(ControlType controlType) { + this.controlType = controlType; + return this; + } + + public Builder metrics(OccupancyMetrics occupancyMetrics) { + this.metrics = occupancyMetrics; + return this; + } + + public Builder segmentName(String segmentName) { + this.segmentName = segmentName; + return this; + } + + public Builder type(IncomingNotification.Type type) { + this.type = type; + return this; + } + + public Builder channel(String channel) { + 
this.channel = channel; + return this; + } + + public Builder previousChangeNumber(Long previousChangeNumber) { + this.previousChangeNumber = previousChangeNumber; + return this; + } + + public Builder featureFlagDefinition(String featureFlagDefinition) { + this.featureFlagDefinition = featureFlagDefinition; + return this; + } + + public Builder compressType(Integer compressType) { + this.compressType = compressType; + return this; + } + + public GenericNotificationData build() { + return new GenericNotificationData(changeNumber, defaultTreatment, featureFlagName, controlType, metrics, + segmentName, type, channel, previousChangeNumber, featureFlagDefinition, compressType); + } + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/sse/dtos/IncomingNotification.java b/client/src/main/java/io/split/engine/sse/dtos/IncomingNotification.java index aa476e431..ee00bafe4 100644 --- a/client/src/main/java/io/split/engine/sse/dtos/IncomingNotification.java +++ b/client/src/main/java/io/split/engine/sse/dtos/IncomingNotification.java @@ -5,6 +5,7 @@ public abstract class IncomingNotification { public enum Type { SPLIT_UPDATE, + RB_SEGMENT_UPDATE, SPLIT_KILL, SEGMENT_UPDATE, CONTROL, diff --git a/client/src/main/java/io/split/engine/sse/dtos/RawAuthResponse.java b/client/src/main/java/io/split/engine/sse/dtos/RawAuthResponse.java index 4e0026420..08f21d8a4 100644 --- a/client/src/main/java/io/split/engine/sse/dtos/RawAuthResponse.java +++ b/client/src/main/java/io/split/engine/sse/dtos/RawAuthResponse.java @@ -22,7 +22,7 @@ public RawAuthResponse(boolean pushEnabled, String token) { this.pushEnabled = pushEnabled; this.token = token; - if (token != null && token != "") { + if (token != null && !token.isEmpty()) { String tokenDecoded = decodeJwt(); this.jwt = Json.fromJson(tokenDecoded, Jwt.class); } else { @@ -60,4 +60,4 @@ private String addPrefixControlChannels(String channels) { .replace("control_pri", 
"[?occupancy=metrics.publishers]control_pri") .replace("control_sec", "[?occupancy=metrics.publishers]control_sec"); } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/sse/dtos/RawMessageNotification.java b/client/src/main/java/io/split/engine/sse/dtos/RawMessageNotification.java index f39bc8b20..9fc5ad6cd 100644 --- a/client/src/main/java/io/split/engine/sse/dtos/RawMessageNotification.java +++ b/client/src/main/java/io/split/engine/sse/dtos/RawMessageNotification.java @@ -1,7 +1,5 @@ package io.split.engine.sse.dtos; -import java.util.Map; - public class RawMessageNotification { private String id; private String clientId; diff --git a/client/src/main/java/io/split/engine/sse/dtos/SegmentChangeNotification.java b/client/src/main/java/io/split/engine/sse/dtos/SegmentChangeNotification.java index de542cfda..7eb2f9122 100644 --- a/client/src/main/java/io/split/engine/sse/dtos/SegmentChangeNotification.java +++ b/client/src/main/java/io/split/engine/sse/dtos/SegmentChangeNotification.java @@ -27,6 +27,7 @@ public void handler(NotificationProcessor notificationProcessor) { @Override public String toString() { - return String.format("Type: %s; Channel: %s; ChangeNumber: %s; SegmentName: %s", getType(), getChannel(), getChangeNumber(), getSegmentName()); + return String.format("Type: %s; Channel: %s; ChangeNumber: %s; SegmentName: %s", getType(), getChannel(), getChangeNumber(), + getSegmentName()); } } diff --git a/client/src/main/java/io/split/engine/sse/dtos/SplitChangeNotification.java b/client/src/main/java/io/split/engine/sse/dtos/SplitChangeNotification.java deleted file mode 100644 index 56b8c32c5..000000000 --- a/client/src/main/java/io/split/engine/sse/dtos/SplitChangeNotification.java +++ /dev/null @@ -1,26 +0,0 @@ -package io.split.engine.sse.dtos; - -import io.split.engine.sse.NotificationProcessor; - -public class SplitChangeNotification extends IncomingNotification { - private final long changeNumber; - - public 
SplitChangeNotification(GenericNotificationData genericNotificationData) { - super(Type.SPLIT_UPDATE, genericNotificationData.getChannel()); - this.changeNumber = genericNotificationData.getChangeNumber(); - } - - public long getChangeNumber() { - return changeNumber; - } - - @Override - public void handler(NotificationProcessor notificationProcessor) { - notificationProcessor.processSplitUpdate(getChangeNumber()); - } - - @Override - public String toString() { - return String.format("Type: %s; Channel: %s; ChangeNumber: %s", getType(), getChannel(), getChangeNumber()); - } -} diff --git a/client/src/main/java/io/split/engine/sse/dtos/SplitKillNotification.java b/client/src/main/java/io/split/engine/sse/dtos/SplitKillNotification.java index ed4700352..c5a02a662 100644 --- a/client/src/main/java/io/split/engine/sse/dtos/SplitKillNotification.java +++ b/client/src/main/java/io/split/engine/sse/dtos/SplitKillNotification.java @@ -28,11 +28,12 @@ public String getSplitName() { @Override public void handler(NotificationProcessor notificationProcessor) { - notificationProcessor.processSplitKill(getChangeNumber(), getSplitName(), getDefaultTreatment()); + notificationProcessor.processSplitKill(this); } @Override public String toString() { - return String.format("Type: %s; Channel: %s; ChangeNumber: %s; DefaultTreatment: %s; SplitName: %s", getType(), getChannel(), getChangeNumber(), getDefaultTreatment(), getSplitName()); + return String.format("Type: %s; Channel: %s; ChangeNumber: %s; DefaultTreatment: %s; SplitName: %s", getType(), getChannel(), + getChangeNumber(), getDefaultTreatment(), getSplitName()); } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/sse/enums/CompressType.java b/client/src/main/java/io/split/engine/sse/enums/CompressType.java new file mode 100644 index 000000000..ed78a33b8 --- /dev/null +++ b/client/src/main/java/io/split/engine/sse/enums/CompressType.java @@ -0,0 +1,39 @@ +package io.split.engine.sse.enums; + 
+import java.util.HashMap; +import java.util.Map; + +public enum CompressType { + NOT_COMPRESSED(0), + GZIP(1), + ZLIB(2); + + private final Integer value; + + CompressType(Integer value) { + this.value = value; + } + + public long getValue() { + return value; + } + + // Mapping compress type to compress type id + private static final Map _map = new HashMap<>(); + static { + for (CompressType compressType : CompressType.values()) + _map.put(compressType.value, compressType); + } + + /** + * Get compress type from value + * @param value value + * @return CompressType + */ + public static CompressType from(Integer value) { + if (value == null || _map.size() <= value){ + return null; + } + return _map.get(value); + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/sse/utils/DecompressionUtil.java b/client/src/main/java/io/split/engine/sse/utils/DecompressionUtil.java new file mode 100644 index 000000000..522425e45 --- /dev/null +++ b/client/src/main/java/io/split/engine/sse/utils/DecompressionUtil.java @@ -0,0 +1,42 @@ +package io.split.engine.sse.utils; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.zip.DataFormatException; +import java.util.zip.GZIPInputStream; +import java.util.zip.Inflater; + +public class DecompressionUtil { + + public static byte[] zLibDecompress(byte[] toDecompress) throws DataFormatException { + ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(toDecompress.length); + Inflater decompressor = new Inflater(); + try { + decompressor.setInput(toDecompress); + final byte[] buf = new byte[toDecompress.length]; + while (!decompressor.finished()) { + int count = decompressor.inflate(buf); + byteArrayOutputStream.write(buf, 0, count); + } + } finally { + decompressor.end(); + } + return byteArrayOutputStream.toByteArray(); + } + + public static byte[] gZipDecompress(byte[] toDecompress) throws IOException { + 
ByteArrayOutputStream out = new ByteArrayOutputStream(); + try (GZIPInputStream gzipInputStream = new GZIPInputStream(new ByteArrayInputStream(toDecompress))){ + int res = 0; + byte buf[] = new byte[toDecompress.length]; + while (res >= 0) { + res = gzipInputStream.read(buf, 0, buf.length); + if (res > 0) { + out.write(buf, 0, res); + } + } + } + return out.toByteArray(); + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/sse/workers/FeatureFlagWorkerImp.java b/client/src/main/java/io/split/engine/sse/workers/FeatureFlagWorkerImp.java new file mode 100644 index 000000000..d15d2a438 --- /dev/null +++ b/client/src/main/java/io/split/engine/sse/workers/FeatureFlagWorkerImp.java @@ -0,0 +1,136 @@ +package io.split.engine.sse.workers; + +import io.split.client.dtos.RuleBasedSegment; +import io.split.client.dtos.Split; +import io.split.client.interceptors.FlagSetsFilter; +import io.split.client.utils.FeatureFlagsToUpdate; +import io.split.client.utils.RuleBasedSegmentsToUpdate; +import io.split.engine.common.Synchronizer; +import io.split.engine.experiments.RuleBasedSegmentParser; +import io.split.engine.experiments.SplitParser; +import io.split.engine.sse.dtos.CommonChangeNotification; +import io.split.engine.sse.dtos.IncomingNotification; +import io.split.engine.sse.dtos.SplitKillNotification; +import io.split.storages.RuleBasedSegmentCache; +import io.split.storages.SplitCacheProducer; +import io.split.telemetry.domain.enums.UpdatesFromSSEEnum; +import io.split.telemetry.storage.TelemetryRuntimeProducer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Collections; +import java.util.Set; + +import static com.google.common.base.Preconditions.checkNotNull; +import static io.split.client.utils.FeatureFlagProcessor.processFeatureFlagChanges; +import static io.split.client.utils.RuleBasedSegmentProcessor.processRuleBasedSegmentChanges; + +public class FeatureFlagWorkerImp extends Worker implements 
FeatureFlagsWorker { + private static final Logger _log = LoggerFactory.getLogger(FeatureFlagWorkerImp.class); + private final Synchronizer _synchronizer; + private final SplitParser _splitParser; + private final RuleBasedSegmentParser _ruleBasedSegmentParser; + private final SplitCacheProducer _splitCacheProducer; + private final RuleBasedSegmentCache _ruleBasedSegmentCache; + private final TelemetryRuntimeProducer _telemetryRuntimeProducer; + private final FlagSetsFilter _flagSetsFilter; + + public FeatureFlagWorkerImp(Synchronizer synchronizer, SplitParser splitParser, RuleBasedSegmentParser ruleBasedSegmentParser, + SplitCacheProducer splitCacheProducer, + RuleBasedSegmentCache ruleBasedSegmentCache, + TelemetryRuntimeProducer telemetryRuntimeProducer, FlagSetsFilter flagSetsFilter) { + super("Feature flags"); + _synchronizer = checkNotNull(synchronizer); + _splitParser = splitParser; + _ruleBasedSegmentParser = ruleBasedSegmentParser; + _splitCacheProducer = splitCacheProducer; + _telemetryRuntimeProducer = telemetryRuntimeProducer; + _flagSetsFilter = flagSetsFilter; + _ruleBasedSegmentCache = ruleBasedSegmentCache; + } + + @Override + public void kill(SplitKillNotification splitKillNotification) { + try { + _synchronizer.localKillSplit(splitKillNotification); + _log.debug(String.format("Kill feature flag: %s, changeNumber: %s, defaultTreatment: %s", splitKillNotification.getSplitName(), + splitKillNotification.getChangeNumber(), splitKillNotification.getDefaultTreatment())); + } catch (Exception ex) { + _log.warn(String.format("Exception on FeatureFlagWorker kill: %s", ex.getMessage())); + } + } + + @Override + protected void executeRefresh(IncomingNotification incomingNotification) { + boolean success; + long changeNumber = 0L; + long changeNumberRBS = 0L; + if (incomingNotification.getType() == IncomingNotification.Type.SPLIT_UPDATE) { + CommonChangeNotification featureFlagChangeNotification = (CommonChangeNotification) incomingNotification; + success = 
addOrUpdateFeatureFlag(featureFlagChangeNotification); + changeNumber = featureFlagChangeNotification.getChangeNumber(); + } else { + CommonChangeNotification ruleBasedSegmentChangeNotification = (CommonChangeNotification) incomingNotification; + success = addOrUpdateRuleBasedSegment(ruleBasedSegmentChangeNotification); + changeNumberRBS = ruleBasedSegmentChangeNotification.getChangeNumber(); + } + if (!success) + _synchronizer.refreshSplits(changeNumber, changeNumberRBS); + } + + private boolean addOrUpdateRuleBasedSegment(CommonChangeNotification ruleBasedSegmentChangeNotification) { + if (ruleBasedSegmentChangeNotification.getChangeNumber() <= _ruleBasedSegmentCache.getChangeNumber()) { + return true; + } + try { + if (ruleBasedSegmentChangeNotification.getDefinition() != null && + ruleBasedSegmentChangeNotification.getPreviousChangeNumber() == _ruleBasedSegmentCache.getChangeNumber()) { + RuleBasedSegment ruleBasedSegment = (RuleBasedSegment) ruleBasedSegmentChangeNotification.getDefinition(); + RuleBasedSegmentsToUpdate ruleBasedSegmentsToUpdate = processRuleBasedSegmentChanges(_ruleBasedSegmentParser, + Collections.singletonList(ruleBasedSegment)); + _ruleBasedSegmentCache.update(ruleBasedSegmentsToUpdate.getToAdd(), ruleBasedSegmentsToUpdate.getToRemove(), + ruleBasedSegmentChangeNotification.getChangeNumber()); + Set segments = ruleBasedSegmentsToUpdate.getSegments(); + for (String segmentName: segments) { + _synchronizer.forceRefreshSegment(segmentName); + } + // TODO: Add Telemetry once it is spec'd +// _telemetryRuntimeProducer.recordUpdatesFromSSE(UpdatesFromSSEEnum.RULE_BASED_SEGMENTS); + return true; + } + } catch (Exception e) { + _log.warn("Something went wrong processing a Rule based Segment notification", e); + } + return false; + } + private boolean addOrUpdateFeatureFlag(CommonChangeNotification featureFlagChangeNotification) { + if (featureFlagChangeNotification.getChangeNumber() <= _splitCacheProducer.getChangeNumber()) { + return true; + } + 
try { + if (featureFlagChangeNotification.getDefinition() != null && + featureFlagChangeNotification.getPreviousChangeNumber() == _splitCacheProducer.getChangeNumber()) { + Split featureFlag = (Split) featureFlagChangeNotification.getDefinition(); + FeatureFlagsToUpdate featureFlagsToUpdate = processFeatureFlagChanges(_splitParser, Collections.singletonList(featureFlag), + _flagSetsFilter); + _splitCacheProducer.update(featureFlagsToUpdate.getToAdd(), featureFlagsToUpdate.getToRemove(), + featureFlagChangeNotification.getChangeNumber()); + Set segments = featureFlagsToUpdate.getSegments(); + for (String segmentName: segments) { + _synchronizer.forceRefreshSegment(segmentName); + } + if (featureFlagsToUpdate.getToAdd().stream().count() > 0) { + Set ruleBasedSegments = featureFlagsToUpdate.getToAdd().get(0).getRuleBasedSegmentsNames(); + if (!ruleBasedSegments.isEmpty() && !_ruleBasedSegmentCache.contains(ruleBasedSegments)) { + return false; + } + } + _telemetryRuntimeProducer.recordUpdatesFromSSE(UpdatesFromSSEEnum.SPLITS); + return true; + } + } catch (Exception e) { + _log.warn("Something went wrong processing a Feature Flag notification", e); + } + return false; + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/sse/workers/FeatureFlagsWorker.java b/client/src/main/java/io/split/engine/sse/workers/FeatureFlagsWorker.java new file mode 100644 index 000000000..b2cc1fbbc --- /dev/null +++ b/client/src/main/java/io/split/engine/sse/workers/FeatureFlagsWorker.java @@ -0,0 +1,11 @@ +package io.split.engine.sse.workers; + +import io.split.engine.sse.dtos.IncomingNotification; +import io.split.engine.sse.dtos.SplitKillNotification; + +public interface FeatureFlagsWorker { + void addToQueue(IncomingNotification incomingNotification); + void start(); + void stop(); + void kill(SplitKillNotification splitKillNotification); +} \ No newline at end of file diff --git a/client/src/main/java/io/split/engine/sse/workers/SplitsWorker.java 
b/client/src/main/java/io/split/engine/sse/workers/SplitsWorker.java deleted file mode 100644 index 3664b7cd4..000000000 --- a/client/src/main/java/io/split/engine/sse/workers/SplitsWorker.java +++ /dev/null @@ -1,8 +0,0 @@ -package io.split.engine.sse.workers; - -public interface SplitsWorker { - void addToQueue(Long element); - void start(); - void stop(); - void killSplit(long changeNumber, String splitName, String defaultTreatment); -} diff --git a/client/src/main/java/io/split/engine/sse/workers/SplitsWorkerImp.java b/client/src/main/java/io/split/engine/sse/workers/SplitsWorkerImp.java deleted file mode 100644 index 16e155ed7..000000000 --- a/client/src/main/java/io/split/engine/sse/workers/SplitsWorkerImp.java +++ /dev/null @@ -1,29 +0,0 @@ -package io.split.engine.sse.workers; - -import io.split.engine.common.Synchronizer; - -import static com.google.common.base.Preconditions.checkNotNull; - -public class SplitsWorkerImp extends Worker implements SplitsWorker { - private final Synchronizer _synchronizer; - - public SplitsWorkerImp(Synchronizer synchronizer) { - super("Splits"); - _synchronizer = checkNotNull(synchronizer); - } - - @Override - public void killSplit(long changeNumber, String splitName, String defaultTreatment) { - try { - _synchronizer.localKillSplit(splitName, defaultTreatment, changeNumber); - _log.debug(String.format("Kill split: %s, changeNumber: %s, defaultTreatment: %s", splitName, changeNumber, defaultTreatment)); - } catch (Exception ex) { - _log.warn(String.format("Exception on SplitWorker killSplit: %s", ex.getMessage())); - } - } - - @Override - protected void executeRefresh(Long changeNumber) { - _synchronizer.refreshSplits(changeNumber); - } -} diff --git a/client/src/main/java/io/split/engine/sse/workers/Worker.java b/client/src/main/java/io/split/engine/sse/workers/Worker.java index 7d2dd21ab..b5ba84f24 100644 --- a/client/src/main/java/io/split/engine/sse/workers/Worker.java +++ 
b/client/src/main/java/io/split/engine/sse/workers/Worker.java @@ -22,11 +22,11 @@ public Worker(String workerName) { public void start() { if (_running.compareAndSet(false, true)) { - _log.debug(String.format("%s Worker starting ...", _workerName)); _queue.clear(); _thread = new Thread( this); _thread.setName(String.format("%s-worker", _workerName)); _thread.start(); + _log.debug(String.format("%s Worker started ...", _workerName)); } else { _log.debug(String.format("%s Worker already running.", _workerName)); return; diff --git a/client/src/main/java/io/split/inputValidation/FallbackTreatmentValidator.java b/client/src/main/java/io/split/inputValidation/FallbackTreatmentValidator.java new file mode 100644 index 000000000..9fafe5eea --- /dev/null +++ b/client/src/main/java/io/split/inputValidation/FallbackTreatmentValidator.java @@ -0,0 +1,67 @@ +package io.split.inputValidation; + +import io.split.client.dtos.FallbackTreatment; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import java.util.regex.Pattern; + +import static io.split.inputValidation.SplitNameValidator.isValid; + +public class FallbackTreatmentValidator { + private static final Logger _log = LoggerFactory.getLogger(FallbackTreatmentValidator.class); + private static final Pattern TREATMENT_MATCHER = Pattern.compile("^[0-9]+[.a-zA-Z0-9_-]*$|^[a-zA-Z]+[a-zA-Z0-9_-]*$"); + private static final int MAX_LENGTH = 100; + + public static String isValidTreatment(String name, String method) { + if (name == null) { + _log.error(String.format("%s: you passed a null treatment, fallback treatment must be a non-empty string", method)); + return null; + } + + if (name.isEmpty()) { + _log.error(String.format("%s: you passed an empty treatment, fallback treatment must be a non-empty string", method)); + return null; + } + + String trimmed = name.trim(); + if (!trimmed.equals(name)) { + _log.warn(String.format("%s: fallback 
treatment %s has extra whitespace, trimming", method, name)); + name = trimmed; + } + + if (name.length() > MAX_LENGTH) { + return null; + } + + if (!TREATMENT_MATCHER.matcher(name).find()) { + _log.error(String.format("%s: you passed %s, treatment must adhere to the regular expression " + + "^[0-9]+[.a-zA-Z0-9_-]*$|^[a-zA-Z]+[a-zA-Z0-9_-]*$", method, name)); + return null; + } + + return name; + } + + public static Map isValidByFlagTreatment(Map byFlagTreatment, String method) { + Map result = new HashMap<>(); + for (Map.Entry entry : byFlagTreatment.entrySet()) { + Optional featureName = isValid(entry.getKey(), method); + if (featureName.equals(Optional.empty()) || !featureName.isPresent()) { + continue; + } + + FallbackTreatment fallbackTreatment = entry.getValue(); + String treatment = isValidTreatment(fallbackTreatment.getTreatment(), method); + if (treatment != null) { + result.put(featureName.get(), new FallbackTreatment(treatment, fallbackTreatment.getConfig())); + } + } + + return result; + } +} diff --git a/client/src/main/java/io/split/inputValidation/FlagSetsValidResult.java b/client/src/main/java/io/split/inputValidation/FlagSetsValidResult.java new file mode 100644 index 000000000..a17cb7b38 --- /dev/null +++ b/client/src/main/java/io/split/inputValidation/FlagSetsValidResult.java @@ -0,0 +1,21 @@ +package io.split.inputValidation; + +import java.util.HashSet; + +public class FlagSetsValidResult { + private final Boolean _valid; + private final HashSet _flagSets; + + public FlagSetsValidResult(Boolean valid, HashSet flagSets) { + _valid = valid; + _flagSets = flagSets; + } + + public Boolean getValid() { + return _valid; + } + + public HashSet getFlagSets() { + return _flagSets; + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/inputValidation/FlagSetsValidator.java b/client/src/main/java/io/split/inputValidation/FlagSetsValidator.java new file mode 100644 index 000000000..6b6df2d88 --- /dev/null +++ 
b/client/src/main/java/io/split/inputValidation/FlagSetsValidator.java @@ -0,0 +1,44 @@ +package io.split.inputValidation; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; +import java.util.Set; +import java.util.TreeSet; +import java.util.regex.Pattern; + +public final class FlagSetsValidator { + + private static final String FLAG_SET_REGEX = "^[a-z0-9][_a-z0-9]{0,49}$"; + private static final Logger _log = LoggerFactory.getLogger(FlagSetsValidator.class); + + private FlagSetsValidator() { + throw new IllegalStateException("Utility class"); + } + + public static Set cleanup(List flagSets) { + TreeSet cleanFlagSets = new TreeSet<>(); + if (flagSets == null || flagSets.isEmpty()) { + return cleanFlagSets; + } + for (String flagSet: flagSets) { + if(!flagSet.equals(flagSet.toLowerCase())) { + _log.warn(String.format("Flag Set name %s should be all lowercase - converting string to lowercase", flagSet)); + flagSet = flagSet.toLowerCase(); + } + if (!flagSet.equals(flagSet.trim())) { + _log.warn(String.format("Flag Set name %s has extra whitespace, trimming", flagSet)); + flagSet = flagSet.trim(); + } + if (!Pattern.matches(FLAG_SET_REGEX, flagSet)) { + _log.warn(String.format("you passed %s, Flag Set must adhere to the regular expressions %s. This means a Flag Set must be " + + "start with a letter or number, be in lowercase, alphanumeric and have a max length of 50 characters. 
%s was discarded.", + flagSet, FLAG_SET_REGEX, flagSet)); + continue; + } + cleanFlagSets.add(flagSet); + } + return cleanFlagSets; + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/inputValidation/ImpressionPropertiesValidator.java b/client/src/main/java/io/split/inputValidation/ImpressionPropertiesValidator.java new file mode 100644 index 000000000..3fca11635 --- /dev/null +++ b/client/src/main/java/io/split/inputValidation/ImpressionPropertiesValidator.java @@ -0,0 +1,21 @@ +package io.split.inputValidation; + +import java.util.Map; + +public class ImpressionPropertiesValidator { + ImpressionPropertiesValidator() { + throw new IllegalStateException("Utility class"); + } + + public static ImpressionPropertiesValidatorResult propertiesAreValid(Map properties) { + EventsValidator.EventValidatorResult result = EventsValidator.propertiesAreValid(properties); + return new ImpressionPropertiesValidatorResult(result.getSuccess(), result.getEventSize(), result.getValue()); + } + + public static class ImpressionPropertiesValidatorResult extends EventsValidator.EventValidatorResult { + public ImpressionPropertiesValidatorResult(boolean success, int eventSize, Map value) { + super(success, eventSize, value); + } + } +} + diff --git a/client/src/main/java/io/split/inputValidation/SplitNameValidator.java b/client/src/main/java/io/split/inputValidation/SplitNameValidator.java index a2f784349..f138f51c1 100644 --- a/client/src/main/java/io/split/inputValidation/SplitNameValidator.java +++ b/client/src/main/java/io/split/inputValidation/SplitNameValidator.java @@ -6,33 +6,46 @@ import java.util.List; import java.util.Objects; import java.util.Optional; +import java.util.regex.Pattern; import java.util.stream.Collectors; public class SplitNameValidator { private static final Logger _log = LoggerFactory.getLogger(SplitNameValidator.class); + private static final int MAX_LENGTH = 100; + private static final Pattern NAME_MATCHER = 
Pattern.compile("^[0-9]+[.a-zA-Z0-9_-]*$|^[a-zA-Z]+[a-zA-Z0-9_-]*$"); public static Optional isValid(String name, String method) { if (name == null) { - _log.error(String.format("%s: you passed a null split name, split name must be a non-empty string", method)); + _log.error(String.format("%s: you passed a null feature flag name, feature flag name must be a non-empty string", method)); return Optional.empty(); } if (name.isEmpty()) { - _log.error(String.format("%s: you passed an empty split name, split name must be a non-empty string", method)); + _log.error(String.format("%s: you passed an empty feature flag name, feature flag name must be a non-empty string", method)); return Optional.empty(); } String trimmed = name.trim(); if (!trimmed.equals(name)) { - _log.warn(String.format("%s: split name %s has extra whitespace, trimming", method, name)); + _log.warn(String.format("%s: feature flag name %s has extra whitespace, trimming", method, name)); name = trimmed; } + if (name.length() > MAX_LENGTH) { + return Optional.empty(); + } + + if (!NAME_MATCHER.matcher(name).find()) { + _log.error(String.format("%s: you passed %s, feature flag name must adhere to the regular expression " + + "^[0-9]+[.a-zA-Z0-9_-]*$|^[a-zA-Z]+[a-zA-Z0-9_-]*$", method, name)); + return Optional.empty(); + } + return Optional.of(name); } - public static List areValid(List splits, String method) { - return splits.stream().distinct() + public static List areValid(List featureFlags, String method) { + return featureFlags.stream().distinct() .map(s -> isValid(s, method).orElse(null)) .filter(Objects::nonNull) .collect(Collectors.toList()); diff --git a/client/src/main/java/io/split/inputValidation/TrafficTypeValidator.java b/client/src/main/java/io/split/inputValidation/TrafficTypeValidator.java index a923a59c1..6533011a6 100644 --- a/client/src/main/java/io/split/inputValidation/TrafficTypeValidator.java +++ b/client/src/main/java/io/split/inputValidation/TrafficTypeValidator.java @@ -26,8 +26,8 
@@ public static Optional isValid(String trafficTypeName, SplitCacheConsume } if (!splitCacheConsumer.trafficTypeExists(trafficTypeName)) { - _log.warn(String.format("%s: Traffic Type %s does not have any corresponding Splits in this environment, " + - "make sure you’re tracking your events to a valid traffic type defined in the Split console.", method, trafficTypeName)); + _log.warn(String.format("%s: Traffic Type %s does not have any corresponding Feature flags in this environment, " + + "make sure you’re tracking your events to a valid traffic type defined in the Split user interface.", method, trafficTypeName)); } return Optional.of(trafficTypeName); diff --git a/client/src/main/java/io/split/integrations/IntegrationsConfig.java b/client/src/main/java/io/split/integrations/IntegrationsConfig.java index b13f6f114..1a0fa4846 100644 --- a/client/src/main/java/io/split/integrations/IntegrationsConfig.java +++ b/client/src/main/java/io/split/integrations/IntegrationsConfig.java @@ -51,10 +51,14 @@ public Builder() { } public Builder impressionsListener(ImpressionListener listener, int queueSize) { + return impressionsListener(listener, queueSize, Execution.ASYNC); + } + + public Builder impressionsListener(ImpressionListener listener, int queueSize, Execution executionType) { if (queueSize <= 0) { throw new IllegalArgumentException("An ImpressionListener was provided, but its capacity was non-positive: " + queueSize); } - _listeners.add(new ImpressionListenerWithMeta(listener, Execution.ASYNC, queueSize)); + _listeners.add(new ImpressionListenerWithMeta(listener, executionType, queueSize)); return this; } diff --git a/client/src/main/java/io/split/service/CustomHttpModule.java b/client/src/main/java/io/split/service/CustomHttpModule.java new file mode 100644 index 000000000..001648fb3 --- /dev/null +++ b/client/src/main/java/io/split/service/CustomHttpModule.java @@ -0,0 +1,11 @@ +package io.split.service; + +import io.split.client.RequestDecorator; +import 
io.split.client.utils.SDKMetadata; + +import java.io.IOException; + +public interface CustomHttpModule { + public SplitHttpClient createClient(String apiToken, SDKMetadata sdkMetadata, RequestDecorator decorator) + throws IOException; +} diff --git a/client/src/main/java/io/split/service/HttpPostImp.java b/client/src/main/java/io/split/service/HttpPostImp.java index 2e0212630..b33bf2103 100644 --- a/client/src/main/java/io/split/service/HttpPostImp.java +++ b/client/src/main/java/io/split/service/HttpPostImp.java @@ -1,50 +1,46 @@ package io.split.service; -import io.split.client.utils.Utils; -import io.split.telemetry.domain.enums.HTTPLatenciesEnum; +import io.split.client.dtos.SplitHttpResponse; +import io.split.client.utils.Json; import io.split.telemetry.domain.enums.HttpParamsWrapper; -import io.split.telemetry.domain.enums.LastSynchronizationRecordsEnum; -import io.split.telemetry.domain.enums.ResourceEnum; import io.split.telemetry.storage.TelemetryRuntimeProducer; -import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; -import org.apache.hc.client5.http.impl.classic.CloseableHttpResponse; -import org.apache.hc.core5.http.HttpEntity; import org.apache.hc.core5.http.HttpStatus; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.hc.client5.http.classic.methods.HttpPost; import java.net.URI; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import static com.google.common.base.Preconditions.checkNotNull; public class HttpPostImp { private static final Logger _logger = LoggerFactory.getLogger(HttpPostImp.class); - private CloseableHttpClient _client; + private SplitHttpClient _client; private final TelemetryRuntimeProducer _telemetryRuntimeProducer; - public HttpPostImp(CloseableHttpClient client, TelemetryRuntimeProducer telemetryRuntimeProducer) { + public HttpPostImp(SplitHttpClient client, TelemetryRuntimeProducer telemetryRuntimeProducer) { _client = client; 
_telemetryRuntimeProducer = checkNotNull(telemetryRuntimeProducer); } public void post(URI uri, Object object, String posted, HttpParamsWrapper httpParamsWrapper) { long initTime = System.currentTimeMillis(); - HttpEntity entity = Utils.toJsonEntity(object); - HttpPost request = new HttpPost(uri); - request.setEntity(entity); - try (CloseableHttpResponse response = _client.execute(request)) { - - int status = response.getCode(); - - if (status < HttpStatus.SC_OK || status >= HttpStatus.SC_MULTIPLE_CHOICES) { - _telemetryRuntimeProducer.recordSyncError(httpParamsWrapper.getResourceEnum(), status); - _logger.warn("Response status was: " + status); + try { + Map> headers = new HashMap<>(); + headers.put("Content-Type", Collections.singletonList("application/json")); + SplitHttpResponse response = _client.post(uri, Json.toJson(object), headers); + if (response.statusCode() < HttpStatus.SC_OK || response.statusCode() >= HttpStatus.SC_MULTIPLE_CHOICES) { + _telemetryRuntimeProducer.recordSyncError(httpParamsWrapper.getResourceEnum(), response.statusCode()); return; } - _telemetryRuntimeProducer.recordSyncLatency(httpParamsWrapper.getHttpLatenciesEnum(), System.currentTimeMillis() - initTime); - _telemetryRuntimeProducer.recordSuccessfulSync(httpParamsWrapper.getLastSynchronizationRecordsEnum(), System.currentTimeMillis()); + _telemetryRuntimeProducer.recordSyncLatency(httpParamsWrapper.getHttpLatenciesEnum(), + System.currentTimeMillis() - initTime); + _telemetryRuntimeProducer.recordSuccessfulSync(httpParamsWrapper.getLastSynchronizationRecordsEnum(), + System.currentTimeMillis()); } catch (Throwable t) { _logger.warn("Exception when posting " + posted + object, t); } diff --git a/client/src/main/java/io/split/service/SplitHttpClient.java b/client/src/main/java/io/split/service/SplitHttpClient.java new file mode 100644 index 000000000..899fcf56b --- /dev/null +++ b/client/src/main/java/io/split/service/SplitHttpClient.java @@ -0,0 +1,33 @@ +package io.split.service; + 
+import io.split.engine.common.FetchOptions; +import io.split.client.dtos.SplitHttpResponse; + +import java.io.Closeable; +import java.io.IOException; +import java.net.URI; +import java.util.List; +import java.util.Map; + +public interface SplitHttpClient extends Closeable { + /** + * Wrapper for HTTP get method + * + * @param uri the URL to be used + * @param options The FetchOptions object that contains headers. + * @return The response structure SplitHttpResponse + */ + public SplitHttpResponse get(URI uri, FetchOptions options, Map> additionalHeaders); + + /** + * Wrapper for HTTP post method + * + * @param uri the URL to be used + * @param entity HttpEntity object that has The body load + * @param additionalHeaders Any additional headers to be added. + * @return The response structure SplitHttpResponse + */ + public SplitHttpResponse post(URI uri, + String entity, + Map> additionalHeaders) throws IOException; +} diff --git a/client/src/main/java/io/split/service/SplitHttpClientImpl.java b/client/src/main/java/io/split/service/SplitHttpClientImpl.java new file mode 100644 index 000000000..7d0939777 --- /dev/null +++ b/client/src/main/java/io/split/service/SplitHttpClientImpl.java @@ -0,0 +1,171 @@ +package io.split.service; + +import io.split.client.RequestDecorator; +import io.split.client.utils.ApacheRequestDecorator; +import io.split.client.utils.SDKMetadata; +import io.split.client.utils.Utils; +import io.split.engine.common.FetchOptions; +import io.split.client.dtos.SplitHttpResponse; +import org.apache.hc.client5.http.classic.methods.HttpGet; +import org.apache.hc.client5.http.classic.methods.HttpPost; +import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; +import org.apache.hc.client5.http.impl.classic.CloseableHttpResponse; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.HttpRequest; +import org.apache.hc.core5.http.HttpStatus; +import org.apache.hc.core5.http.io.entity.EntityUtils; +import 
org.apache.hc.core5.http.io.entity.HttpEntities; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public final class SplitHttpClientImpl implements SplitHttpClient { + + private static final Logger _log = LoggerFactory.getLogger(SplitHttpClient.class); + private static final String HEADER_CACHE_CONTROL_NAME = "Cache-Control"; + private static final String HEADER_CACHE_CONTROL_VALUE = "no-cache"; + private static final String HEADER_API_KEY = "Authorization"; + private static final String HEADER_CLIENT_KEY = "SplitSDKClientKey"; + private static final String HEADER_CLIENT_MACHINE_NAME = "SplitSDKMachineName"; + private static final String HEADER_CLIENT_MACHINE_IP = "SplitSDKMachineIP"; + private static final String HEADER_CLIENT_VERSION = "SplitSDKVersion"; + + private final CloseableHttpClient _client; + private final RequestDecorator _requestDecorator; + private final String _apikey; + private final SDKMetadata _metadata; + + public static SplitHttpClientImpl create(CloseableHttpClient client, + RequestDecorator requestDecorator, + String apikey, + SDKMetadata metadata) throws URISyntaxException { + return new SplitHttpClientImpl(client, requestDecorator, apikey, metadata); + } + + private SplitHttpClientImpl(CloseableHttpClient client, + RequestDecorator requestDecorator, + String apikey, + SDKMetadata metadata) { + _client = client; + _requestDecorator = requestDecorator; + _apikey = apikey; + _metadata = metadata; + } + + public SplitHttpResponse get(URI uri, FetchOptions options, Map> additionalHeaders) { + CloseableHttpResponse response = null; + + try { + HttpGet request = new HttpGet(uri); + setBasicHeaders(request); + if (additionalHeaders != null) { + for (Map.Entry> entry : 
additionalHeaders.entrySet()) { + for (String value : entry.getValue()) { + request.addHeader(entry.getKey(), value); + } + } + } + if (options.cacheControlHeadersEnabled()) { + request.setHeader(HEADER_CACHE_CONTROL_NAME, HEADER_CACHE_CONTROL_VALUE); + } + + request = (HttpGet) ApacheRequestDecorator.decorate(request, _requestDecorator); + + response = _client.execute(request); + + if (_log.isDebugEnabled()) { + _log.debug(String.format("[%s] %s. Status code: %s", request.getMethod(), uri.toURL(), + response.getCode())); + } + + String statusMessage = ""; + int code = response.getCode(); + if (code < HttpStatus.SC_OK || code >= HttpStatus.SC_MULTIPLE_CHOICES) { + statusMessage = response.getReasonPhrase(); + _log.warn(String.format("Response status was: %s. Reason: %s", code, statusMessage)); + } + + String body = extractBodyFromResponse(response); + + return new SplitHttpResponse(code, + statusMessage, + body, + Arrays.stream(response.getHeaders()).map( + h -> new SplitHttpResponse.Header(h.getName(), Collections.singletonList(h.getValue()))) + .collect(Collectors.toList())); + } catch (Exception e) { + throw new IllegalStateException(String.format("Problem in http get operation: %s", e), e); + } finally { + Utils.forceClose(response); + } + } + + private String extractBodyFromResponse(CloseableHttpResponse response) { + String body = ""; + try { + body = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); + } catch (Exception e) { + _log.warn("Error parsing Response.body", e); + } + return body; + } + + public SplitHttpResponse post(URI uri, String body, Map> additionalHeaders) + throws IOException { + + CloseableHttpResponse response = null; + try { + HttpPost request = new HttpPost(uri); + setBasicHeaders(request); + if (additionalHeaders != null) { + for (Map.Entry> entry : additionalHeaders.entrySet()) { + for (String value : entry.getValue()) { + request.addHeader(entry.getKey(), value); + } + } + } + 
request.setEntity(HttpEntities.create(body, ContentType.APPLICATION_JSON)); + request = (HttpPost) ApacheRequestDecorator.decorate(request, _requestDecorator); + + response = _client.execute(request); + + String statusMessage = ""; + if (response.getCode() < HttpStatus.SC_OK || response.getCode() >= HttpStatus.SC_MULTIPLE_CHOICES) { + statusMessage = response.getReasonPhrase(); + _log.warn(String.format("Response status was: %s. Reason: %s", response.getCode(), + response.getReasonPhrase())); + } + return new SplitHttpResponse(response.getCode(), statusMessage, "", + Arrays.stream(response.getHeaders()).map( + h -> new SplitHttpResponse.Header(h.getName(), Collections.singletonList(h.getValue()))) + .collect(Collectors.toList())); + } catch (Exception e) { + throw new IOException(String.format("Problem in http post operation: %s", e), e); + } finally { + Utils.forceClose(response); + } + } + + private void setBasicHeaders(HttpRequest request) { + request.setHeader(HEADER_API_KEY, "Bearer " + _apikey); + request.setHeader(HEADER_CLIENT_VERSION, _metadata.getSdkVersion()); + request.setHeader(HEADER_CLIENT_MACHINE_IP, _metadata.getMachineIp()); + request.setHeader(HEADER_CLIENT_MACHINE_NAME, _metadata.getMachineName()); + request.setHeader(HEADER_CLIENT_KEY, _apikey.length() > 4 + ? 
_apikey.substring(_apikey.length() - 4) + : _apikey); + } + + @Override + public void close() throws IOException { + _client.close(); + } +} diff --git a/client/src/main/java/io/split/storages/RuleBasedSegmentCache.java b/client/src/main/java/io/split/storages/RuleBasedSegmentCache.java new file mode 100644 index 000000000..5ba55b819 --- /dev/null +++ b/client/src/main/java/io/split/storages/RuleBasedSegmentCache.java @@ -0,0 +1,4 @@ +package io.split.storages; + +public interface RuleBasedSegmentCache extends RuleBasedSegmentCacheConsumer, RuleBasedSegmentCacheProducer { +} diff --git a/client/src/main/java/io/split/storages/RuleBasedSegmentCacheConsumer.java b/client/src/main/java/io/split/storages/RuleBasedSegmentCacheConsumer.java new file mode 100644 index 000000000..348159dd9 --- /dev/null +++ b/client/src/main/java/io/split/storages/RuleBasedSegmentCacheConsumer.java @@ -0,0 +1,16 @@ +package io.split.storages; + +import io.split.engine.experiments.ParsedRuleBasedSegment; + +import java.util.Collection; +import java.util.List; +import java.util.Set; + +public interface RuleBasedSegmentCacheConsumer { + ParsedRuleBasedSegment get(String name); + Collection getAll(); + List ruleBasedSegmentNames(); + boolean contains(Set ruleBasedSegmentNames); + long getChangeNumber(); + Set getSegments(); +} \ No newline at end of file diff --git a/client/src/main/java/io/split/storages/RuleBasedSegmentCacheProducer.java b/client/src/main/java/io/split/storages/RuleBasedSegmentCacheProducer.java new file mode 100644 index 000000000..a87d38a6f --- /dev/null +++ b/client/src/main/java/io/split/storages/RuleBasedSegmentCacheProducer.java @@ -0,0 +1,13 @@ +package io.split.storages; + +import io.split.engine.experiments.ParsedRuleBasedSegment; + +import java.util.List; + +public interface RuleBasedSegmentCacheProducer { + boolean remove(String name); + void setChangeNumber(long changeNumber); + long getChangeNumber(); + void update(List toAdd, List toRemove, long changeNumber); 
+ void clear(); +} diff --git a/client/src/main/java/io/split/storages/SplitCacheConsumer.java b/client/src/main/java/io/split/storages/SplitCacheConsumer.java index e802d247f..7fbc57486 100644 --- a/client/src/main/java/io/split/storages/SplitCacheConsumer.java +++ b/client/src/main/java/io/split/storages/SplitCacheConsumer.java @@ -3,13 +3,15 @@ import io.split.engine.experiments.ParsedSplit; import java.util.Collection; +import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; public interface SplitCacheConsumer extends SplitCacheCommons{ ParsedSplit get(String name); Collection getAll(); Map fetchMany(List names); boolean trafficTypeExists(String trafficTypeName); -} + List splitNames(); + Map> getNamesByFlagSets(List flagSets); +} \ No newline at end of file diff --git a/client/src/main/java/io/split/storages/SplitCacheProducer.java b/client/src/main/java/io/split/storages/SplitCacheProducer.java index a237b06f0..02a64ddc9 100644 --- a/client/src/main/java/io/split/storages/SplitCacheProducer.java +++ b/client/src/main/java/io/split/storages/SplitCacheProducer.java @@ -12,4 +12,5 @@ public interface SplitCacheProducer extends SplitCacheCommons{ void putMany(List splits); void increaseTrafficType(String trafficType); void decreaseTrafficType(String trafficType); + void update(List toAdd, List toRemove, long changeNumber); } diff --git a/client/src/main/java/io/split/storages/memory/InMemoryCacheImp.java b/client/src/main/java/io/split/storages/memory/InMemoryCacheImp.java index fc82b14d6..83e9f3b77 100644 --- a/client/src/main/java/io/split/storages/memory/InMemoryCacheImp.java +++ b/client/src/main/java/io/split/storages/memory/InMemoryCacheImp.java @@ -4,6 +4,7 @@ import com.google.common.collect.Maps; import com.google.common.collect.Multiset; import com.google.common.collect.Sets; +import io.split.client.interceptors.FlagSetsFilter; import io.split.engine.experiments.ParsedSplit; import io.split.storages.SplitCache; 
import org.slf4j.Logger; @@ -12,6 +13,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -24,27 +26,33 @@ public class InMemoryCacheImp implements SplitCache { private static final Logger _log = LoggerFactory.getLogger(InMemoryCacheImp.class); private final ConcurrentMap _concurrentMap; + private final ConcurrentMap> _flagSets; private final Multiset _concurrentTrafficTypeNameSet; + private final FlagSetsFilter _flagSetsFilter; private AtomicLong _changeNumber; - public InMemoryCacheImp() { - this(-1); + public InMemoryCacheImp(FlagSetsFilter flagSets) { + this(-1, flagSets); } - public InMemoryCacheImp(long startingChangeNumber) { + public InMemoryCacheImp(long startingChangeNumber, FlagSetsFilter flagSets) { _concurrentMap = Maps.newConcurrentMap(); _changeNumber = new AtomicLong(startingChangeNumber); _concurrentTrafficTypeNameSet = ConcurrentHashMultiset.create(); + _flagSets = Maps.newConcurrentMap(); + _flagSetsFilter = flagSets; } @Override public boolean remove(String name) { ParsedSplit removed = _concurrentMap.remove(name); - if (removed != null && removed.trafficTypeName() != null) { - this.decreaseTrafficType(removed.trafficTypeName()); + if (removed != null) { + removeFromFlagSets(removed.feature()); + if (removed.trafficTypeName() != null) { + this.decreaseTrafficType(removed.trafficTypeName()); + } } - return removed != null; } @@ -75,7 +83,7 @@ public long getChangeNumber() { @Override public void setChangeNumber(long changeNumber) { if (changeNumber < _changeNumber.get()) { - _log.error("ChangeNumber for splits cache is less than previous"); + _log.error("ChangeNumber for feature flags cache is less than previous"); } _changeNumber.set(changeNumber); @@ -88,6 +96,25 @@ public boolean trafficTypeExists(String trafficTypeName) { return 
Sets.newHashSet(_concurrentTrafficTypeNameSet.elementSet()).contains(trafficTypeName); } + @Override + public List splitNames() { + List splitNamesList = new ArrayList<>(); + for (String key: _concurrentMap.keySet()) { + splitNamesList.add(_concurrentMap.get(key).feature()); + } + return splitNamesList; + } + + @Override + public Map> getNamesByFlagSets(List flagSets) { + Map> toReturn = new HashMap<>(); + for (String set: flagSets) { + HashSet keys = _flagSets.get(set); + toReturn.put(set, keys); + } + return toReturn; + } + @Override public void kill(String splitName, String defaultTreatment, long changeNumber) { ParsedSplit parsedSplit = _concurrentMap.get(splitName); @@ -102,7 +129,11 @@ public void kill(String splitName, String defaultTreatment, long changeNumber) { parsedSplit.trafficAllocation(), parsedSplit.trafficAllocationSeed(), parsedSplit.algo(), - parsedSplit.configurations()); + parsedSplit.configurations(), + parsedSplit.flagSets(), + parsedSplit.impressionsDisabled(), + parsedSplit.prerequisitesMatcher() + ); _concurrentMap.put(splitName, updatedSplit); } @@ -110,17 +141,20 @@ public void kill(String splitName, String defaultTreatment, long changeNumber) { @Override public void clear() { _concurrentMap.clear(); + _changeNumber.set(-1); _concurrentTrafficTypeNameSet.clear(); + _flagSets.clear(); } @Override public void putMany(List splits) { for (ParsedSplit split : splits) { _concurrentMap.put(split.feature(), split); - if (split.trafficTypeName() != null) { this.increaseTrafficType(split.trafficTypeName()); } + removeFromFlagSets(split.feature()); + addToFlagSets(split); } } @@ -133,9 +167,46 @@ public void increaseTrafficType(String trafficType) { public void decreaseTrafficType(String trafficType) { _concurrentTrafficTypeNameSet.remove(trafficType); } - + + @Override + public void update(List toAdd, List toRemove, long changeNumber) { + if(toAdd != null) { + putMany(toAdd); + } + if(toRemove != null) { + for(String featureFlag : toRemove) { + 
remove(featureFlag); + } + } + setChangeNumber(changeNumber); + } + public Set getSegments() { return _concurrentMap.values().stream() .flatMap(parsedSplit -> parsedSplit.getSegmentsNames().stream()).collect(Collectors.toSet()); } -} + + private void addToFlagSets(ParsedSplit featureFlag) { + HashSet sets = featureFlag.flagSets(); + if(sets == null) { + return; + } + for (String set: sets) { + if (!_flagSetsFilter.intersect(set)) { + continue; + } + HashSet features = _flagSets.get(set); + if (features == null) { + features = new HashSet<>(); + } + features.add(featureFlag.feature()); + _flagSets.put(set, features); + } + } + + private void removeFromFlagSets(String featureFlagName) { + for (String set: _flagSets.keySet()) { + _flagSets.get(set).remove(featureFlagName); + } + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/storages/memory/RuleBasedSegmentCacheInMemoryImp.java b/client/src/main/java/io/split/storages/memory/RuleBasedSegmentCacheInMemoryImp.java new file mode 100644 index 000000000..660811ca8 --- /dev/null +++ b/client/src/main/java/io/split/storages/memory/RuleBasedSegmentCacheInMemoryImp.java @@ -0,0 +1,109 @@ +package io.split.storages.memory; + +import com.google.common.collect.Maps; +import io.split.engine.experiments.ParsedRuleBasedSegment; +import io.split.storages.RuleBasedSegmentCache; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Collection; +import java.util.List; +import java.util.Set; +import java.util.ArrayList; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicLong; +import java.util.stream.Collectors; +import java.util.Map; + +public class RuleBasedSegmentCacheInMemoryImp implements RuleBasedSegmentCache { + + private static final Logger _log = LoggerFactory.getLogger(RuleBasedSegmentCacheInMemoryImp.class); + + private final ConcurrentMap _concurrentMap; + + private AtomicLong _changeNumber; + + public RuleBasedSegmentCacheInMemoryImp() 
{ + this(-1); + } + + public RuleBasedSegmentCacheInMemoryImp(long startingChangeNumber) { + _concurrentMap = Maps.newConcurrentMap(); + _changeNumber = new AtomicLong(startingChangeNumber); + } + + @Override + public boolean remove(String name) { + ParsedRuleBasedSegment removed = _concurrentMap.remove(name); + return removed != null; + } + + @Override + public ParsedRuleBasedSegment get(String name) { + return _concurrentMap.get(name); + } + + @Override + public Collection getAll() { + return _concurrentMap.values(); + } + + @Override + public long getChangeNumber() { + return _changeNumber.get(); + } + + @Override + public void setChangeNumber(long changeNumber) { + if (changeNumber < _changeNumber.get()) { + _log.error("ChangeNumber for feature flags cache is less than previous"); + } + + _changeNumber.set(changeNumber); + } + + @Override + public List ruleBasedSegmentNames() { + List ruleBasedSegmentNamesList = new ArrayList<>(); + for (Map.Entry key: _concurrentMap.entrySet()) { + ruleBasedSegmentNamesList.add(key.getValue().ruleBasedSegment()); + } + return ruleBasedSegmentNamesList; + } + + @Override + public void clear() { + _changeNumber.set(-1); + _concurrentMap.clear(); + } + + private void putMany(List ruleBasedSegments) { + for (ParsedRuleBasedSegment ruleBasedSegment : ruleBasedSegments) { + _concurrentMap.put(ruleBasedSegment.ruleBasedSegment(), ruleBasedSegment); + } + } + + @Override + public void update(List toAdd, List toRemove, long changeNumber) { + if(toAdd != null) { + putMany(toAdd); + } + if(toRemove != null) { + for(String ruleBasedSegment : toRemove) { + remove(ruleBasedSegment); + } + } + setChangeNumber(changeNumber); + } + + public Set getSegments() { + return _concurrentMap.values().stream() + .flatMap(parsedRuleBasedSegment -> parsedRuleBasedSegment.getSegmentsNames().stream()).collect(Collectors.toSet()); + } + + @Override + public boolean contains(Set ruleBasedSegmentNames) { + return 
_concurrentMap.keySet().containsAll(ruleBasedSegmentNames); + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomEventAdapterConsumer.java b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomEventAdapterConsumer.java index 8491fd7a9..afecdddcc 100644 --- a/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomEventAdapterConsumer.java +++ b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomEventAdapterConsumer.java @@ -4,10 +4,24 @@ import io.split.client.events.EventsStorageConsumer; import io.split.client.events.WrappedEvent; +import java.util.ArrayList; +import java.util.List; + public class UserCustomEventAdapterConsumer implements EventsStorageConsumer { @Override public WrappedEvent pop() { //No-Op return new WrappedEvent(new Event(), 0L); } + + @Override + public List popAll(){ + //No-op + return new ArrayList<>(); + } + + @Override + public boolean isFull() { + return false; + } } diff --git a/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomEventAdapterProducer.java b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomEventAdapterProducer.java index 23e241580..6252c303f 100644 --- a/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomEventAdapterProducer.java +++ b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomEventAdapterProducer.java @@ -6,7 +6,7 @@ import io.split.client.utils.Json; import io.split.storages.pluggable.domain.EventConsumer; import io.split.storages.pluggable.domain.PrefixAdapter; -import io.split.storages.pluggable.domain.SafeUserStorageWrapper; +import io.split.storages.pluggable.domain.UserStorageWrapper; import pluggable.CustomStorageWrapper; import java.util.List; @@ -17,18 +17,18 @@ public class UserCustomEventAdapterProducer implements EventsStorageProducer { - private final SafeUserStorageWrapper _safeUserStorageWrapper; + private final 
UserStorageWrapper _userStorageWrapper; private Metadata _metadata; public UserCustomEventAdapterProducer(CustomStorageWrapper customStorageWrapper, Metadata metadata) { - _safeUserStorageWrapper = new SafeUserStorageWrapper(checkNotNull(customStorageWrapper)); + _userStorageWrapper = new UserStorageWrapper(checkNotNull(customStorageWrapper)); _metadata = metadata; } @Override public boolean track(Event event, int eventSize) { List events = Stream.of(Json.toJson(new EventConsumer(_metadata, event))).collect(Collectors.toList()); - _safeUserStorageWrapper.pushItems(PrefixAdapter.buildEvent(), events); + _userStorageWrapper.pushItems(PrefixAdapter.buildEvent(), events); return true; } } diff --git a/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomImpressionAdapterProducer.java b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomImpressionAdapterProducer.java index 185bb8a1c..c0845f0cc 100644 --- a/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomImpressionAdapterProducer.java +++ b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomImpressionAdapterProducer.java @@ -9,11 +9,10 @@ import io.split.client.impressions.ImpressionsStorageProducer; import io.split.storages.pluggable.domain.ImpressionConsumer; import io.split.storages.pluggable.domain.PrefixAdapter; -import io.split.storages.pluggable.domain.SafeUserStorageWrapper; +import io.split.storages.pluggable.domain.UserStorageWrapper; import pluggable.CustomStorageWrapper; import java.lang.reflect.Modifier; -import java.util.Collections; import java.util.List; import java.util.stream.Collectors; @@ -21,7 +20,7 @@ public class UserCustomImpressionAdapterProducer implements ImpressionsStorageProducer { - private final SafeUserStorageWrapper _safeUserStorageWrapper; + private final UserStorageWrapper _userStorageWrapper; private final Gson _json = new GsonBuilder() .serializeNulls() // Send nulls .excludeFieldsWithModifiers(Modifier.STATIC) @@ 
-34,15 +33,17 @@ public class UserCustomImpressionAdapterProducer implements ImpressionsStoragePr private Metadata _metadata; public UserCustomImpressionAdapterProducer(CustomStorageWrapper customStorageWrapper, Metadata metadata) { - _safeUserStorageWrapper = new SafeUserStorageWrapper(checkNotNull(customStorageWrapper)); + _userStorageWrapper = new UserStorageWrapper(checkNotNull(customStorageWrapper)); _metadata = metadata; } @Override public long put(List imps) { //Impression + if (imps.isEmpty()){ + return 0; + } List impressions = imps.stream().map(keyImp -> _json.toJson(new ImpressionConsumer(_metadata, keyImp))).collect(Collectors.toList()); - return _safeUserStorageWrapper.pushItems(PrefixAdapter.buildImpressions(), impressions); + return _userStorageWrapper.pushItems(PrefixAdapter.buildImpressions(), impressions); } - } diff --git a/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomRuleBasedSegmentAdapterConsumer.java b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomRuleBasedSegmentAdapterConsumer.java new file mode 100644 index 000000000..b04e49761 --- /dev/null +++ b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomRuleBasedSegmentAdapterConsumer.java @@ -0,0 +1,102 @@ +package io.split.storages.pluggable.adapters; + +import io.split.client.dtos.RuleBasedSegment; +import io.split.client.utils.Json; +import io.split.engine.experiments.ParsedRuleBasedSegment; +import io.split.engine.experiments.RuleBasedSegmentParser; +import io.split.storages.RuleBasedSegmentCacheConsumer; +import io.split.storages.pluggable.domain.PrefixAdapter; +import io.split.storages.pluggable.domain.UserStorageWrapper; +import io.split.storages.pluggable.utils.Helper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import pluggable.CustomStorageWrapper; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import static 
com.google.common.base.Preconditions.checkNotNull; + +public class UserCustomRuleBasedSegmentAdapterConsumer implements RuleBasedSegmentCacheConsumer { + + private static final Logger _log = LoggerFactory.getLogger(UserCustomRuleBasedSegmentAdapterConsumer.class); + + private final RuleBasedSegmentParser _ruleBasedSegmentParser; + private final UserStorageWrapper _userStorageWrapper; + + public UserCustomRuleBasedSegmentAdapterConsumer(CustomStorageWrapper customStorageWrapper) { + _ruleBasedSegmentParser = new RuleBasedSegmentParser(); + _userStorageWrapper = new UserStorageWrapper(checkNotNull(customStorageWrapper)); + } + + @Override + public long getChangeNumber() { + String wrapperResponse = _userStorageWrapper.get(PrefixAdapter.buildRuleBasedSegmentChangeNumber()); + return Helper.responseToLong(wrapperResponse, -1L); + } + + @Override + public ParsedRuleBasedSegment get(String name) { + String wrapperResponse = _userStorageWrapper.get(PrefixAdapter.buildRuleBasedSegmentKey(name)); + if(wrapperResponse == null) { + return null; + } + RuleBasedSegment ruleBasedSegment = Json.fromJson(wrapperResponse, RuleBasedSegment.class); + if(ruleBasedSegment == null) { + _log.warn("Could not parse RuleBasedSegment."); + return null; + } + return _ruleBasedSegmentParser.parse(ruleBasedSegment); + } + + @Override + public Collection getAll() { + Set keys = _userStorageWrapper.getKeysByPrefix(PrefixAdapter.buildGetAllRuleBasedSegment()); + if(keys == null) { + return new ArrayList<>(); + } + List wrapperResponse = _userStorageWrapper.getMany(new ArrayList<>(keys)); + if(wrapperResponse == null) { + return new ArrayList<>(); + } + return stringsToParsedRuleBasedSegments(wrapperResponse); + } + + @Override + public List ruleBasedSegmentNames() { + Set ruleBasedSegmentNamesWithPrefix = _userStorageWrapper.getKeysByPrefix(PrefixAdapter.buildGetAllRuleBasedSegment()); + ruleBasedSegmentNamesWithPrefix = ruleBasedSegmentNamesWithPrefix.stream(). 
+ map(key -> key.replace(PrefixAdapter.buildRuleBasedSegmentsPrefix(), "")). + collect(Collectors.toSet()); + return new ArrayList<>(ruleBasedSegmentNamesWithPrefix); + } + + @Override + public Set getSegments() { + return getAll().stream() + .flatMap(parsedRuleBasedSegment -> parsedRuleBasedSegment. + getSegmentsNames().stream()).collect(Collectors.toSet()); + } + + + private List stringsToParsedRuleBasedSegments(List elements) { + List result = new ArrayList<>(); + for(String s : elements) { + if(s != null) { + result.add(_ruleBasedSegmentParser.parse(Json.fromJson(s, RuleBasedSegment.class))); + continue; + } + result.add(null); + } + return result; + } + + @Override + public boolean contains(Set ruleBasedSegmentNames) { + return _userStorageWrapper.getKeysByPrefix(PrefixAdapter.buildGetAllRuleBasedSegment()).containsAll(ruleBasedSegmentNames); + } + +} \ No newline at end of file diff --git a/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomSegmentAdapterConsumer.java b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomSegmentAdapterConsumer.java index e011421d7..7ba2d916a 100644 --- a/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomSegmentAdapterConsumer.java +++ b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomSegmentAdapterConsumer.java @@ -2,7 +2,7 @@ import io.split.storages.SegmentCacheConsumer; import io.split.storages.pluggable.domain.PrefixAdapter; -import io.split.storages.pluggable.domain.SafeUserStorageWrapper; +import io.split.storages.pluggable.domain.UserStorageWrapper; import io.split.storages.pluggable.utils.Helper; import pluggable.CustomStorageWrapper; @@ -12,35 +12,35 @@ public class UserCustomSegmentAdapterConsumer implements SegmentCacheConsumer { - private final SafeUserStorageWrapper _safeUserStorageWrapper; + private final UserStorageWrapper _userStorageWrapper; public UserCustomSegmentAdapterConsumer(CustomStorageWrapper customStorageWrapper) { - 
_safeUserStorageWrapper = new SafeUserStorageWrapper(checkNotNull(customStorageWrapper)); + _userStorageWrapper = new UserStorageWrapper(checkNotNull(customStorageWrapper)); } @Override public long getChangeNumber(String segmentName) { - String wrapperResponse = _safeUserStorageWrapper.get(PrefixAdapter.buildSegment(segmentName)); + String wrapperResponse = _userStorageWrapper.get(PrefixAdapter.buildSegment(segmentName)); return Helper.responseToLong(wrapperResponse, -1L); } @Override public boolean isInSegment(String segmentName, String key) { - return _safeUserStorageWrapper.itemContains(PrefixAdapter.buildSegment(segmentName), key); + return _userStorageWrapper.itemContains(PrefixAdapter.buildSegment(segmentName), key); } @Override public long getSegmentCount() { - Set keys = _safeUserStorageWrapper.getKeysByPrefix(PrefixAdapter.buildSegmentAll()); + Set keys = _userStorageWrapper.getKeysByPrefix(PrefixAdapter.buildSegmentAll()); return keys == null ? 0L : keys.size(); } @Override public long getKeyCount() { - Set keys = _safeUserStorageWrapper.getKeysByPrefix(PrefixAdapter.buildSegmentAll()); + Set keys = _userStorageWrapper.getKeysByPrefix(PrefixAdapter.buildSegmentAll()); if(keys == null) { return 0L; } - return keys.stream().mapToLong(key -> _safeUserStorageWrapper.getItemsCount(key)).sum(); + return keys.stream().mapToLong(key -> _userStorageWrapper.getItemsCount(key)).sum(); } } diff --git a/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomSegmentAdapterProducer.java b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomSegmentAdapterProducer.java index 5afc5c1f3..faebf8356 100644 --- a/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomSegmentAdapterProducer.java +++ b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomSegmentAdapterProducer.java @@ -3,7 +3,7 @@ import io.split.client.utils.Json; import io.split.storages.SegmentCacheProducer; import 
io.split.storages.pluggable.domain.PrefixAdapter; -import io.split.storages.pluggable.domain.SafeUserStorageWrapper; +import io.split.storages.pluggable.domain.UserStorageWrapper; import io.split.storages.pluggable.utils.Helper; import pluggable.CustomStorageWrapper; @@ -13,27 +13,27 @@ public class UserCustomSegmentAdapterProducer implements SegmentCacheProducer { - private final SafeUserStorageWrapper _safeUserStorageWrapper; + private final UserStorageWrapper _userStorageWrapper; public UserCustomSegmentAdapterProducer(CustomStorageWrapper customStorageWrapper) { - _safeUserStorageWrapper = new SafeUserStorageWrapper(checkNotNull(customStorageWrapper)); + _userStorageWrapper = new UserStorageWrapper(checkNotNull(customStorageWrapper)); } @Override public long getChangeNumber(String segmentName) { - String wrapperResponse = _safeUserStorageWrapper.get(PrefixAdapter.buildSegment(segmentName)); + String wrapperResponse = _userStorageWrapper.get(PrefixAdapter.buildSegment(segmentName)); return Helper.responseToLong(wrapperResponse, -1L); } @Override public void updateSegment(String segmentName, List toAdd, List toRemove, long changeNumber) { String keySegment = PrefixAdapter.buildSegment(segmentName); - _safeUserStorageWrapper.addItems(keySegment, toAdd); - _safeUserStorageWrapper.removeItems(keySegment, toRemove); - _safeUserStorageWrapper.set(PrefixAdapter.buildSegmentTill(segmentName), Json.toJson(changeNumber)); + _userStorageWrapper.addItems(keySegment, toAdd); + _userStorageWrapper.removeItems(keySegment, toRemove); + _userStorageWrapper.set(PrefixAdapter.buildSegmentTill(segmentName), Json.toJson(changeNumber)); } @Override public void setChangeNumber(String segmentName, long changeNumber) { - _safeUserStorageWrapper.set(PrefixAdapter.buildSegmentTill(segmentName), Json.toJson(changeNumber)); + _userStorageWrapper.set(PrefixAdapter.buildSegmentTill(segmentName), Json.toJson(changeNumber)); } } diff --git 
a/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomSplitAdapterConsumer.java b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomSplitAdapterConsumer.java index fb10559df..ca255c10f 100644 --- a/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomSplitAdapterConsumer.java +++ b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomSplitAdapterConsumer.java @@ -5,18 +5,21 @@ import io.split.engine.experiments.ParsedSplit; import io.split.engine.experiments.SplitParser; import io.split.storages.SplitCacheConsumer; -import io.split.storages.pluggable.domain.SafeUserStorageWrapper; +import io.split.storages.pluggable.domain.UserPipelineWrapper; +import io.split.storages.pluggable.domain.UserStorageWrapper; import io.split.storages.pluggable.domain.PrefixAdapter; import io.split.storages.pluggable.utils.Helper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import pluggable.CustomStorageWrapper; +import pluggable.Result; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.HashSet; import java.util.stream.Collectors; @@ -28,22 +31,22 @@ public class UserCustomSplitAdapterConsumer implements SplitCacheConsumer { private static final Logger _log = LoggerFactory.getLogger(UserCustomSplitAdapterConsumer.class); private final SplitParser _splitParser; - private final SafeUserStorageWrapper _safeUserStorageWrapper; + private final UserStorageWrapper _userStorageWrapper; public UserCustomSplitAdapterConsumer(CustomStorageWrapper customStorageWrapper) { _splitParser = new SplitParser(); - _safeUserStorageWrapper = new SafeUserStorageWrapper(checkNotNull(customStorageWrapper)); + _userStorageWrapper = new UserStorageWrapper(checkNotNull(customStorageWrapper)); } @Override public long getChangeNumber() { - String wrapperResponse = 
_safeUserStorageWrapper.get(PrefixAdapter.buildSplitChangeNumber()); + String wrapperResponse = _userStorageWrapper.get(PrefixAdapter.buildSplitChangeNumber()); return Helper.responseToLong(wrapperResponse, -1L); } @Override public ParsedSplit get(String name) { - String wrapperResponse = _safeUserStorageWrapper.get(PrefixAdapter.buildSplitKey(name)); + String wrapperResponse = _userStorageWrapper.get(PrefixAdapter.buildSplitKey(name)); if(wrapperResponse == null) { return null; } @@ -57,11 +60,11 @@ public ParsedSplit get(String name) { @Override public Collection getAll() { - Set keys = _safeUserStorageWrapper.getKeysByPrefix(PrefixAdapter.buildGetAllSplit()); + Set keys = _userStorageWrapper.getKeysByPrefix(PrefixAdapter.buildGetAllSplit()); if(keys == null) { return new ArrayList<>(); } - List wrapperResponse = _safeUserStorageWrapper.getMany(new ArrayList<>(keys)); + List wrapperResponse = _userStorageWrapper.getMany(new ArrayList<>(keys)); if(wrapperResponse == null) { return new ArrayList<>(); } @@ -70,7 +73,7 @@ public Collection getAll() { @Override public boolean trafficTypeExists(String trafficTypeName) { - String wrapperResponse = _safeUserStorageWrapper.get(PrefixAdapter.buildTrafficTypeExists(trafficTypeName)); + String wrapperResponse = _userStorageWrapper.get(PrefixAdapter.buildTrafficTypeExists(trafficTypeName)); if(wrapperResponse == null) { return false; } @@ -84,10 +87,45 @@ public boolean trafficTypeExists(String trafficTypeName) { return false; } + @Override + public List splitNames() { + Set splitNamesWithPrefix = _userStorageWrapper.getKeysByPrefix(PrefixAdapter.buildGetAllSplit()); + splitNamesWithPrefix = splitNamesWithPrefix.stream().map(key -> key.replace(PrefixAdapter.buildSplitsPrefix(), "")). 
+ collect(Collectors.toSet()); + return new ArrayList<>(splitNamesWithPrefix); + } + + @Override + public Map> getNamesByFlagSets(List flagSets) { + Map> toReturn = new HashMap<>(); + try { + if (flagSets == null) { + return toReturn; + } + UserPipelineWrapper pipelineExecution = _userStorageWrapper.pipeline(); + for (String set: flagSets) { + pipelineExecution.getMembers(PrefixAdapter.buildFlagSetPrefix(set)); + } + List results = pipelineExecution.exec(); + if (results == null || results.isEmpty()){ + return toReturn; + } + for (int i = 0; i < results.size(); i ++) { + Optional> featureFlags = results.get(i).asHash(); + if(featureFlags.isPresent()) { + toReturn.put(flagSets.get(i), featureFlags.get()); + } + } + } catch (Exception e) { + _log.warn("Redis pipeline exception when getting names by flag sets: ", e); + } + return toReturn; + } + @Override public Map fetchMany(List names) { Map result = new HashMap<>(); - List wrapperResponse = _safeUserStorageWrapper.getItems(PrefixAdapter.buildFetchManySplits(names)); + List wrapperResponse = _userStorageWrapper.getItems(PrefixAdapter.buildFetchManySplits(names)); if(wrapperResponse == null) { return result; } @@ -115,4 +153,4 @@ private List stringsToParsedSplits(List elements) { } return result; } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomSplitAdapterProducer.java b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomSplitAdapterProducer.java index 190ac0af9..b9a034aff 100644 --- a/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomSplitAdapterProducer.java +++ b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomSplitAdapterProducer.java @@ -5,7 +5,7 @@ import io.split.engine.experiments.ParsedSplit; import io.split.storages.SplitCacheProducer; import io.split.storages.pluggable.domain.PrefixAdapter; -import io.split.storages.pluggable.domain.SafeUserStorageWrapper; +import 
io.split.storages.pluggable.domain.UserStorageWrapper; import io.split.storages.pluggable.utils.Helper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -23,21 +23,21 @@ public class UserCustomSplitAdapterProducer implements SplitCacheProducer { private static final Logger _log = LoggerFactory.getLogger(UserCustomSplitAdapterProducer.class); - private final SafeUserStorageWrapper _safeUserStorageWrapper; + private final UserStorageWrapper _userStorageWrapper; public UserCustomSplitAdapterProducer(CustomStorageWrapper customStorageWrapper) { - _safeUserStorageWrapper = new SafeUserStorageWrapper(checkNotNull(customStorageWrapper)); + _userStorageWrapper = new UserStorageWrapper(checkNotNull(customStorageWrapper)); } @Override public long getChangeNumber() { - String wrapperResponse = _safeUserStorageWrapper.get(PrefixAdapter.buildSplitChangeNumber()); + String wrapperResponse = _userStorageWrapper.get(PrefixAdapter.buildSplitChangeNumber()); return Helper.responseToLong(wrapperResponse, -1L); } @Override public boolean remove(String splitName) { - String wrapperResponse = _safeUserStorageWrapper.get(PrefixAdapter.buildSplitKey(splitName)); + String wrapperResponse = _userStorageWrapper.get(PrefixAdapter.buildSplitKey(splitName)); if(wrapperResponse == null) { return false; } @@ -46,7 +46,7 @@ public boolean remove(String splitName) { _log.info("Could not parse Split."); return false; } - _safeUserStorageWrapper.delete(Stream.of(PrefixAdapter.buildSplitKey(splitName)).collect(Collectors.toList())); + _userStorageWrapper.delete(Stream.of(PrefixAdapter.buildSplitKey(splitName)).collect(Collectors.toList())); if(split.trafficTypeName != null){ this.decreaseTrafficType(split.trafficTypeName); } @@ -55,12 +55,12 @@ public boolean remove(String splitName) { @Override public void setChangeNumber(long changeNumber) { - _safeUserStorageWrapper.set(PrefixAdapter.buildSplitChangeNumber(),Json.toJson(changeNumber)); + 
_userStorageWrapper.set(PrefixAdapter.buildSplitChangeNumber(),Json.toJson(changeNumber)); } @Override public void kill(String splitName, String defaultTreatment, long changeNumber) { - String wrapperResponse = _safeUserStorageWrapper.get(PrefixAdapter.buildSplitKey(splitName)); + String wrapperResponse = _userStorageWrapper.get(PrefixAdapter.buildSplitKey(splitName)); if(wrapperResponse == null) { return; } @@ -69,7 +69,7 @@ public void kill(String splitName, String defaultTreatment, long changeNumber) { _log.info("Could not parse Split."); return; } - _safeUserStorageWrapper.set(PrefixAdapter.buildSplitKey(splitName), Json.toJson(split)); + _userStorageWrapper.set(PrefixAdapter.buildSplitKey(splitName), Json.toJson(split)); } @Override @@ -80,24 +80,37 @@ public void clear() { @Override public void putMany(List splits) { for(ParsedSplit split : splits) { - _safeUserStorageWrapper.set(PrefixAdapter.buildSplitKey(split.feature()), Json.toJson(split)); + _userStorageWrapper.set(PrefixAdapter.buildSplitKey(split.feature()), Json.toJson(split)); this.increaseTrafficType(PrefixAdapter.buildTrafficTypeExists(split.trafficTypeName())); } } @Override public void increaseTrafficType(String trafficType) { - _safeUserStorageWrapper.increment(PrefixAdapter.buildTrafficTypeExists(trafficType), 1); + _userStorageWrapper.increment(PrefixAdapter.buildTrafficTypeExists(trafficType), 1); } @Override public void decreaseTrafficType(String trafficType) { - long trafficTypeCount = _safeUserStorageWrapper.decrement(PrefixAdapter.buildTrafficTypeExists(trafficType), 1); + long trafficTypeCount = _userStorageWrapper.decrement(PrefixAdapter.buildTrafficTypeExists(trafficType), 1); if(trafficTypeCount<=0) { - _safeUserStorageWrapper.delete(Stream.of(PrefixAdapter.buildTrafficTypeExists(trafficType)).collect(Collectors.toList())); + _userStorageWrapper.delete(Stream.of(PrefixAdapter.buildTrafficTypeExists(trafficType)).collect(Collectors.toList())); } } + @Override + public void update(List 
toAdd, List toRemove, long changeNumber) { + if(toAdd != null) { + putMany(toAdd); + } + if(toRemove != null) { + for(String featureFlag : toRemove) { + remove(featureFlag); + } + } + setChangeNumber(changeNumber); + } + @Override public Set getSegments() { //NoOp diff --git a/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomTelemetryAdapterProducer.java b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomTelemetryAdapterProducer.java index 6c0c00936..1dd9412b7 100644 --- a/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomTelemetryAdapterProducer.java +++ b/client/src/main/java/io/split/storages/pluggable/adapters/UserCustomTelemetryAdapterProducer.java @@ -2,7 +2,7 @@ import io.split.client.utils.SDKMetadata; import io.split.storages.pluggable.domain.PrefixAdapter; -import io.split.storages.pluggable.domain.SafeUserStorageWrapper; +import io.split.storages.pluggable.domain.UserStorageWrapper; import io.split.telemetry.domain.StreamingEvent; import io.split.telemetry.domain.enums.EventsDataRecordsEnum; import io.split.telemetry.domain.enums.HTTPLatenciesEnum; @@ -10,6 +10,7 @@ import io.split.telemetry.domain.enums.LastSynchronizationRecordsEnum; import io.split.telemetry.domain.enums.MethodEnum; import io.split.telemetry.domain.enums.ResourceEnum; +import io.split.telemetry.domain.enums.UpdatesFromSSEEnum; import io.split.telemetry.storage.TelemetryStorageProducer; import io.split.telemetry.utils.BucketCalculator; import pluggable.CustomStorageWrapper; @@ -18,11 +19,11 @@ public class UserCustomTelemetryAdapterProducer implements TelemetryStorageProducer { - private final SafeUserStorageWrapper _safeUserStorageWrapper; + private final UserStorageWrapper _userStorageWrapper; private SDKMetadata _sdkMetadata; public UserCustomTelemetryAdapterProducer(CustomStorageWrapper customStorageWrapper, SDKMetadata sdkMetadata) { - _safeUserStorageWrapper = new SafeUserStorageWrapper(checkNotNull(customStorageWrapper)); + 
_userStorageWrapper = new UserStorageWrapper(checkNotNull(customStorageWrapper)); _sdkMetadata = sdkMetadata; } @@ -38,12 +39,16 @@ public void recordBURTimeout() { @Override public void recordLatency(MethodEnum method, long latency) { - _safeUserStorageWrapper.increment(PrefixAdapter.buildTelemetryLatenciesPrefix(method.getMethod(), BucketCalculator.getBucketForLatency(latency), _sdkMetadata.getSdkVersion(), _sdkMetadata.getMachineIp(), _sdkMetadata.getMachineName()), 1); + String key = String.format("%s/%s/%s/%s/%d", _sdkMetadata.getSdkVersion(), _sdkMetadata.getMachineName(), + _sdkMetadata.getMachineIp(), method.getMethod(), BucketCalculator.getBucketForLatency(latency)); + _userStorageWrapper.hIncrement(PrefixAdapter.buildTelemetryLatenciesPrefix(), key, 1); } @Override public void recordException(MethodEnum method) { - _safeUserStorageWrapper.increment(PrefixAdapter.buildTelemetryExceptionsPrefix(method.getMethod(), _sdkMetadata.getSdkVersion(), _sdkMetadata.getMachineIp(), _sdkMetadata.getMachineName()), 1); + String key = String.format("%s/%s/%s/%s", _sdkMetadata.getSdkVersion(), _sdkMetadata.getMachineName(), + _sdkMetadata.getMachineIp(), method.getMethod()); + _userStorageWrapper.hIncrement(PrefixAdapter.buildTelemetryExceptionsPrefix(), key, 1); } @Override @@ -95,4 +100,9 @@ public void recordStreamingEvents(StreamingEvent streamingEvent) { public void recordSessionLength(long sessionLength) { //No-op } -} + + @Override + public void recordUpdatesFromSSE(UpdatesFromSSEEnum updatesFromSSEEnum) { + //No-op + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/storages/pluggable/domain/ConfigConsumer.java b/client/src/main/java/io/split/storages/pluggable/domain/ConfigConsumer.java index 36019de24..c2df2b295 100644 --- a/client/src/main/java/io/split/storages/pluggable/domain/ConfigConsumer.java +++ b/client/src/main/java/io/split/storages/pluggable/domain/ConfigConsumer.java @@ -1,8 +1,6 @@ package 
io.split.storages.pluggable.domain; import com.google.gson.annotations.SerializedName; -import io.split.telemetry.domain.Rates; -import io.split.telemetry.domain.URLOverrides; import java.util.List; @@ -11,7 +9,9 @@ public class ConfigConsumer { /* package private */ static final String FIELD_STORAGE = "st"; /* package private */ static final String FIELD_ACTIVE_FACTORIES = "aF"; /* package private */ static final String FIELD_REDUNDANT_FACTORIES = "rF"; - /* package private */ static final String FIELD__TAGS = "t"; + /* package private */ static final String FIELD_TAGS = "t"; + /* package private */ static final String FIELD_FLAG_SETS_TOTAL = "fsT"; + /* package private */ static final String FIELD_FLAG_SETS_INVALID = "fsI"; @SerializedName(FIELD_OPERATION_MODE) private int _operationMode; @@ -21,46 +21,66 @@ public class ConfigConsumer { private long _activeFactories; @SerializedName(FIELD_REDUNDANT_FACTORIES) private long _redundantFactories; - @SerializedName(FIELD__TAGS) + @SerializedName(FIELD_TAGS) private List _tags; + @SerializedName(FIELD_FLAG_SETS_TOTAL) + private int _flagSetsTotal; + @SerializedName(FIELD_FLAG_SETS_INVALID) + private int _flagSetsInvalid; - public int get_operationMode() { + public int getOperationMode() { return _operationMode; } - public void set_operationMode(int _operationMode) { - this._operationMode = _operationMode; + public void setOperationMode(int operationMode) { + this._operationMode = operationMode; } - public String get_storage() { + public String getStorage() { return _storage; } - public void set_storage(String _storage) { - this._storage = _storage; + public void setStorage(String storage) { + this._storage = storage; } - public long get_activeFactories() { + public long getActiveFactories() { return _activeFactories; } - public void set_activeFactories(long _activeFactories) { - this._activeFactories = _activeFactories; + public void setActiveFactories(long activeFactories) { + this._activeFactories = activeFactories; 
} - public long get_redundantFactories() { + public long getRedundantFactories() { return _redundantFactories; } - public void set_redundantFactories(long _redundantFactories) { - this._redundantFactories = _redundantFactories; + public void setRedundantFactories(long redundantFactories) { + this._redundantFactories = redundantFactories; } - public List get_tags() { + public List getTags() { return _tags; } - public void set_tags(List _tags) { - this._tags = _tags; + public void setTags(List tags) { + this._tags = tags; } -} + + public int getFlagSetsTotal() { + return _flagSetsTotal; + } + + public void setFlagSetsTotal(int flagSetsTotal) { + this._flagSetsTotal = flagSetsTotal; + } + + public int getFlagSetsInvalid() { + return _flagSetsInvalid; + } + + public void setFlagSetsInvalid(int flagSetsInvalid) { + this._flagSetsInvalid = flagSetsInvalid; + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/storages/pluggable/domain/PrefixAdapter.java b/client/src/main/java/io/split/storages/pluggable/domain/PrefixAdapter.java index 8787aa998..a785fbe74 100644 --- a/client/src/main/java/io/split/storages/pluggable/domain/PrefixAdapter.java +++ b/client/src/main/java/io/split/storages/pluggable/domain/PrefixAdapter.java @@ -12,11 +12,16 @@ public class PrefixAdapter { private static final String EVENTS = "events"; private static final String IMPRESSIONS = "impressions"; private static final String SEGMENT = "segment."; + private static final String COUNT = ".count"; + private static final String UNIQUE_KEYS = "uniquekeys"; private static final String TILL = "till"; private static final String TELEMETRY = "telemetry."; private static final String LATENCIES = "latencies"; private static final String EXCEPTIONS = "exceptions"; private static final String INIT = "init"; + private static final String FLAG_SET = "flagSet"; + private static final String RULE_BASED_SEGMENT_PREFIX = "rbsegment"; + private static final String RULE_BASED_SEGMENTS_PREFIX = 
"rbsegments"; public static String buildSplitKey(String name) { return String.format(DEFAULT_PREFIX+ SPLIT_PREFIX +"%s", name); @@ -30,6 +35,26 @@ public static String buildGetAllSplit() { return DEFAULT_PREFIX+SPLIT_PREFIX+"*"; } + public static String buildSplitsPrefix(){ + return DEFAULT_PREFIX+SPLIT_PREFIX; + } + + public static String buildRuleBasedSegmentKey(String name) { + return String.format(DEFAULT_PREFIX+ RULE_BASED_SEGMENT_PREFIX +"%s", name); + } + + public static String buildRuleBasedSegmentsPrefix(){ + return DEFAULT_PREFIX+RULE_BASED_SEGMENT_PREFIX; + } + + public static String buildRuleBasedSegmentChangeNumber() { + return DEFAULT_PREFIX+RULE_BASED_SEGMENTS_PREFIX+"till"; + } + + public static String buildGetAllRuleBasedSegment() { + return DEFAULT_PREFIX+RULE_BASED_SEGMENT_PREFIX+"*"; + } + public static String buildTrafficTypeExists(String trafficType) { return String.format(DEFAULT_PREFIX+TRAFFIC_TYPE_PREFIX+"%s", trafficType); } @@ -62,15 +87,26 @@ public static String buildSegmentTill(String segmentName) { return String.format(DEFAULT_PREFIX+SEGMENT+"%s."+TILL, segmentName); } - public static String buildTelemetryLatenciesPrefix(String method, int bucketForLatency, String sdkVersion, String machineIp, String machineName) { - return String.format(DEFAULT_PREFIX+TELEMETRY+LATENCIES+"::%s/%s/%s/"+"%s/%d", sdkVersion, machineName, machineIp, method, bucketForLatency); + public static String buildImpressionsCount(){ + return String.format(DEFAULT_PREFIX + IMPRESSIONS + COUNT); + } + + public static String buildUniqueKeys() { + return String.format(DEFAULT_PREFIX + UNIQUE_KEYS); } - public static String buildTelemetryExceptionsPrefix(String method, String sdkVersion, String machineIp, String machineName) { - return String.format(DEFAULT_PREFIX+TELEMETRY+EXCEPTIONS+"::%s/%s/%s/"+"%s", sdkVersion, machineName, machineIp, method); + public static String buildTelemetryLatenciesPrefix() { + return String.format(DEFAULT_PREFIX+TELEMETRY+LATENCIES); } - 
public static String buildTelemetryInit(String sdkVersion, String machineIp, String machineName) { - return String.format(DEFAULT_PREFIX+TELEMETRY+INIT+"::%s/%s/%s", sdkVersion, machineName, machineIp); + public static String buildTelemetryExceptionsPrefix() { + return String.format(DEFAULT_PREFIX+TELEMETRY+EXCEPTIONS); + } + + public static String buildTelemetryInit() { + return String.format(DEFAULT_PREFIX + TELEMETRY + INIT); + } + public static String buildFlagSetPrefix(String set) { + return String.format(DEFAULT_PREFIX + FLAG_SET + ".%s", set); } } diff --git a/client/src/main/java/io/split/storages/pluggable/domain/UserPipelineWrapper.java b/client/src/main/java/io/split/storages/pluggable/domain/UserPipelineWrapper.java new file mode 100644 index 000000000..81ddc691a --- /dev/null +++ b/client/src/main/java/io/split/storages/pluggable/domain/UserPipelineWrapper.java @@ -0,0 +1,48 @@ +package io.split.storages.pluggable.domain; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import pluggable.Pipeline; +import pluggable.Result; + +import java.util.List; + +public class UserPipelineWrapper implements Pipeline{ + + private static final Logger _logger = LoggerFactory.getLogger(UserPipelineWrapper.class); + + private final Pipeline _pipeline; + + + public UserPipelineWrapper(Pipeline pipeline) { + _pipeline = pipeline; + } + + @Override + public List exec() throws Exception { + try{ + return _pipeline.exec(); + } catch (Exception e) { + _logger.warn("Exception calling Pipeline exec", e); + throw e; + } + } + + @Override + public void hIncrement(String key, String field, long value) { + try { + _pipeline.hIncrement(key, field, value); + } catch (Exception e){ + _logger.warn("Exception calling Pipeline hIncrement", e); + } + } + + @Override + public void getMembers(String key) { + try { + _pipeline.getMembers(key); + } catch (Exception e){ + _logger.warn("Exception calling Pipeline getMembers", e); + } + } +} \ No newline at end of file diff --git 
a/client/src/main/java/io/split/storages/pluggable/domain/SafeUserStorageWrapper.java b/client/src/main/java/io/split/storages/pluggable/domain/UserStorageWrapper.java similarity index 78% rename from client/src/main/java/io/split/storages/pluggable/domain/SafeUserStorageWrapper.java rename to client/src/main/java/io/split/storages/pluggable/domain/UserStorageWrapper.java index 011da4a75..dfe6820f1 100644 --- a/client/src/main/java/io/split/storages/pluggable/domain/SafeUserStorageWrapper.java +++ b/client/src/main/java/io/split/storages/pluggable/domain/UserStorageWrapper.java @@ -3,19 +3,21 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import pluggable.CustomStorageWrapper; +import pluggable.HasPipelineSupport; +import pluggable.NotPipelinedImpl; import java.util.List; import java.util.Set; import static com.google.common.base.Preconditions.checkNotNull; -public class SafeUserStorageWrapper implements CustomStorageWrapper { +public class UserStorageWrapper implements CustomStorageWrapper { - private static final Logger _log = LoggerFactory.getLogger(SafeUserStorageWrapper.class); + private static final Logger _log = LoggerFactory.getLogger(UserStorageWrapper.class); private final CustomStorageWrapper _customStorageWrapper; - public SafeUserStorageWrapper(CustomStorageWrapper customStorageWrapper) { + public UserStorageWrapper(CustomStorageWrapper customStorageWrapper) { _customStorageWrapper = checkNotNull(customStorageWrapper); } @@ -51,6 +53,16 @@ public void set(String key, String item) { } } + @Override + public void hSet(String key, String field, String item) { + try { + _customStorageWrapper.hSet(key, field, item); + } + catch (Exception e) { + _log.error(String.format("error updating key by field '%s' from storage. 
Error: '%s'", key, e.getMessage())); + } + } + @Override public void delete(List keys) { try { @@ -94,6 +106,17 @@ public long increment(String key, long value) { } } + @Override + public long hIncrement(String key, String field, long value){ + try { + return _customStorageWrapper.hIncrement(key, field, value); + } + catch (Exception e) { + _log.error(String.format("error incrementing key by field '%s' from storage. Error: '%s'", key, e.getMessage())); + return 0L; + } + } + @Override public long decrement(String key, long value) { try { @@ -180,6 +203,17 @@ public List getItems(List keys){ } } + @Override + public Set getMembers(String key) { + try { + return _customStorageWrapper.getMembers(key); + } + catch (Exception e) { + _log.error(String.format("error getting set members with key '%s' from storage. Error: '%s'", key, e.getMessage())); + return null; + } + } + @Override public boolean connect(){ try { @@ -201,4 +235,10 @@ public boolean disconnect(){ return false; } } + + public UserPipelineWrapper pipeline() throws Exception { + return (_customStorageWrapper instanceof HasPipelineSupport) + ? 
new UserPipelineWrapper(((HasPipelineSupport) _customStorageWrapper).pipeline()) + : new UserPipelineWrapper(new NotPipelinedImpl(_customStorageWrapper)); + } } diff --git a/client/src/main/java/io/split/storages/pluggable/synchronizer/TelemetryConsumerSubmitter.java b/client/src/main/java/io/split/storages/pluggable/synchronizer/TelemetryConsumerSubmitter.java index d55cc709d..2e80abdfc 100644 --- a/client/src/main/java/io/split/storages/pluggable/synchronizer/TelemetryConsumerSubmitter.java +++ b/client/src/main/java/io/split/storages/pluggable/synchronizer/TelemetryConsumerSubmitter.java @@ -2,15 +2,18 @@ import com.google.common.annotations.VisibleForTesting; import io.split.client.SplitClientConfig; +import io.split.client.dtos.UniqueKeys; import io.split.client.utils.Json; import io.split.client.utils.SDKMetadata; import io.split.storages.enums.OperationMode; import io.split.storages.pluggable.domain.ConfigConsumer; import io.split.storages.pluggable.domain.PrefixAdapter; -import io.split.storages.pluggable.domain.SafeUserStorageWrapper; +import io.split.storages.pluggable.domain.UserStorageWrapper; import io.split.telemetry.synchronizer.TelemetrySynchronizer; import pluggable.CustomStorageWrapper; +import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; @@ -20,17 +23,18 @@ public class TelemetryConsumerSubmitter implements TelemetrySynchronizer { private static final String STORAGE = "PLUGGABLE"; - private final SafeUserStorageWrapper _safeUserStorageWrapper; + private final UserStorageWrapper _userStorageWrapper; private final SDKMetadata _sdkMetadata; public TelemetryConsumerSubmitter(CustomStorageWrapper customStorageWrapper, SDKMetadata sdkMetadata) { - _safeUserStorageWrapper = new SafeUserStorageWrapper(checkNotNull(customStorageWrapper)); + _userStorageWrapper = new UserStorageWrapper(checkNotNull(customStorageWrapper)); _sdkMetadata = checkNotNull(sdkMetadata); } @Override public void 
synchronizeConfig(SplitClientConfig config, long timeUntilReady, Map factoryInstances, List tags) { - _safeUserStorageWrapper.set(PrefixAdapter.buildTelemetryInit(_sdkMetadata.getSdkVersion(), _sdkMetadata.getMachineIp(), _sdkMetadata.getMachineName()), Json.toJson(generateConfig(config, factoryInstances, tags))); + String key = String.format("%s/%s/%s", _sdkMetadata.getSdkVersion(), _sdkMetadata.getMachineName(), _sdkMetadata.getMachineIp()); + _userStorageWrapper.hSet(PrefixAdapter.buildTelemetryInit(), key, Json.toJson(generateConfig(config, factoryInstances, tags))); } @Override @@ -39,22 +43,31 @@ public void synchronizeStats() { } @Override - public void finalSynchronization(long splitCount, long segmentCount, long segmentKeyCount) { + public void synchronizeUniqueKeys(UniqueKeys uniqueKeys) { + List uniqueKeysToSend; + for (UniqueKeys.UniqueKey uniqueKey: uniqueKeys.uniqueKeys) { + uniqueKeysToSend = new ArrayList<>(Arrays.asList(Json.toJson(uniqueKey))); + _userStorageWrapper.pushItems(PrefixAdapter.buildUniqueKeys(), uniqueKeysToSend); + } + } + + @Override + public void finalSynchronization() { //No-Op } @VisibleForTesting ConfigConsumer generateConfig(SplitClientConfig splitClientConfig, Map factoryInstances, List tags) { ConfigConsumer config = new ConfigConsumer(); - config.set_operationMode(splitClientConfig.operationMode()== OperationMode.STANDALONE ? 0 : 1); - config.set_storage(STORAGE); - config.set_activeFactories(factoryInstances.size()); - config.set_redundantFactories(getRedundantFactories(factoryInstances)); - config.set_tags(tags.size() < 10 ? tags : tags.subList(0, 10)); + config.setOperationMode(splitClientConfig.operationMode()== OperationMode.STANDALONE ? 0 : 1); + config.setStorage(STORAGE); + config.setActiveFactories(factoryInstances.size()); + config.setRedundantFactories(getRedundantFactories(factoryInstances)); + config.setTags(tags.size() < 10 ? 
tags : tags.subList(0, 10)); return config; } private long getRedundantFactories(Map factoryInstances) { return factoryInstances.values().stream().mapToLong(l -> l - 1L).sum(); } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/telemetry/domain/Config.java b/client/src/main/java/io/split/telemetry/domain/Config.java index 1844b8e98..f5369ea7f 100644 --- a/client/src/main/java/io/split/telemetry/domain/Config.java +++ b/client/src/main/java/io/split/telemetry/domain/Config.java @@ -21,7 +21,9 @@ public class Config { /* package private */ static final String FIELD_BUR_TIMEOUTS = "bT"; /* package private */ static final String FIELD_NON_READY_USAGES = "nR"; /* package private */ static final String FIELD_INTEGRATIONS = "i"; - /* package private */ static final String FIELD__TAGS = "t"; + /* package private */ static final String FIELD_TAGS = "t"; + /* package private */ static final String FIELD_FLAG_SETS_TOTAL = "fsT"; + /* package private */ static final String FIELD_FLAG_SETS_INVALID = "fsI"; @SerializedName(FIELD_OPERATION_MODE) private int _operationMode; @@ -55,142 +57,162 @@ public class Config { private long _nonReadyUsages; @SerializedName(FIELD_INTEGRATIONS) private List _integrations; - @SerializedName(FIELD__TAGS) + @SerializedName(FIELD_TAGS) private List _tags; + @SerializedName(FIELD_FLAG_SETS_TOTAL) + private int _flagSetsTotal; + @SerializedName(FIELD_FLAG_SETS_INVALID) + private int _flagSetsInvalid; - public int get_operationMode() { + public int getOperationMode() { return _operationMode; } - public void set_operationMode(int _operationMode) { - this._operationMode = _operationMode; + public void setOperationMode(int operationMode) { + this._operationMode = operationMode; } - public boolean is_streamingEnabled() { + public boolean isStreamingEnabled() { return _streamingEnabled; } - public void set_streamingEnabled(boolean _streamingEnabled) { - this._streamingEnabled = _streamingEnabled; + public void 
setStreamingEnabled(boolean streamingEnabled) { + this._streamingEnabled = streamingEnabled; } - public String get_storage() { + public String getStorage() { return _storage; } - public void set_storage(String _storage) { - this._storage = _storage; + public void setStorage(String storage) { + this._storage = storage; } - public Rates get_rates() { + public Rates getRates() { return _rates; } - public void set_rates(Rates _rates) { - this._rates = _rates; + public void setRates(Rates rates) { + this._rates = rates; } - public URLOverrides get_urlOverrides() { + public URLOverrides getUrlOverrides() { return _urlOverrides; } - public void set_urlOverrides(URLOverrides _urlOverrides) { - this._urlOverrides = _urlOverrides; + public void setUrlOverrides(URLOverrides urlOverrides) { + this._urlOverrides = urlOverrides; } - public long get_impressionsQueueSize() { + public long getImpressionsQueueSize() { return _impressionsQueueSize; } - public void set_impressionsQueueSize(long _impressionsQueueSize) { - this._impressionsQueueSize = _impressionsQueueSize; + public void setImpressionsQueueSize(long impressionsQueueSize) { + this._impressionsQueueSize = impressionsQueueSize; } - public long get_eventsQueueSize() { + public long getEventsQueueSize() { return _eventsQueueSize; } - public void set_eventsQueueSize(long _eventsQueueSize) { - this._eventsQueueSize = _eventsQueueSize; + public void setEventsQueueSize(long eventsQueueSize) { + this._eventsQueueSize = eventsQueueSize; } - public int get_impressionsMode() { + public int getImpressionsMode() { return _impressionsMode; } - public void set_impressionsMode(int _impressionsMode) { - this._impressionsMode = _impressionsMode; + public void setImpressionsMode(int impressionsMode) { + this._impressionsMode = impressionsMode; } - public boolean is_impressionsListenerEnabled() { + public boolean isImpressionsListenerEnabled() { return _impressionsListenerEnabled; } - public void set_impressionsListenerEnabled(boolean 
_impressionsListenerEnabled) { - this._impressionsListenerEnabled = _impressionsListenerEnabled; + public void setImpressionsListenerEnabled(boolean impressionsListenerEnabled) { + this._impressionsListenerEnabled = impressionsListenerEnabled; } - public boolean is_httpProxyDetected() { + public boolean isHttpProxyDetected() { return _httpProxyDetected; } - public void set_httpProxyDetected(boolean _httpProxyDetected) { - this._httpProxyDetected = _httpProxyDetected; + public void setHttpProxyDetected(boolean httpProxyDetected) { + this._httpProxyDetected = httpProxyDetected; } - public long get_activeFactories() { + public long getActiveFactories() { return _activeFactories; } - public void set_activeFactories(long _activeFactories) { - this._activeFactories = _activeFactories; + public void setActiveFactories(long activeFactories) { + this._activeFactories = activeFactories; } - public long get_redundantFactories() { + public long getRedundantFactories() { return _redundantFactories; } - public void set_redundantFactories(long _redundantFactories) { - this._redundantFactories = _redundantFactories; + public void setRedundantFactories(long redundantFactories) { + this._redundantFactories = redundantFactories; } - public long get_timeUntilReady() { + public long getTimeUntilReady() { return _timeUntilReady; } - public void set_timeUntilReady(long _timeUntilReady) { - this._timeUntilReady = _timeUntilReady; + public void setTimeUntilReady(long timeUntilReady) { + this._timeUntilReady = timeUntilReady; } - public long get_burTimeouts() { + public long getBurTimeouts() { return _burTimeouts; } - public void set_burTimeouts(long _burTimeouts) { - this._burTimeouts = _burTimeouts; + public void setBurTimeouts(long burTimeouts) { + this._burTimeouts = burTimeouts; } - public long get_nonReadyUsages() { + public long getNonReadyUsages() { return _nonReadyUsages; } - public void set_nonReadyUsages(long _nonReadyUsages) { - this._nonReadyUsages = _nonReadyUsages; + public 
void setNonReadyUsages(long nonReadyUsages) { + this._nonReadyUsages = nonReadyUsages; } - public List get_integrations() { + public List getIntegrations() { return _integrations; } - public void set_integrations(List _integrations) { - this._integrations = _integrations; + public void setIntegrations(List integrations) { + this._integrations = integrations; } - public List get_tags() { + public List getTags() { return _tags; } - public void set_tags(List _tags) { - this._tags = _tags; + public void setTags(List tags) { + this._tags = tags; + } + + public int getFlagSetsTotal() { + return _flagSetsTotal; + } + + public int getFlagSetsInvalid() { + return _flagSetsInvalid; + } + + public void setFlagSetsTotal(int flagSetsTotal) { + this._flagSetsTotal = flagSetsTotal; + } + + public void setFlagSetsInvalid(int flagSetsInvalid) { + this._flagSetsInvalid = flagSetsInvalid; } } diff --git a/client/src/main/java/io/split/telemetry/domain/HTTPErrors.java b/client/src/main/java/io/split/telemetry/domain/HTTPErrors.java index dac746117..69c85ad85 100644 --- a/client/src/main/java/io/split/telemetry/domain/HTTPErrors.java +++ b/client/src/main/java/io/split/telemetry/domain/HTTPErrors.java @@ -39,59 +39,59 @@ public HTTPErrors() { _telemetry = new ConcurrentHashMap<>(); } - public Map get_splits() { + public Map getSplits() { return _splits; } - public void set_splits(Map _splits) { + public void setSplits(Map _splits) { this._splits = _splits; } - public Map get_segments() { + public Map getSegments() { return _segments; } - public void set_segments(Map _segments) { + public void setSegments(Map _segments) { this._segments = _segments; } - public Map get_impressions() { + public Map getImpressions() { return _impressions; } - public void set_impressions(Map _impressions) { + public void setImpressions(Map _impressions) { this._impressions = _impressions; } - public Map get_events() { + public Map getEvents() { return _events; } - public void set_events(Map _events) { + 
public void setEvents(Map _events) { this._events = _events; } - public Map get_token() { + public Map getToken() { return _token; } - public void set_token(Map _token) { + public void setToken(Map _token) { this._token = _token; } - public Map get_telemetry() { + public Map getTelemetry() { return _telemetry; } - public void set_telemetry(Map _telemetry) { + public void setTelemetry(Map _telemetry) { this._telemetry = _telemetry; } - public Map get_impressionsCount() { + public Map getImpressionsCount() { return _impressionsCount; } - public void set_impressionsCount(Map _impressionsCount) { + public void setImpressionsCount(Map _impressionsCount) { this._impressionsCount = _impressionsCount; } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/telemetry/domain/HTTPLatencies.java b/client/src/main/java/io/split/telemetry/domain/HTTPLatencies.java index 0e0791ed9..1b7ec2f8e 100644 --- a/client/src/main/java/io/split/telemetry/domain/HTTPLatencies.java +++ b/client/src/main/java/io/split/telemetry/domain/HTTPLatencies.java @@ -39,59 +39,59 @@ public HTTPLatencies() { _telemetry = new ArrayList<>(); } - public List get_splits() { + public List getSplits() { return _splits; } - public void set_splits(List _splits) { + public void setSplits(List _splits) { this._splits = _splits; } - public List get_segments() { + public List getSegments() { return _segments; } - public void set_segments(List _segments) { + public void setSegments(List _segments) { this._segments = _segments; } - public List get_impressions() { + public List getImpressions() { return _impressions; } - public void set_impressions(List _impressions) { + public void setImpressions(List _impressions) { this._impressions = _impressions; } - public List get_events() { + public List getEvents() { return _events; } - public void set_events(List _events) { + public void setEvents(List _events) { this._events = _events; } - public List get_token() { + public List getToken() { return 
_token; } - public void set_token(List _token) { + public void setToken(List _token) { this._token = _token; } - public List get_telemetry() { + public List getTelemetry() { return _telemetry; } - public void set_telemetry(List _telemetry) { + public void setTelemetry(List _telemetry) { this._telemetry = _telemetry; } - public List get_impressionsCount() { + public List getImpressionsCount() { return _impressionsCount; } - public void set_impressionsCount(List _impressionsCount) { + public void setImpressionsCount(List _impressionsCount) { this._impressionsCount = _impressionsCount; } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/telemetry/domain/LastSynchronization.java b/client/src/main/java/io/split/telemetry/domain/LastSynchronization.java index 59586562e..74889e1f9 100644 --- a/client/src/main/java/io/split/telemetry/domain/LastSynchronization.java +++ b/client/src/main/java/io/split/telemetry/domain/LastSynchronization.java @@ -26,59 +26,59 @@ public class LastSynchronization { @SerializedName(FIELD_TELEMETRY) private long _telemetry; - public long get_splits() { + public long getSplits() { return _splits; } - public void set_splits(long _splits) { + public void setSplits(long _splits) { this._splits = _splits; } - public long get_segments() { + public long getSegments() { return _segments; } - public void set_segments(long _segments) { + public void setSegments(long _segments) { this._segments = _segments; } - public long get_impressions() { + public long getImpressions() { return _impressions; } - public void set_impressions(long _impressions) { + public void setImpressions(long _impressions) { this._impressions = _impressions; } - public long get_events() { + public long getEvents() { return _events; } - public void set_events(long _events) { + public void setEvents(long _events) { this._events = _events; } - public long get_token() { + public long getToken() { return _token; } - public void set_token(long _token) { + public 
void setToken(long _token) { this._token = _token; } - public long get_telemetry() { + public long getTelemetry() { return _telemetry; } - public void set_telemetry(long _telemetry) { + public void setTelemetry(long _telemetry) { this._telemetry = _telemetry; } - public long get_impressionsCount() { + public long getImpressionsCount() { return _impressionsCount; } - public void set_impressionsCount(long _impressionsCount) { + public void setImpressionsCount(long _impressionsCount) { this._impressionsCount = _impressionsCount; } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/telemetry/domain/MethodExceptions.java b/client/src/main/java/io/split/telemetry/domain/MethodExceptions.java index c6d6561be..c5298e8d5 100644 --- a/client/src/main/java/io/split/telemetry/domain/MethodExceptions.java +++ b/client/src/main/java/io/split/telemetry/domain/MethodExceptions.java @@ -7,6 +7,10 @@ public class MethodExceptions { /* package private */ static final String FIELD_TREATMENTS = "ts"; /* package private */ static final String FIELD_TREATMENT_WITH_CONFIG = "tc"; /* package private */ static final String FIELD_TREATMENTS_WITH_CONFIG = "tcs"; + /* package private */ static final String FIELD_TREATMENT_BY_FLAG_SET = "tf"; + /* package private */static final String FIELD_TREATMENT_BY_FLAG_SETS = "tfs"; + /* package private */static final String FIELD_TREATMENT_WITH_CONFIG_BY_FLAG_SET = "tcf"; + /* package private */static final String FIELD_TREATMENT_WITH_CONFIG_BY_FLAG_SETS = "tcfs"; /* package private */ static final String FIELD_TRACK = "tr"; @SerializedName(FIELD_TREATMENT) @@ -17,46 +21,86 @@ public class MethodExceptions { private long _treatmentWithConfig; @SerializedName(FIELD_TREATMENTS_WITH_CONFIG) private long _treatmentsWithConfig; + @SerializedName(FIELD_TREATMENT_BY_FLAG_SET) + private Long _treatmentByFlagSet; + @SerializedName(FIELD_TREATMENT_BY_FLAG_SETS) + private Long _treatmentByFlagSets; + 
@SerializedName(FIELD_TREATMENT_WITH_CONFIG_BY_FLAG_SET) + private Long _treatmentWithConfigByFlagSet; + @SerializedName(FIELD_TREATMENT_WITH_CONFIG_BY_FLAG_SETS) + private Long _treatmentWithConfigByFlagSets; @SerializedName(FIELD_TRACK) private long _track; - public long get_treatment() { + public long getTreatment() { return _treatment; } - public void set_treatment(long _treatment) { - this._treatment = _treatment; + public void setTreatment(long treatment) { + this._treatment = treatment; } - public long get_treatments() { + public long getTreatments() { return _treatments; } - public void set_treatments(long _treatments) { - this._treatments = _treatments; + public void setTreatments(long treatments) { + this._treatments = treatments; } - public long get_treatmentsWithConfig() { + public long getTreatmentsWithConfig() { return _treatmentsWithConfig; } - public void set_treatmentsWithConfig(long _treatmentsWithConfig) { - this._treatmentsWithConfig = _treatmentsWithConfig; + public void setTreatmentsWithConfig(long treatmentsWithConfig) { + this._treatmentsWithConfig = treatmentsWithConfig; } - public long get_treatmentWithConfig() { + public long getTreatmentWithConfig() { return _treatmentWithConfig; } - public void set_treatmentWithConfig(long _treatmentWithConfig) { - this._treatmentWithConfig = _treatmentWithConfig; + public void setTreatmentWithConfig(long treatmentWithConfig) { + this._treatmentWithConfig = treatmentWithConfig; } - public long get_track() { + public long getTrack() { return _track; } - public void set_track(long _track) { - this._track = _track; + public void setTrack(long track) { + this._track = track; } -} + + public long getTreatmentByFlagSet() { + return _treatmentByFlagSet; + } + + public long getTreatmentByFlagSets() { + return _treatmentByFlagSets; + } + + public long getTreatmentWithConfigByFlagSet() { + return _treatmentWithConfigByFlagSet; + } + + public long getTreatmentWithConfigByFlagSets() { + return 
_treatmentWithConfigByFlagSets; + } + + public void setTreatmentByFlagSet(Long treatmentByFlagSet) { + this._treatmentByFlagSet = treatmentByFlagSet; + } + + public void setTreatmentByFlagSets(Long treatmentByFlagSets) { + this._treatmentByFlagSets = treatmentByFlagSets; + } + + public void setTreatmentWithConfigByFlagSet(Long treatmentWithConfigByFlagSet) { + this._treatmentWithConfigByFlagSet = treatmentWithConfigByFlagSet; + } + + public void setTreatmentWithConfigByFlagSets(Long treatmentWithConfigByFlagSets) { + this._treatmentWithConfigByFlagSets = treatmentWithConfigByFlagSets; + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/telemetry/domain/MethodLatencies.java b/client/src/main/java/io/split/telemetry/domain/MethodLatencies.java index 21aae636c..bed8df300 100644 --- a/client/src/main/java/io/split/telemetry/domain/MethodLatencies.java +++ b/client/src/main/java/io/split/telemetry/domain/MethodLatencies.java @@ -10,6 +10,10 @@ public class MethodLatencies { /* package private */ static final String FIELD_TREATMENTS = "ts"; /* package private */ static final String FIELD_TREATMENT_WITH_CONFIG = "tc"; /* package private */ static final String FIELD_TREATMENTS_WITH_CONFIG = "tcs"; + /* package private */ static final String FIELD_TREATMENT_BY_FLAG_SET = "tf"; + /* package private */static final String FIELD_TREATMENT_BY_FLAG_SETS = "tfs"; + /* package private */static final String FIELD_TREATMENT_WITH_CONFIG_BY_FLAG_SET = "tcf"; + /* package private */static final String FIELD_TREATMENT_WITH_CONFIG_BY_FLAG_SETS = "tcfs"; /* package private */ static final String FIELD_TRACK = "tr"; @SerializedName(FIELD_TREATMENT) @@ -20,6 +24,14 @@ public class MethodLatencies { private List _treatmentWithConfig; @SerializedName(FIELD_TREATMENTS_WITH_CONFIG) private List _treatmentsWithConfig; + @SerializedName(FIELD_TREATMENT_BY_FLAG_SET) + private List _treatmentByFlagSet; + @SerializedName(FIELD_TREATMENT_BY_FLAG_SETS) + private List 
_treatmentByFlagSets; + @SerializedName(FIELD_TREATMENT_WITH_CONFIG_BY_FLAG_SET) + private List _treatmentWithConfigByFlagSet; + @SerializedName(FIELD_TREATMENT_WITH_CONFIG_BY_FLAG_SETS) + private List _treatmentWithConfigByFlagSets; @SerializedName(FIELD_TRACK) private List _track; @@ -28,46 +40,82 @@ public MethodLatencies() { _treatments = new ArrayList<>(); _treatmentWithConfig = new ArrayList<>(); _treatmentsWithConfig = new ArrayList<>(); + _treatmentByFlagSet = new ArrayList<>(); + _treatmentByFlagSets = new ArrayList<>(); + _treatmentWithConfigByFlagSet = new ArrayList<>(); + _treatmentWithConfigByFlagSets = new ArrayList<>(); _track = new ArrayList<>(); } - public List get_treatment() { + public List getTreatment() { return _treatment; } - public void set_treatment(List _treatment) { - this._treatment = _treatment; + public void setTreatment(List treatment) { + this._treatment = treatment; } - public List get_treatments() { + public List getTreatments() { return _treatments; } - public void set_treatments(List _treatments) { - this._treatments = _treatments; + public void setTreatments(List treatments) { + this._treatments = treatments; } - public List get_treatmentsWithConfig() { + public List getTreatmentsWithConfig() { return _treatmentsWithConfig; } - public void set_treatmentsWithConfig(List _treatmentsWithConfig) { - this._treatmentsWithConfig = _treatmentsWithConfig; + public void setTreatmentsWithConfig(List treatmentsWithConfig) { + this._treatmentsWithConfig = treatmentsWithConfig; } - public List get_treatmentWithConfig() { + public List getTreatmentWithConfig() { return _treatmentWithConfig; } - public void set_treatmentWithConfig(List _treatmentWithConfig) { - this._treatmentWithConfig = _treatmentWithConfig; + public void setTreatmentWithConfig(List treatmentWithConfig) { + this._treatmentWithConfig = treatmentWithConfig; } - public List get_track() { + public List getTrack() { return _track; } - public void set_track(List _track) { - 
this._track = _track; + public void setTrack(List track) { + this._track = track; } -} + + public List getTreatmentByFlagSet() { + return _treatmentByFlagSet; + } + + public List getTreatmentByFlagSets() { + return _treatmentByFlagSets; + } + + public void setTreatmentByFlagSet(List treatmentByFlagSet) { + this._treatmentByFlagSet = treatmentByFlagSet; + } + + public void setTreatmentByFlagSets(List treatmentByFlagSets) { + this._treatmentByFlagSets = treatmentByFlagSets; + } + + public List getTreatmentWithConfigByFlagSet() { + return _treatmentWithConfigByFlagSet; + } + + public void setTreatmentWithConfigByFlagSet(List treatmentWithConfigByFlagSet) { + this._treatmentWithConfigByFlagSet = treatmentWithConfigByFlagSet; + } + + public void setTreatmentWithConfigByFlagSets(List treatmentWithConfigByFlagSets) { + this._treatmentWithConfigByFlagSets = treatmentWithConfigByFlagSets; + } + + public List getTreatmentWithConfigByFlagSets() { + return _treatmentWithConfigByFlagSets; + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/telemetry/domain/Rates.java b/client/src/main/java/io/split/telemetry/domain/Rates.java index e80d26079..0ae5efdac 100644 --- a/client/src/main/java/io/split/telemetry/domain/Rates.java +++ b/client/src/main/java/io/split/telemetry/domain/Rates.java @@ -20,43 +20,43 @@ public class Rates { @SerializedName(FIELD_TELEMETRY) private long _telemetry; - public long get_splits() { + public long getSplits() { return _splits; } - public void set_splits(long _splits) { + public void setSplits(long _splits) { this._splits = _splits; } - public long get_segments() { + public long getSegments() { return _segments; } - public void set_segments(long _segments) { + public void setSegments(long _segments) { this._segments = _segments; } - public long get_impressions() { + public long getImpressions() { return _impressions; } - public void set_impressions(long _impressions) { + public void setImpressions(long _impressions) { 
this._impressions = _impressions; } - public long get_events() { + public long getEvents() { return _events; } - public void set_events(long _events) { + public void setEvents(long _events) { this._events = _events; } - public long get_telemetry() { + public long getTelemetry() { return _telemetry; } - public void set_telemetry(long _telemetry) { + public void setTelemetry(long _telemetry) { this._telemetry = _telemetry; } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/telemetry/domain/Stats.java b/client/src/main/java/io/split/telemetry/domain/Stats.java index 8baec7261..55882cdc9 100644 --- a/client/src/main/java/io/split/telemetry/domain/Stats.java +++ b/client/src/main/java/io/split/telemetry/domain/Stats.java @@ -23,6 +23,7 @@ public class Stats { /* package private */ static final String FIELD_EVENTS_DROPPED = "eD"; /* package private */ static final String FIELD_STREAMING_EVENT = "sE"; /* package private */ static final String FIELD_TAGS = "t"; + /* package private */ static final String FIELD_UPDATES_FROM_SSE = "ufs"; @SerializedName(FIELD_LAST_SYNCHRONIZATION) private LastSynchronization _lastSynchronization; @@ -60,148 +61,158 @@ public class Stats { private List _streamingEvents; @SerializedName(FIELD_TAGS) private List _tags; + @SerializedName(FIELD_UPDATES_FROM_SSE) + private UpdatesFromSSE _updatesFromSSE; - public LastSynchronization get_lastSynchronization() { + public LastSynchronization getLastSynchronization() { return _lastSynchronization; } - public void set_lastSynchronization(LastSynchronization _lastSynchronization) { - this._lastSynchronization = _lastSynchronization; + public void setLastSynchronization(LastSynchronization lastSynchronization) { + this._lastSynchronization = lastSynchronization; } - public MethodLatencies get_methodLatencies() { + public MethodLatencies getMethodLatencies() { return _methodLatencies; } - public void set_methodLatencies(MethodLatencies _methodLatencies) { - 
this._methodLatencies = _methodLatencies; + public void setMethodLatencies(MethodLatencies methodLatencies) { + this._methodLatencies = methodLatencies; } - public MethodExceptions get_methodExceptions() { + public MethodExceptions getMethodExceptions() { return _methodExceptions; } - public void set_methodExceptions(MethodExceptions _methodExceptions) { - this._methodExceptions = _methodExceptions; + public void setMethodExceptions(MethodExceptions methodExceptions) { + this._methodExceptions = methodExceptions; } - public HTTPErrors get_httpErrors() { + public HTTPErrors getHttpErrors() { return _httpErrors; } - public void set_httpErrors(HTTPErrors _httpErrors) { - this._httpErrors = _httpErrors; + public void setHttpErrors(HTTPErrors httpErrors) { + this._httpErrors = httpErrors; } - public HTTPLatencies get_httpLatencies() { + public HTTPLatencies getHttpLatencies() { return _httpLatencies; } - public void set_httpLatencies(HTTPLatencies _httpLatencies) { - this._httpLatencies = _httpLatencies; + public void setHttpLatencies(HTTPLatencies httpLatencies) { + this._httpLatencies = httpLatencies; } - public long get_tokenRefreshes() { + public long getTokenRefreshes() { return _tokenRefreshes; } - public void set_tokenRefreshes(long _tokenRefreshes) { - this._tokenRefreshes = _tokenRefreshes; + public void setTokenRefreshes(long tokenRefreshes) { + this._tokenRefreshes = tokenRefreshes; } - public long get_authRejections() { + public long getAuthRejections() { return _authRejections; } - public void set_authRejections(long _authRejections) { - this._authRejections = _authRejections; + public void setAuthRejections(long authRejections) { + this._authRejections = authRejections; } - public long get_impressionsQueued() { + public long getImpressionsQueued() { return _impressionsQueued; } - public void set_impressionsQueued(long _impressionsQueued) { - this._impressionsQueued = _impressionsQueued; + public void setImpressionsQueued(long impressionsQueued) { + 
this._impressionsQueued = impressionsQueued; } - public long get_impressionsDeduped() { + public long getImpressionsDeduped() { return _impressionsDeduped; } - public void set_impressionsDeduped(long _impressionsDeduped) { - this._impressionsDeduped = _impressionsDeduped; + public void setImpressionsDeduped(long impressionsDeduped) { + this._impressionsDeduped = impressionsDeduped; } - public long get_impressionsDropped() { + public long getImpressionsDropped() { return _impressionsDropped; } - public void set_impressionsDropped(long _impressionsDropped) { - this._impressionsDropped = _impressionsDropped; + public void setImpressionsDropped(long impressionsDropped) { + this._impressionsDropped = impressionsDropped; } - public long get_splitCount() { + public long getSplitCount() { return _splitCount; } - public void set_splitCount(long _splitCount) { - this._splitCount = _splitCount; + public void setSplitCount(long splitCount) { + this._splitCount = splitCount; } - public long get_segmentCount() { + public long getSegmentCount() { return _segmentCount; } - public void set_segmentCount(long _segmentCount) { - this._segmentCount = _segmentCount; + public void setSegmentCount(long segmentCount) { + this._segmentCount = segmentCount; } - public long get_segmentKeyCount() { + public long getSegmentKeyCount() { return _segmentKeyCount; } - public void set_segmentKeyCount(long _segmentKeyCount) { - this._segmentKeyCount = _segmentKeyCount; + public void setSegmentKeyCount(long segmentKeyCount) { + this._segmentKeyCount = segmentKeyCount; } - public long get_sessionLengthMs() { + public long getSessionLengthMs() { return _sessionLengthMs; } - public void set_sessionLengthMs(long _sessionLengthMs) { - this._sessionLengthMs = _sessionLengthMs; + public void setSessionLengthMs(long sessionLengthMs) { + this._sessionLengthMs = sessionLengthMs; } - public long get_eventsQueued() { + public long getEventsQueued() { return _eventsQueued; } - public void set_eventsQueued(long 
_eventsQueued) { - this._eventsQueued = _eventsQueued; + public void setEventsQueued(long eventsQueued) { + this._eventsQueued = eventsQueued; } - public long get_eventsDropped() { + public long getEventsDropped() { return _eventsDropped; } - public void set_eventsDropped(long _eventsDropped) { - this._eventsDropped = _eventsDropped; + public void setEventsDropped(long eventsDropped) { + this._eventsDropped = eventsDropped; } - public List get_streamingEvents() { + public List getStreamingEvents() { return _streamingEvents; } - public void set_streamingEvents(List _streamingEvents) { - this._streamingEvents = _streamingEvents; + public void setStreamingEvents(List streamingEvents) { + this._streamingEvents = streamingEvents; } - public List get_tags() { + public List getTags() { return _tags; } - public void set_tags(List _tags) { - this._tags = _tags; + public void setTags(List tags) { + this._tags = tags; } -} + + public UpdatesFromSSE getUpdatesFromSSE() { + return _updatesFromSSE; + } + + public void setUpdatesFromSSE(UpdatesFromSSE updatesFromSSE) { + this._updatesFromSSE = updatesFromSSE; + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/telemetry/domain/StreamingEvent.java b/client/src/main/java/io/split/telemetry/domain/StreamingEvent.java index 161b29762..9fd3ae568 100644 --- a/client/src/main/java/io/split/telemetry/domain/StreamingEvent.java +++ b/client/src/main/java/io/split/telemetry/domain/StreamingEvent.java @@ -20,19 +20,19 @@ public StreamingEvent(int _type, long _data, long _timestamp) { this._timestamp = _timestamp; } - public int get_type() { + public int getType() { return _type; } - public void set_type(int _type) { + public void setType(int _type) { this._type = _type; } - public long get_data() { + public long getData() { return _data; } - public void set_data(long _data) { + public void setData(long _data) { this._data = _data; } @@ -43,4 +43,4 @@ public long getTimestamp() { public void setTimestamp(long 
timestamp) { this._timestamp = timestamp; } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/telemetry/domain/URLOverrides.java b/client/src/main/java/io/split/telemetry/domain/URLOverrides.java index 5813f1d6c..587fdf3dd 100644 --- a/client/src/main/java/io/split/telemetry/domain/URLOverrides.java +++ b/client/src/main/java/io/split/telemetry/domain/URLOverrides.java @@ -20,43 +20,43 @@ public class URLOverrides { @SerializedName(FIELD_TELEMETRY) private boolean _telemetry; - public boolean is_sdk() { + public boolean isSdk() { return _sdk; } - public void set_sdk(boolean _sdk) { + public void setSdk(boolean _sdk) { this._sdk = _sdk; } - public boolean is_events() { + public boolean isEvents() { return _events; } - public void set_events(boolean _events) { + public void setEvents(boolean _events) { this._events = _events; } - public boolean is_auth() { + public boolean isAuth() { return _auth; } - public void set_auth(boolean _auth) { + public void setAuth(boolean _auth) { this._auth = _auth; } - public boolean is_stream() { + public boolean isStream() { return _stream; } - public void set_stream(boolean _stream) { + public void setStream(boolean _stream) { this._stream = _stream; } - public boolean is_telemetry() { + public boolean isTelemetry() { return _telemetry; } - public void set_telemetry(boolean _telemetry) { + public void setTelemetry(boolean _telemetry) { this._telemetry = _telemetry; } -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/telemetry/domain/UpdatesFromSSE.java b/client/src/main/java/io/split/telemetry/domain/UpdatesFromSSE.java new file mode 100644 index 000000000..1f2238f9c --- /dev/null +++ b/client/src/main/java/io/split/telemetry/domain/UpdatesFromSSE.java @@ -0,0 +1,19 @@ +package io.split.telemetry.domain; + +import com.google.gson.annotations.SerializedName; + +public class UpdatesFromSSE { + + /* package private */ static final String FIELD_FEATURE_FLAGS = "sp"; + + 
@SerializedName(FIELD_FEATURE_FLAGS) + private long splits; + + public long getSplits() { + return splits; + } + + public void setSplits(long splits) { + this.splits = splits; + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/telemetry/domain/enums/MethodEnum.java b/client/src/main/java/io/split/telemetry/domain/enums/MethodEnum.java index 8f99527f2..27b72dbfc 100644 --- a/client/src/main/java/io/split/telemetry/domain/enums/MethodEnum.java +++ b/client/src/main/java/io/split/telemetry/domain/enums/MethodEnum.java @@ -5,6 +5,10 @@ public enum MethodEnum { TREATMENTS("getTreatments"), TREATMENT_WITH_CONFIG("getTreatmentWithConfig"), TREATMENTS_WITH_CONFIG("getTreatmentsWithConfig"), + TREATMENTS_BY_FLAG_SET("getTreatmentsByFlagSet"), + TREATMENTS_BY_FLAG_SETS("getTreatmentsByFlagSets"), + TREATMENTS_WITH_CONFIG_BY_FLAG_SET("getTreatmentsWithConfigByFlagSet"), + TREATMENTS_WITH_CONFIG_BY_FLAG_SETS("getTreatmentsWithConfigByFlagSets"), TRACK("track"); private String _method; diff --git a/client/src/main/java/io/split/telemetry/domain/enums/UpdatesFromSSEEnum.java b/client/src/main/java/io/split/telemetry/domain/enums/UpdatesFromSSEEnum.java new file mode 100644 index 000000000..eceb872e6 --- /dev/null +++ b/client/src/main/java/io/split/telemetry/domain/enums/UpdatesFromSSEEnum.java @@ -0,0 +1,5 @@ +package io.split.telemetry.domain.enums; + +public enum UpdatesFromSSEEnum { + SPLITS +} \ No newline at end of file diff --git a/client/src/main/java/io/split/telemetry/storage/InMemoryTelemetryStorage.java b/client/src/main/java/io/split/telemetry/storage/InMemoryTelemetryStorage.java index 0e1ff5eb0..6958f110a 100644 --- a/client/src/main/java/io/split/telemetry/storage/InMemoryTelemetryStorage.java +++ b/client/src/main/java/io/split/telemetry/storage/InMemoryTelemetryStorage.java @@ -1,8 +1,24 @@ package io.split.telemetry.storage; import com.google.common.collect.Maps; -import io.split.telemetry.domain.*; -import 
io.split.telemetry.domain.enums.*; + +import io.split.telemetry.domain.HTTPErrors; +import io.split.telemetry.domain.HTTPLatencies; +import io.split.telemetry.domain.LastSynchronization; +import io.split.telemetry.domain.MethodExceptions; +import io.split.telemetry.domain.MethodLatencies; +import io.split.telemetry.domain.StreamingEvent; +import io.split.telemetry.domain.UpdatesFromSSE; +import io.split.telemetry.domain.enums.EventsDataRecordsEnum; +import io.split.telemetry.domain.enums.FactoryCountersEnum; +import io.split.telemetry.domain.enums.HTTPLatenciesEnum; +import io.split.telemetry.domain.enums.ImpressionsDataTypeEnum; +import io.split.telemetry.domain.enums.LastSynchronizationRecordsEnum; +import io.split.telemetry.domain.enums.MethodEnum; +import io.split.telemetry.domain.enums.PushCountersEnum; +import io.split.telemetry.domain.enums.ResourceEnum; +import io.split.telemetry.domain.enums.SdkRecordsEnum; +import io.split.telemetry.domain.enums.UpdatesFromSSEEnum; import io.split.telemetry.utils.AtomicLongArray; import io.split.telemetry.utils.BucketCalculator; @@ -12,9 +28,8 @@ import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicLong; -import java.util.stream.Collectors; -public class InMemoryTelemetryStorage implements TelemetryStorage{ +public class InMemoryTelemetryStorage implements TelemetryStorage{ public static final int MAX_LATENCY_BUCKET_COUNT = 23; public static final int MAX_STREAMING_EVENTS = 20; public static final int MAX_TAGS = 10; @@ -33,6 +48,7 @@ public class InMemoryTelemetryStorage implements TelemetryStorage{ private final ConcurrentMap _eventsDataRecords = Maps.newConcurrentMap(); private final ConcurrentMap _lastSynchronizationRecords = Maps.newConcurrentMap(); private final ConcurrentMap _sdkRecords = Maps.newConcurrentMap(); + private final ConcurrentMap _updatesFromSSERecords = Maps.newConcurrentMap(); //HTTPErrors private final ConcurrentMap> _httpErrors = 
Maps.newConcurrentMap(); @@ -56,6 +72,7 @@ public InMemoryTelemetryStorage() { initSdkRecords(); initLastSynchronizationRecords(); initEventDataRecords(); + initUpdatesFromSEE(); } @Override @@ -71,11 +88,15 @@ public long getNonReadyUsages() { @Override public MethodExceptions popExceptions() { MethodExceptions exceptions = new MethodExceptions(); - exceptions.set_treatment(_exceptionsCounters.get(MethodEnum.TREATMENT).getAndSet(0L)); - exceptions.set_treatments(_exceptionsCounters.get(MethodEnum.TREATMENTS).getAndSet(0L)); - exceptions.set_treatmentWithConfig(_exceptionsCounters.get(MethodEnum.TREATMENT_WITH_CONFIG).getAndSet(0L)); - exceptions.set_treatmentsWithConfig(_exceptionsCounters.get(MethodEnum.TREATMENTS_WITH_CONFIG).getAndSet(0L)); - exceptions.set_track(_exceptionsCounters.get(MethodEnum.TRACK).getAndSet(0L)); + exceptions.setTreatment(_exceptionsCounters.get(MethodEnum.TREATMENT).getAndSet(0L)); + exceptions.setTreatments(_exceptionsCounters.get(MethodEnum.TREATMENTS).getAndSet(0L)); + exceptions.setTreatmentWithConfig(_exceptionsCounters.get(MethodEnum.TREATMENT_WITH_CONFIG).getAndSet(0L)); + exceptions.setTreatmentsWithConfig(_exceptionsCounters.get(MethodEnum.TREATMENTS_WITH_CONFIG).getAndSet(0L)); + exceptions.setTreatmentByFlagSet(_exceptionsCounters.get(MethodEnum.TREATMENTS_BY_FLAG_SET).getAndSet(0L)); + exceptions.setTreatmentByFlagSets(_exceptionsCounters.get(MethodEnum.TREATMENTS_BY_FLAG_SETS).getAndSet(0L)); + exceptions.setTreatmentWithConfigByFlagSet(_exceptionsCounters.get(MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SET).getAndSet(0L)); + exceptions.setTreatmentWithConfigByFlagSets(_exceptionsCounters.get(MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SETS).getAndSet(0L)); + exceptions.setTrack(_exceptionsCounters.get(MethodEnum.TRACK).getAndSet(0L)); return exceptions; } @@ -83,11 +104,15 @@ public MethodExceptions popExceptions() { @Override public MethodLatencies popLatencies() { MethodLatencies latencies = new MethodLatencies(); - 
latencies.set_treatment(_methodLatencies.get(MethodEnum.TREATMENT).fetchAndClearAll()); - latencies.set_treatments(_methodLatencies.get(MethodEnum.TREATMENTS).fetchAndClearAll()); - latencies.set_treatmentWithConfig(_methodLatencies.get(MethodEnum.TREATMENT_WITH_CONFIG).fetchAndClearAll()); - latencies.set_treatmentsWithConfig(_methodLatencies.get(MethodEnum.TREATMENTS_WITH_CONFIG).fetchAndClearAll()); - latencies.set_track(_methodLatencies.get(MethodEnum.TRACK).fetchAndClearAll()); + latencies.setTreatment(_methodLatencies.get(MethodEnum.TREATMENT).fetchAndClearAll()); + latencies.setTreatments(_methodLatencies.get(MethodEnum.TREATMENTS).fetchAndClearAll()); + latencies.setTreatmentWithConfig(_methodLatencies.get(MethodEnum.TREATMENT_WITH_CONFIG).fetchAndClearAll()); + latencies.setTreatmentsWithConfig(_methodLatencies.get(MethodEnum.TREATMENTS_WITH_CONFIG).fetchAndClearAll()); + latencies.setTreatmentByFlagSet(_methodLatencies.get(MethodEnum.TREATMENTS_BY_FLAG_SET).fetchAndClearAll()); + latencies.setTreatmentByFlagSets(_methodLatencies.get(MethodEnum.TREATMENTS_BY_FLAG_SETS).fetchAndClearAll()); + latencies.setTreatmentWithConfigByFlagSet(_methodLatencies.get(MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SET).fetchAndClearAll()); + latencies.setTreatmentWithConfigByFlagSets(_methodLatencies.get(MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SETS).fetchAndClearAll()); + latencies.setTrack(_methodLatencies.get(MethodEnum.TRACK).fetchAndClearAll()); return latencies; } @@ -127,13 +152,13 @@ public long getEventStats(EventsDataRecordsEnum dataType) { @Override public LastSynchronization getLastSynchronization() { LastSynchronization lastSynchronization = new LastSynchronization(); - lastSynchronization.set_splits(_lastSynchronizationRecords.get(LastSynchronizationRecordsEnum.SPLITS).get()); - lastSynchronization.set_segments(_lastSynchronizationRecords.get(LastSynchronizationRecordsEnum.SEGMENTS).get()); - 
lastSynchronization.set_impressions(_lastSynchronizationRecords.get(LastSynchronizationRecordsEnum.IMPRESSIONS).get()); - lastSynchronization.set_impressionsCount(_lastSynchronizationRecords.get(LastSynchronizationRecordsEnum.IMPRESSIONS_COUNT).get()); - lastSynchronization.set_events(_lastSynchronizationRecords.get(LastSynchronizationRecordsEnum.EVENTS).get()); - lastSynchronization.set_telemetry(_lastSynchronizationRecords.get(LastSynchronizationRecordsEnum.TELEMETRY).get()); - lastSynchronization.set_token(_lastSynchronizationRecords.get(LastSynchronizationRecordsEnum.TOKEN).get()); + lastSynchronization.setSplits(_lastSynchronizationRecords.get(LastSynchronizationRecordsEnum.SPLITS).get()); + lastSynchronization.setSegments(_lastSynchronizationRecords.get(LastSynchronizationRecordsEnum.SEGMENTS).get()); + lastSynchronization.setImpressions(_lastSynchronizationRecords.get(LastSynchronizationRecordsEnum.IMPRESSIONS).get()); + lastSynchronization.setImpressionsCount(_lastSynchronizationRecords.get(LastSynchronizationRecordsEnum.IMPRESSIONS_COUNT).get()); + lastSynchronization.setEvents(_lastSynchronizationRecords.get(LastSynchronizationRecordsEnum.EVENTS).get()); + lastSynchronization.setTelemetry(_lastSynchronizationRecords.get(LastSynchronizationRecordsEnum.TELEMETRY).get()); + lastSynchronization.setToken(_lastSynchronizationRecords.get(LastSynchronizationRecordsEnum.TOKEN).get()); return lastSynchronization; } @@ -141,13 +166,13 @@ public LastSynchronization getLastSynchronization() { @Override public HTTPErrors popHTTPErrors() { HTTPErrors errors = new HTTPErrors(); - errors.set_splits(_httpErrors.get(ResourceEnum.SPLIT_SYNC)); - errors.set_segments(_httpErrors.get(ResourceEnum.SEGMENT_SYNC)); - errors.set_impressions(_httpErrors.get(ResourceEnum.IMPRESSION_SYNC)); - errors.set_impressionsCount(_httpErrors.get(ResourceEnum.IMPRESSION_COUNT_SYNC)); - errors.set_events(_httpErrors.get(ResourceEnum.EVENT_SYNC)); - 
errors.set_telemetry(_httpErrors.get(ResourceEnum.TELEMETRY_SYNC)); - errors.set_token(_httpErrors.get(ResourceEnum.TOKEN_SYNC)); + errors.setSplits(_httpErrors.get(ResourceEnum.SPLIT_SYNC)); + errors.setSegments(_httpErrors.get(ResourceEnum.SEGMENT_SYNC)); + errors.setImpressions(_httpErrors.get(ResourceEnum.IMPRESSION_SYNC)); + errors.setImpressionsCount(_httpErrors.get(ResourceEnum.IMPRESSION_COUNT_SYNC)); + errors.setEvents(_httpErrors.get(ResourceEnum.EVENT_SYNC)); + errors.setTelemetry(_httpErrors.get(ResourceEnum.TELEMETRY_SYNC)); + errors.setToken(_httpErrors.get(ResourceEnum.TOKEN_SYNC)); _httpErrors.clear(); initHttpErrors(); @@ -158,13 +183,13 @@ public HTTPErrors popHTTPErrors() { @Override public HTTPLatencies popHTTPLatencies(){ HTTPLatencies latencies = new HTTPLatencies(); - latencies.set_splits(_httpLatencies.get(HTTPLatenciesEnum.SPLITS).fetchAndClearAll()); - latencies.set_segments(_httpLatencies.get(HTTPLatenciesEnum.SEGMENTS).fetchAndClearAll()); - latencies.set_impressions(_httpLatencies.get(HTTPLatenciesEnum.IMPRESSIONS).fetchAndClearAll()); - latencies.set_impressionsCount(_httpLatencies.get(HTTPLatenciesEnum.IMPRESSIONS_COUNT).fetchAndClearAll()); - latencies.set_events(_httpLatencies.get(HTTPLatenciesEnum.EVENTS).fetchAndClearAll()); - latencies.set_telemetry(_httpLatencies.get(HTTPLatenciesEnum.TELEMETRY).fetchAndClearAll()); - latencies.set_token(_httpLatencies.get(HTTPLatenciesEnum.TOKEN).fetchAndClearAll()); + latencies.setSplits(_httpLatencies.get(HTTPLatenciesEnum.SPLITS).fetchAndClearAll()); + latencies.setSegments(_httpLatencies.get(HTTPLatenciesEnum.SEGMENTS).fetchAndClearAll()); + latencies.setImpressions(_httpLatencies.get(HTTPLatenciesEnum.IMPRESSIONS).fetchAndClearAll()); + latencies.setImpressionsCount(_httpLatencies.get(HTTPLatenciesEnum.IMPRESSIONS_COUNT).fetchAndClearAll()); + latencies.setEvents(_httpLatencies.get(HTTPLatenciesEnum.EVENTS).fetchAndClearAll()); + 
latencies.setTelemetry(_httpLatencies.get(HTTPLatenciesEnum.TELEMETRY).fetchAndClearAll()); + latencies.setToken(_httpLatencies.get(HTTPLatenciesEnum.TOKEN).fetchAndClearAll()); return latencies; } @@ -210,6 +235,13 @@ public long getSessionLength() { return _sdkRecords.get(SdkRecordsEnum.SESSION).get(); } + @Override + public UpdatesFromSSE popUpdatesFromSSE() { + UpdatesFromSSE updatesFromSSE = new UpdatesFromSSE(); + updatesFromSSE.setSplits(_updatesFromSSERecords.get(UpdatesFromSSEEnum.SPLITS).getAndSet(0L)); + return updatesFromSSE; + } + @Override public void addTag(String tag) { synchronized (_tagsLock) { @@ -272,11 +304,20 @@ public void recordSessionLength(long sessionLength) { _sdkRecords.replace(SdkRecordsEnum.SESSION, new AtomicLong(sessionLength)); } + @Override + public void recordUpdatesFromSSE(UpdatesFromSSEEnum updatesFromSSEEnum) { + _updatesFromSSERecords.get(UpdatesFromSSEEnum.SPLITS).incrementAndGet(); + } + private void initMethodLatencies() { _methodLatencies.put(MethodEnum.TREATMENT, new AtomicLongArray(MAX_LATENCY_BUCKET_COUNT)); _methodLatencies.put(MethodEnum.TREATMENTS, new AtomicLongArray(MAX_LATENCY_BUCKET_COUNT)); _methodLatencies.put(MethodEnum.TREATMENT_WITH_CONFIG, new AtomicLongArray(MAX_LATENCY_BUCKET_COUNT)); _methodLatencies.put(MethodEnum.TREATMENTS_WITH_CONFIG, new AtomicLongArray(MAX_LATENCY_BUCKET_COUNT)); + _methodLatencies.put(MethodEnum.TREATMENTS_BY_FLAG_SET, new AtomicLongArray(MAX_LATENCY_BUCKET_COUNT)); + _methodLatencies.put(MethodEnum.TREATMENTS_BY_FLAG_SETS, new AtomicLongArray(MAX_LATENCY_BUCKET_COUNT)); + _methodLatencies.put(MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SET, new AtomicLongArray(MAX_LATENCY_BUCKET_COUNT)); + _methodLatencies.put(MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SETS, new AtomicLongArray(MAX_LATENCY_BUCKET_COUNT)); _methodLatencies.put(MethodEnum.TRACK, new AtomicLongArray(MAX_LATENCY_BUCKET_COUNT)); } @@ -305,6 +346,10 @@ private void initMethodExceptions() { 
_exceptionsCounters.put(MethodEnum.TREATMENTS, new AtomicLong()); _exceptionsCounters.put(MethodEnum.TREATMENT_WITH_CONFIG, new AtomicLong()); _exceptionsCounters.put(MethodEnum.TREATMENTS_WITH_CONFIG, new AtomicLong()); + _exceptionsCounters.put(MethodEnum.TREATMENTS_BY_FLAG_SET, new AtomicLong()); + _exceptionsCounters.put(MethodEnum.TREATMENTS_BY_FLAG_SETS, new AtomicLong()); + _exceptionsCounters.put(MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SET, new AtomicLong()); + _exceptionsCounters.put(MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SETS, new AtomicLong()); _exceptionsCounters.put(MethodEnum.TRACK, new AtomicLong()); } @@ -342,4 +387,8 @@ private void initEventDataRecords() { _eventsDataRecords.put(EventsDataRecordsEnum.EVENTS_DROPPED, new AtomicLong()); _eventsDataRecords.put(EventsDataRecordsEnum.EVENTS_QUEUED, new AtomicLong()); } -} + + private void initUpdatesFromSEE() { + _updatesFromSSERecords.put(UpdatesFromSSEEnum.SPLITS, new AtomicLong()); + } +} \ No newline at end of file diff --git a/client/src/main/java/io/split/telemetry/storage/NoopTelemetryStorage.java b/client/src/main/java/io/split/telemetry/storage/NoopTelemetryStorage.java index 3673d0c0a..d18134a8a 100644 --- a/client/src/main/java/io/split/telemetry/storage/NoopTelemetryStorage.java +++ b/client/src/main/java/io/split/telemetry/storage/NoopTelemetryStorage.java @@ -1,7 +1,19 @@ package io.split.telemetry.storage; -import io.split.telemetry.domain.*; -import io.split.telemetry.domain.enums.*; +import io.split.telemetry.domain.HTTPErrors; +import io.split.telemetry.domain.HTTPLatencies; +import io.split.telemetry.domain.LastSynchronization; +import io.split.telemetry.domain.MethodExceptions; +import io.split.telemetry.domain.MethodLatencies; +import io.split.telemetry.domain.StreamingEvent; +import io.split.telemetry.domain.UpdatesFromSSE; +import io.split.telemetry.domain.enums.EventsDataRecordsEnum; +import io.split.telemetry.domain.enums.HTTPLatenciesEnum; +import 
io.split.telemetry.domain.enums.ImpressionsDataTypeEnum; +import io.split.telemetry.domain.enums.LastSynchronizationRecordsEnum; +import io.split.telemetry.domain.enums.MethodEnum; +import io.split.telemetry.domain.enums.ResourceEnum; +import io.split.telemetry.domain.enums.UpdatesFromSSEEnum; import java.util.List; @@ -77,6 +89,11 @@ public void recordSessionLength(long sessionLength) { } + @Override + public void recordUpdatesFromSSE(UpdatesFromSSEEnum updatesFromSSEEnum) { + + } + @Override public long getBURTimeouts() { return 0; @@ -146,4 +163,9 @@ public List popTags() { public long getSessionLength() { return 0; } + + @Override + public UpdatesFromSSE popUpdatesFromSSE() { + return null; + } } diff --git a/client/src/main/java/io/split/telemetry/storage/TelemetryRuntimeConsumer.java b/client/src/main/java/io/split/telemetry/storage/TelemetryRuntimeConsumer.java index 6a746e783..8be689989 100644 --- a/client/src/main/java/io/split/telemetry/storage/TelemetryRuntimeConsumer.java +++ b/client/src/main/java/io/split/telemetry/storage/TelemetryRuntimeConsumer.java @@ -4,6 +4,7 @@ import io.split.telemetry.domain.HTTPLatencies; import io.split.telemetry.domain.LastSynchronization; import io.split.telemetry.domain.StreamingEvent; +import io.split.telemetry.domain.UpdatesFromSSE; import io.split.telemetry.domain.enums.EventsDataRecordsEnum; import io.split.telemetry.domain.enums.ImpressionsDataTypeEnum; @@ -20,4 +21,5 @@ public interface TelemetryRuntimeConsumer { List popStreamingEvents(); List popTags(); long getSessionLength(); + UpdatesFromSSE popUpdatesFromSSE(); } diff --git a/client/src/main/java/io/split/telemetry/storage/TelemetryRuntimeProducer.java b/client/src/main/java/io/split/telemetry/storage/TelemetryRuntimeProducer.java index 2baf016f0..789562871 100644 --- a/client/src/main/java/io/split/telemetry/storage/TelemetryRuntimeProducer.java +++ b/client/src/main/java/io/split/telemetry/storage/TelemetryRuntimeProducer.java @@ -1,6 +1,7 @@ package 
io.split.telemetry.storage; import io.split.telemetry.domain.StreamingEvent; +import io.split.telemetry.domain.UpdatesFromSSE; import io.split.telemetry.domain.enums.*; public interface TelemetryRuntimeProducer { @@ -14,4 +15,5 @@ public interface TelemetryRuntimeProducer { void recordTokenRefreshes(); void recordStreamingEvents(StreamingEvent streamingEvent); void recordSessionLength(long sessionLength); -} + void recordUpdatesFromSSE(UpdatesFromSSEEnum updatesFromSSEEnum); +} \ No newline at end of file diff --git a/client/src/main/java/io/split/telemetry/synchronizer/HttpTelemetryMemorySender.java b/client/src/main/java/io/split/telemetry/synchronizer/HttpTelemetryMemorySender.java index cc56493ee..4388eecaa 100644 --- a/client/src/main/java/io/split/telemetry/synchronizer/HttpTelemetryMemorySender.java +++ b/client/src/main/java/io/split/telemetry/synchronizer/HttpTelemetryMemorySender.java @@ -1,47 +1,59 @@ package io.split.telemetry.synchronizer; import com.google.common.annotations.VisibleForTesting; +import io.split.client.dtos.UniqueKeys; import io.split.client.utils.Utils; import io.split.service.HttpPostImp; +import io.split.service.SplitHttpClient; import io.split.telemetry.domain.Config; import io.split.telemetry.domain.Stats; -import io.split.telemetry.domain.enums.HTTPLatenciesEnum; import io.split.telemetry.domain.enums.HttpParamsWrapper; -import io.split.telemetry.domain.enums.LastSynchronizationRecordsEnum; -import io.split.telemetry.domain.enums.ResourceEnum; import io.split.telemetry.storage.TelemetryRuntimeProducer; -import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.net.URI; import java.net.URISyntaxException; public class HttpTelemetryMemorySender{ + private static final Logger _log = LoggerFactory.getLogger(HttpTelemetryMemorySender.class); + private static final String CONFIG_ENDPOINT_PATH = "metrics/config"; private static final String 
STATS_ENDPOINT_PATH = "metrics/usage"; + private static final String UNIQUE_KEYS_ENDPOINT_PATH = "keys/ss"; private static final String CONFIG_METRICS = "Config metrics "; private static final String STATS_METRICS = "Stats metrics "; + private static final String UNIQUE_KEYS_METRICS = "Unique keys metrics "; private final URI _impressionConfigTarget; private final URI _impressionStatsTarget; + private final URI _uniqueKeysTarget; private final HttpPostImp _httpPost; - public static HttpTelemetryMemorySender create(CloseableHttpClient client, URI telemetryRootEndpoint, TelemetryRuntimeProducer telemetryRuntimeProducer) throws URISyntaxException { + public static HttpTelemetryMemorySender create(SplitHttpClient client, URI telemetryRootEndpoint, + TelemetryRuntimeProducer telemetryRuntimeProducer) throws URISyntaxException { return new HttpTelemetryMemorySender(client, Utils.appendPath(telemetryRootEndpoint,CONFIG_ENDPOINT_PATH), Utils.appendPath(telemetryRootEndpoint, STATS_ENDPOINT_PATH), + Utils.appendPath(telemetryRootEndpoint, UNIQUE_KEYS_ENDPOINT_PATH), telemetryRuntimeProducer ); } @VisibleForTesting - HttpTelemetryMemorySender(CloseableHttpClient client, URI impressionConfigTarget, URI impressionStatsTarget, TelemetryRuntimeProducer telemetryRuntimeProducer) { + HttpTelemetryMemorySender(SplitHttpClient client, URI impressionConfigTarget, URI impressionStatsTarget, + URI uniqueKeysTarget,TelemetryRuntimeProducer telemetryRuntimeProducer) { _httpPost = new HttpPostImp(client, telemetryRuntimeProducer); _impressionConfigTarget = impressionConfigTarget; _impressionStatsTarget = impressionStatsTarget; + _uniqueKeysTarget = uniqueKeysTarget; } public void postConfig(Config config) { + if (_log.isDebugEnabled()) { + _log.debug("Sending init telemetry"); + } _httpPost.post(_impressionConfigTarget, config, CONFIG_METRICS, HttpParamsWrapper.TELEMETRY); } @@ -49,4 +61,7 @@ public void postStats(Stats stats) { _httpPost.post(_impressionStatsTarget, stats, STATS_METRICS, 
HttpParamsWrapper.TELEMETRY); } + public void postUniqueKeys(UniqueKeys uniqueKeys) { + _httpPost.post(_uniqueKeysTarget, uniqueKeys, UNIQUE_KEYS_METRICS, HttpParamsWrapper.TELEMETRY); + } } diff --git a/client/src/main/java/io/split/telemetry/synchronizer/TelemetryInMemorySubmitter.java b/client/src/main/java/io/split/telemetry/synchronizer/TelemetryInMemorySubmitter.java index 81843d90a..5559332a1 100644 --- a/client/src/main/java/io/split/telemetry/synchronizer/TelemetryInMemorySubmitter.java +++ b/client/src/main/java/io/split/telemetry/synchronizer/TelemetryInMemorySubmitter.java @@ -2,10 +2,12 @@ import com.google.common.annotations.VisibleForTesting; import io.split.client.SplitClientConfig; +import io.split.client.dtos.UniqueKeys; import io.split.client.impressions.ImpressionListener; import io.split.client.impressions.ImpressionsManager; import io.split.integrations.IntegrationsConfig; import io.split.integrations.NewRelicListener; +import io.split.service.SplitHttpClient; import io.split.storages.SegmentCacheConsumer; import io.split.storages.SplitCacheConsumer; import io.split.telemetry.domain.Config; @@ -16,7 +18,6 @@ import io.split.telemetry.domain.enums.ImpressionsDataTypeEnum; import io.split.telemetry.storage.TelemetryRuntimeProducer; import io.split.telemetry.storage.TelemetryStorageConsumer; -import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; import java.net.URI; import java.net.URISyntaxException; @@ -32,15 +33,16 @@ public class TelemetryInMemorySubmitter implements TelemetrySynchronizer{ private static final String STORAGE = "memory"; private HttpTelemetryMemorySender _httpHttpTelemetryMemorySender; - private TelemetryStorageConsumer _teleTelemetryStorageConsumer; + private TelemetryStorageConsumer _telemetryStorageConsumer; private SplitCacheConsumer _splitCacheConsumer; private SegmentCacheConsumer _segmentCacheConsumer; private final long _initStartTime; - public TelemetryInMemorySubmitter(CloseableHttpClient client, URI 
telemetryRootEndpoint, TelemetryStorageConsumer telemetryStorageConsumer, SplitCacheConsumer splitCacheConsumer, - SegmentCacheConsumer segmentCacheConsumer, TelemetryRuntimeProducer telemetryRuntimeProducer, long initStartTime) throws URISyntaxException { + public TelemetryInMemorySubmitter(SplitHttpClient client, URI telemetryRootEndpoint, TelemetryStorageConsumer telemetryStorageConsumer, + SplitCacheConsumer splitCacheConsumer, SegmentCacheConsumer segmentCacheConsumer, + TelemetryRuntimeProducer telemetryRuntimeProducer, long initStartTime) throws URISyntaxException { _httpHttpTelemetryMemorySender = HttpTelemetryMemorySender.create(client, telemetryRootEndpoint, telemetryRuntimeProducer); - _teleTelemetryStorageConsumer = checkNotNull(telemetryStorageConsumer); + _telemetryStorageConsumer = checkNotNull(telemetryStorageConsumer); _splitCacheConsumer = checkNotNull(splitCacheConsumer); _segmentCacheConsumer = checkNotNull(segmentCacheConsumer); _initStartTime = initStartTime; @@ -57,35 +59,41 @@ public void synchronizeStats() throws Exception { } @Override - public void finalSynchronization(long splitCount, long segmentCount, long segmentKeyCount) throws Exception { + public void synchronizeUniqueKeys(UniqueKeys uniqueKeys){ + _httpHttpTelemetryMemorySender.postUniqueKeys(uniqueKeys); + } + + @Override + public void finalSynchronization() throws Exception { Stats stats = generateStats(); - stats.set_splitCount(splitCount); - stats.set_segmentCount(segmentCount); - stats.set_segmentKeyCount(segmentKeyCount); + stats.setSplitCount(_splitCacheConsumer.getAll().stream().count()); + stats.setSegmentCount(_segmentCacheConsumer.getSegmentCount()); + stats.setSegmentKeyCount(_segmentCacheConsumer.getKeyCount()); _httpHttpTelemetryMemorySender.postStats(stats); } @VisibleForTesting Stats generateStats() throws Exception { Stats stats = new Stats(); - stats.set_lastSynchronization(_teleTelemetryStorageConsumer.getLastSynchronization()); - 
stats.set_methodLatencies(_teleTelemetryStorageConsumer.popLatencies()); - stats.set_methodExceptions(_teleTelemetryStorageConsumer.popExceptions()); - stats.set_httpErrors(_teleTelemetryStorageConsumer.popHTTPErrors()); - stats.set_httpLatencies(_teleTelemetryStorageConsumer.popHTTPLatencies()); - stats.set_tokenRefreshes(_teleTelemetryStorageConsumer.popTokenRefreshes()); - stats.set_authRejections(_teleTelemetryStorageConsumer.popAuthRejections()); - stats.set_impressionsQueued(_teleTelemetryStorageConsumer.getImpressionsStats(ImpressionsDataTypeEnum.IMPRESSIONS_QUEUED)); - stats.set_impressionsDeduped(_teleTelemetryStorageConsumer.getImpressionsStats(ImpressionsDataTypeEnum.IMPRESSIONS_DEDUPED)); - stats.set_impressionsDropped(_teleTelemetryStorageConsumer.getImpressionsStats(ImpressionsDataTypeEnum.IMPRESSIONS_DROPPED)); - stats.set_splitCount(_splitCacheConsumer.getAll().stream().count()); - stats.set_segmentCount(_segmentCacheConsumer.getSegmentCount()); - stats.set_segmentKeyCount(_segmentCacheConsumer.getKeyCount()); - stats.set_sessionLengthMs(_teleTelemetryStorageConsumer.getSessionLength()); - stats.set_eventsQueued(_teleTelemetryStorageConsumer.getEventStats(EventsDataRecordsEnum.EVENTS_QUEUED)); - stats.set_eventsDropped(_teleTelemetryStorageConsumer.getEventStats(EventsDataRecordsEnum.EVENTS_DROPPED)); - stats.set_streamingEvents(_teleTelemetryStorageConsumer.popStreamingEvents()); - stats.set_tags(_teleTelemetryStorageConsumer.popTags()); + stats.setLastSynchronization(_telemetryStorageConsumer.getLastSynchronization()); + stats.setMethodLatencies(_telemetryStorageConsumer.popLatencies()); + stats.setMethodExceptions(_telemetryStorageConsumer.popExceptions()); + stats.setHttpErrors(_telemetryStorageConsumer.popHTTPErrors()); + stats.setHttpLatencies(_telemetryStorageConsumer.popHTTPLatencies()); + stats.setTokenRefreshes(_telemetryStorageConsumer.popTokenRefreshes()); + stats.setAuthRejections(_telemetryStorageConsumer.popAuthRejections()); + 
stats.setImpressionsQueued(_telemetryStorageConsumer.getImpressionsStats(ImpressionsDataTypeEnum.IMPRESSIONS_QUEUED)); + stats.setImpressionsDeduped(_telemetryStorageConsumer.getImpressionsStats(ImpressionsDataTypeEnum.IMPRESSIONS_DEDUPED)); + stats.setImpressionsDropped(_telemetryStorageConsumer.getImpressionsStats(ImpressionsDataTypeEnum.IMPRESSIONS_DROPPED)); + stats.setSplitCount(_splitCacheConsumer.getAll().stream().count()); + stats.setSegmentCount(_segmentCacheConsumer.getSegmentCount()); + stats.setSegmentKeyCount(_segmentCacheConsumer.getKeyCount()); + stats.setSessionLengthMs(_telemetryStorageConsumer.getSessionLength()); + stats.setEventsQueued(_telemetryStorageConsumer.getEventStats(EventsDataRecordsEnum.EVENTS_QUEUED)); + stats.setEventsDropped(_telemetryStorageConsumer.getEventStats(EventsDataRecordsEnum.EVENTS_DROPPED)); + stats.setStreamingEvents(_telemetryStorageConsumer.popStreamingEvents()); + stats.setTags(_telemetryStorageConsumer.popTags()); + stats.setUpdatesFromSSE(_telemetryStorageConsumer.popUpdatesFromSSE()); return stats; } @@ -101,35 +109,38 @@ Config generateConfig(SplitClientConfig splitClientConfig, long readyTimestamp, } List impressions = getImpressions(impressionsListeners); - rates.set_telemetry(splitClientConfig.get_telemetryRefreshRate()); - rates.set_events(splitClientConfig.eventFlushIntervalInMillis()); - rates.set_impressions(splitClientConfig.impressionsRefreshRate()); - rates.set_segments(splitClientConfig.segmentsRefreshRate()); - rates.set_splits(splitClientConfig.featuresRefreshRate()); - - urlOverrides.set_auth(!SplitClientConfig.AUTH_ENDPOINT.equals(splitClientConfig.authServiceURL())); - urlOverrides.set_stream(!SplitClientConfig.STREAMING_ENDPOINT.equals(splitClientConfig.streamingServiceURL())); - urlOverrides.set_sdk(!SplitClientConfig.SDK_ENDPOINT.equals(splitClientConfig.endpoint())); - urlOverrides.set_events(!SplitClientConfig.EVENTS_ENDPOINT.equals(splitClientConfig.eventsEndpoint())); - 
urlOverrides.set_telemetry(!SplitClientConfig.TELEMETRY_ENDPOINT.equals(splitClientConfig.telemetryURL())); - - config.set_burTimeouts(_teleTelemetryStorageConsumer.getBURTimeouts()); - config.set_nonReadyUsages(_teleTelemetryStorageConsumer.getNonReadyUsages()); - config.set_httpProxyDetected(splitClientConfig.proxy() != null); - config.set_impressionsMode(getImpressionsMode(splitClientConfig)); - config.set_integrations(impressions); - config.set_impressionsListenerEnabled((impressionsListeners.size()-impressions.size()) > 0); - config.set_operationMode(OPERATION_MODE); - config.set_storage(STORAGE); - config.set_impressionsQueueSize(splitClientConfig.impressionsQueueSize()); - config.set_redundantFactories(getRedundantFactories(factoryInstances)); - config.set_eventsQueueSize(splitClientConfig.eventsQueueSize()); - config.set_tags(getListMaxSize(tags)); - config.set_activeFactories(factoryInstances.size()); - config.set_timeUntilReady(readyTimestamp - _initStartTime); - config.set_rates(rates); - config.set_urlOverrides(urlOverrides); - config.set_streamingEnabled(splitClientConfig.streamingEnabled()); + rates.setTelemetry(splitClientConfig.getTelemetryRefreshRate()); + rates.setEvents(splitClientConfig.eventSendIntervalInMillis()); + rates.setImpressions(splitClientConfig.impressionsRefreshRate()); + rates.setSegments(splitClientConfig.segmentsRefreshRate()); + rates.setSplits(splitClientConfig.featuresRefreshRate()); + + urlOverrides.setAuth(!SplitClientConfig.AUTH_ENDPOINT.equals(splitClientConfig.authServiceURL())); + urlOverrides.setStream(!SplitClientConfig.STREAMING_ENDPOINT.equals(splitClientConfig.streamingServiceURL())); + urlOverrides.setSdk(!SplitClientConfig.SDK_ENDPOINT.equals(splitClientConfig.endpoint())); + urlOverrides.setEvents(!SplitClientConfig.EVENTS_ENDPOINT.equals(splitClientConfig.eventsEndpoint())); + urlOverrides.setTelemetry(!SplitClientConfig.TELEMETRY_ENDPOINT.equals(splitClientConfig.telemetryURL())); + + 
config.setBurTimeouts(_telemetryStorageConsumer.getBURTimeouts()); + config.setNonReadyUsages(_telemetryStorageConsumer.getNonReadyUsages()); + config.setHttpProxyDetected(splitClientConfig.proxy() != null); + config.setImpressionsMode(getImpressionsMode(splitClientConfig)); + config.setIntegrations(impressions); + config.setImpressionsListenerEnabled((impressionsListeners.size()-impressions.size()) > 0); + config.setOperationMode(OPERATION_MODE); + config.setStorage(STORAGE); + config.setImpressionsQueueSize(splitClientConfig.impressionsQueueSize()); + config.setRedundantFactories(getRedundantFactories(factoryInstances)); + config.setEventsQueueSize(splitClientConfig.eventsQueueSize()); + config.setTags(getListMaxSize(tags)); + config.setActiveFactories(factoryInstances.size()); + config.setTimeUntilReady(readyTimestamp - _initStartTime); + config.setRates(rates); + config.setUrlOverrides(urlOverrides); + config.setStreamingEnabled(splitClientConfig.streamingEnabled()); + int invalidSets = splitClientConfig.getInvalidSets(); + config.setFlagSetsTotal(splitClientConfig.getSetsFilter().size() + invalidSets); + config.setFlagSetsInvalid(invalidSets); return config; } @@ -159,4 +170,4 @@ private List getImpressions(List { try { _telemetrySynchronizer.synchronizeStats(); @@ -45,13 +33,12 @@ protected void startScheduledTask() { },_telemetryRefreshRate, _telemetryRefreshRate, TimeUnit.SECONDS); } - public void stopScheduledTask(long splitCount, long segmentCount, long segmentKeyCount) { + public void stopScheduledTask() { try { - _telemetrySynchronizer.finalSynchronization(splitCount, segmentCount, segmentKeyCount); + _telemetrySynchronizer.finalSynchronization(); } catch (Exception e) { _log.warn("Error trying to send telemetry stats."); } _telemetrySyncScheduledExecutorService.shutdown(); } - -} +} \ No newline at end of file diff --git a/client/src/main/java/io/split/telemetry/synchronizer/TelemetrySynchronizer.java 
b/client/src/main/java/io/split/telemetry/synchronizer/TelemetrySynchronizer.java index 7600a6334..54ccff68a 100644 --- a/client/src/main/java/io/split/telemetry/synchronizer/TelemetrySynchronizer.java +++ b/client/src/main/java/io/split/telemetry/synchronizer/TelemetrySynchronizer.java @@ -1,6 +1,7 @@ package io.split.telemetry.synchronizer; import io.split.client.SplitClientConfig; +import io.split.client.dtos.UniqueKeys; import java.util.List; import java.util.Map; @@ -8,5 +9,6 @@ public interface TelemetrySynchronizer { void synchronizeConfig(SplitClientConfig config, long timeUntilReady, Map factoryInstances, List tags); void synchronizeStats() throws Exception; - void finalSynchronization(long splitCount, long segmentCount, long segmentKeyCount) throws Exception; + void synchronizeUniqueKeys(UniqueKeys uniqueKeys); + void finalSynchronization() throws Exception; } diff --git a/client/src/test/java/io/split/TestHelper.java b/client/src/test/java/io/split/TestHelper.java index 39b973c78..577a1d00f 100644 --- a/client/src/test/java/io/split/TestHelper.java +++ b/client/src/test/java/io/split/TestHelper.java @@ -1,15 +1,23 @@ package io.split; +import io.split.client.dtos.Condition; +import io.split.client.dtos.Excluded; +import io.split.client.dtos.RuleBasedSegment; +import io.split.client.dtos.Status; import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; import org.apache.hc.client5.http.impl.classic.CloseableHttpResponse; import org.apache.hc.core5.http.ClassicHttpResponse; import org.apache.hc.core5.http.Header; import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.HttpHeaders; +import org.apache.hc.core5.http.message.BasicHeader; import org.mockito.Mockito; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.List; public class TestHelper { public static CloseableHttpClient mockHttpClient(String jsonName, int 
httpStatus) throws IOException, IllegalAccessException, NoSuchMethodException, InvocationTargetException { @@ -19,7 +27,10 @@ public static CloseableHttpClient mockHttpClient(String jsonName, int httpStatus ClassicHttpResponse httpResponseMock = Mockito.mock(ClassicHttpResponse.class); Mockito.when(httpResponseMock.getEntity()).thenReturn(entityMock); Mockito.when(httpResponseMock.getCode()).thenReturn(httpStatus); - Mockito.when(httpResponseMock.getHeaders()).thenReturn(new Header[0]); + Header[] headers = new Header[2]; + headers[0] = new BasicHeader(HttpHeaders.VIA, "HTTP/1.1 m_proxy_rio1"); + headers[1] = new BasicHeader(HttpHeaders.VIA, "HTTP/1.1 s_proxy_rio1"); + Mockito.when(httpResponseMock.getHeaders()).thenReturn(headers); CloseableHttpClient httpClientMock = Mockito.mock(CloseableHttpClient.class); Mockito.when(httpClientMock.execute(Mockito.anyObject())).thenReturn(classicResponseToCloseableMock(httpResponseMock)); @@ -31,4 +42,20 @@ public static CloseableHttpResponse classicResponseToCloseableMock(ClassicHttpRe adaptMethod.setAccessible(true); return (CloseableHttpResponse) adaptMethod.invoke(null, mocked); } + + public static RuleBasedSegment makeRuleBasedSegment(String name, List conditions, long changeNumber) { + Excluded excluded = new Excluded(); + excluded.segments = new ArrayList<>(); + excluded.keys = new ArrayList<>(); + + RuleBasedSegment ruleBasedSegment = new RuleBasedSegment(); + ruleBasedSegment.name = name; + ruleBasedSegment.status = Status.ACTIVE; + ruleBasedSegment.conditions = conditions; + ruleBasedSegment.trafficTypeName = "user"; + ruleBasedSegment.changeNumber = changeNumber; + ruleBasedSegment.excluded = excluded; + return ruleBasedSegment; + } + } diff --git a/client/src/test/java/io/split/client/CacheUpdaterServiceTest.java b/client/src/test/java/io/split/client/CacheUpdaterServiceTest.java index 9c15773f7..5f8084f4e 100644 --- a/client/src/test/java/io/split/client/CacheUpdaterServiceTest.java +++ 
b/client/src/test/java/io/split/client/CacheUpdaterServiceTest.java @@ -1,11 +1,14 @@ package io.split.client; +import io.split.client.interceptors.FlagSetsFilter; +import io.split.client.interceptors.FlagSetsFilterImpl; import io.split.storages.memory.InMemoryCacheImp; import io.split.storages.SplitCache; import org.junit.Assert; import org.junit.Test; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; public class CacheUpdaterServiceTest { @@ -17,7 +20,8 @@ public class CacheUpdaterServiceTest { @Test public void testCacheUpdate() { - SplitCache splitCache = new InMemoryCacheImp(); + FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(new HashSet<>()); + SplitCache splitCache = new InMemoryCacheImp(flagSetsFilter); CacheUpdaterService cacheUpdaterService = new CacheUpdaterService(splitCache); cacheUpdaterService.updateCache(getMap()); Assert.assertNotNull(splitCache.get(MY_FEATURE)); diff --git a/client/src/test/java/io/split/client/HttpSegmentChangeFetcherTest.java b/client/src/test/java/io/split/client/HttpSegmentChangeFetcherTest.java index be1fb4b3c..a8113d8ef 100644 --- a/client/src/test/java/io/split/client/HttpSegmentChangeFetcherTest.java +++ b/client/src/test/java/io/split/client/HttpSegmentChangeFetcherTest.java @@ -2,8 +2,11 @@ import io.split.TestHelper; import io.split.client.dtos.SegmentChange; +import io.split.client.utils.SDKMetadata; import io.split.engine.common.FetchOptions; import io.split.engine.metrics.Metrics; +import io.split.service.SplitHttpClient; +import io.split.service.SplitHttpClientImpl; import io.split.telemetry.storage.InMemoryTelemetryStorage; import io.split.telemetry.storage.TelemetryStorage; import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; @@ -31,46 +34,68 @@ public class HttpSegmentChangeFetcherTest { public void testDefaultURL() throws URISyntaxException { URI rootTarget = URI.create("https://api.split.io"); CloseableHttpClient 
httpClient = HttpClients.custom().build(); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); Metrics.NoopMetrics metrics = new Metrics.NoopMetrics(); - HttpSegmentChangeFetcher fetcher = HttpSegmentChangeFetcher.create(httpClient, rootTarget, TELEMETRY_STORAGE); - Assert.assertThat(fetcher.getTarget().toString(), Matchers.is(Matchers.equalTo("https://api.split.io/api/segmentChanges"))); + HttpSegmentChangeFetcher fetcher = HttpSegmentChangeFetcher.create(splitHtpClient, rootTarget, + TELEMETRY_STORAGE); + Assert.assertThat(fetcher.getTarget().toString(), + Matchers.is(Matchers.equalTo("https://api.split.io/api/segmentChanges"))); } @Test public void testCustomURLNoPathNoBackslash() throws URISyntaxException { URI rootTarget = URI.create("https://kubernetesturl.com/split"); CloseableHttpClient httpClient = HttpClients.custom().build(); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); Metrics.NoopMetrics metrics = new Metrics.NoopMetrics(); - HttpSegmentChangeFetcher fetcher = HttpSegmentChangeFetcher.create(httpClient, rootTarget, TELEMETRY_STORAGE); - Assert.assertThat(fetcher.getTarget().toString(), Matchers.is(Matchers.equalTo("https://kubernetesturl.com/split/api/segmentChanges"))); + HttpSegmentChangeFetcher fetcher = HttpSegmentChangeFetcher.create(splitHtpClient, rootTarget, + TELEMETRY_STORAGE); + Assert.assertThat(fetcher.getTarget().toString(), + Matchers.is(Matchers.equalTo("https://kubernetesturl.com/split/api/segmentChanges"))); } @Test public void testCustomURLAppendingPath() throws URISyntaxException { URI rootTarget = 
URI.create("https://kubernetesturl.com/split/"); CloseableHttpClient httpClient = HttpClients.custom().build(); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); Metrics.NoopMetrics metrics = new Metrics.NoopMetrics(); - HttpSegmentChangeFetcher fetcher = HttpSegmentChangeFetcher.create(httpClient, rootTarget, TELEMETRY_STORAGE); - Assert.assertThat(fetcher.getTarget().toString(), Matchers.is(Matchers.equalTo("https://kubernetesturl.com/split/api/segmentChanges"))); + HttpSegmentChangeFetcher fetcher = HttpSegmentChangeFetcher.create(splitHtpClient, rootTarget, + TELEMETRY_STORAGE); + Assert.assertThat(fetcher.getTarget().toString(), + Matchers.is(Matchers.equalTo("https://kubernetesturl.com/split/api/segmentChanges"))); } @Test public void testCustomURLAppendingPathNoBackslash() throws URISyntaxException { URI rootTarget = URI.create("https://kubernetesturl.com/split"); CloseableHttpClient httpClient = HttpClients.custom().build(); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); Metrics.NoopMetrics metrics = new Metrics.NoopMetrics(); - HttpSegmentChangeFetcher fetcher = HttpSegmentChangeFetcher.create(httpClient, rootTarget, TELEMETRY_STORAGE); - Assert.assertThat(fetcher.getTarget().toString(), Matchers.is(Matchers.equalTo("https://kubernetesturl.com/split/api/segmentChanges"))); + HttpSegmentChangeFetcher fetcher = HttpSegmentChangeFetcher.create(splitHtpClient, rootTarget, + TELEMETRY_STORAGE); + Assert.assertThat(fetcher.getTarget().toString(), + Matchers.is(Matchers.equalTo("https://kubernetesturl.com/split/api/segmentChanges"))); } @Test - public void 
testFetcherWithSpecialCharacters() throws URISyntaxException, IOException, IllegalAccessException, NoSuchMethodException, InvocationTargetException { + public void testFetcherWithSpecialCharacters() throws URISyntaxException, IOException, IllegalAccessException, + NoSuchMethodException, InvocationTargetException { URI rootTarget = URI.create("https://api.split.io/api/segmentChanges"); - CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("segment-change-special-chatacters.json", HttpStatus.SC_OK); + CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("segment-change-special-chatacters.json", + HttpStatus.SC_OK); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClientMock, new RequestDecorator(null), + "qwerty", + metadata()); Metrics.NoopMetrics metrics = new Metrics.NoopMetrics(); - HttpSegmentChangeFetcher fetcher = HttpSegmentChangeFetcher.create(httpClientMock, rootTarget, TELEMETRY_STORAGE); + HttpSegmentChangeFetcher fetcher = HttpSegmentChangeFetcher.create(splitHtpClient, rootTarget, + TELEMETRY_STORAGE); SegmentChange change = fetcher.fetch("some_segment", 1234567, new FetchOptions.Builder().build()); @@ -82,7 +107,8 @@ public void testFetcherWithSpecialCharacters() throws URISyntaxException, IOExce } @Test - public void testFetcherWithCDNBypassOption() throws IOException, URISyntaxException, IllegalAccessException, NoSuchMethodException, InvocationTargetException { + public void testFetcherWithCDNBypassOption() throws IOException, URISyntaxException, IllegalAccessException, + NoSuchMethodException, InvocationTargetException { URI rootTarget = URI.create("https://api.split.io"); HttpEntity entityMock = Mockito.mock(HttpEntity.class); @@ -94,17 +120,52 @@ public void testFetcherWithCDNBypassOption() throws IOException, URISyntaxExcept ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(ClassicHttpRequest.class); CloseableHttpClient 
httpClientMock = Mockito.mock(CloseableHttpClient.class); - when(httpClientMock.execute(requestCaptor.capture())).thenReturn(TestHelper.classicResponseToCloseableMock(response)); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClientMock, new RequestDecorator(null), + "qwerty", metadata()); + + when(httpClientMock.execute(requestCaptor.capture())) + .thenReturn(TestHelper.classicResponseToCloseableMock(response)); Metrics.NoopMetrics metrics = new Metrics.NoopMetrics(); - HttpSegmentChangeFetcher fetcher = HttpSegmentChangeFetcher.create(httpClientMock, rootTarget, Mockito.mock(TelemetryStorage.class)); + HttpSegmentChangeFetcher fetcher = HttpSegmentChangeFetcher.create(splitHtpClient, rootTarget, + Mockito.mock(TelemetryStorage.class)); fetcher.fetch("someSegment", -1, new FetchOptions.Builder().targetChangeNumber(123).build()); - fetcher.fetch("someSegment2",-1, new FetchOptions.Builder().build()); + fetcher.fetch("someSegment2", -1, new FetchOptions.Builder().build()); List captured = requestCaptor.getAllValues(); Assert.assertEquals(captured.size(), 2); Assert.assertTrue(captured.get(0).getUri().toString().contains("till=123")); Assert.assertFalse(captured.get(1).getUri().toString().contains("till=")); } + @Test(expected = IllegalStateException.class) + public void testFetcherWithError() throws IOException, URISyntaxException, IllegalAccessException, + NoSuchMethodException, InvocationTargetException { + URI rootTarget = URI.create("https://api.split.io"); + + HttpEntity entityMock = Mockito.mock(HttpEntity.class); + when(entityMock.getContent()).thenReturn(new StringBufferInputStream("{\"till\": 1}")); + ClassicHttpResponse response = Mockito.mock(ClassicHttpResponse.class); + when(response.getCode()).thenReturn(400); + when(response.getEntity()).thenReturn(entityMock); + when(response.getHeaders()).thenReturn(new Header[0]); + + ArgumentCaptor requestCaptor = 
ArgumentCaptor.forClass(ClassicHttpRequest.class); + CloseableHttpClient httpClientMock = Mockito.mock(CloseableHttpClient.class); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClientMock, new RequestDecorator(null), + "qwerty", metadata()); + + when(httpClientMock.execute(requestCaptor.capture())) + .thenReturn(TestHelper.classicResponseToCloseableMock(response)); + + Metrics.NoopMetrics metrics = new Metrics.NoopMetrics(); + HttpSegmentChangeFetcher fetcher = HttpSegmentChangeFetcher.create(splitHtpClient, rootTarget, + Mockito.mock(TelemetryStorage.class)); + + fetcher.fetch("someSegment", -1, new FetchOptions.Builder().build()); + } + + private SDKMetadata metadata() { + return new SDKMetadata("java-1.2.3", "1.2.3.4", "someIP"); + } } diff --git a/client/src/test/java/io/split/client/HttpSplitChangeFetcherTest.java b/client/src/test/java/io/split/client/HttpSplitChangeFetcherTest.java index 55090e7b3..e1198cd0f 100644 --- a/client/src/test/java/io/split/client/HttpSplitChangeFetcherTest.java +++ b/client/src/test/java/io/split/client/HttpSplitChangeFetcherTest.java @@ -1,105 +1,127 @@ package io.split.client; +import io.split.Spec; import io.split.TestHelper; import io.split.client.dtos.Split; import io.split.client.dtos.SplitChange; +import io.split.client.utils.Json; +import io.split.client.utils.SDKMetadata; import io.split.engine.common.FetchOptions; import io.split.engine.metrics.Metrics; +import io.split.engine.sse.client.SSEClient; +import io.split.service.SplitHttpClient; +import io.split.service.SplitHttpClientImpl; import io.split.telemetry.storage.InMemoryTelemetryStorage; import io.split.telemetry.storage.TelemetryRuntimeProducer; import io.split.telemetry.storage.TelemetryStorage; -import org.apache.hc.client5.http.classic.methods.HttpUriRequestBase; import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; -import org.apache.hc.client5.http.impl.classic.CloseableHttpResponse; import 
org.apache.hc.client5.http.impl.classic.HttpClients; -import org.apache.hc.core5.http.*; -import org.apache.hc.core5.http.io.entity.StringEntity; -import org.apache.hc.core5.http.message.BasicClassicHttpResponse; -import org.hamcrest.Matchers; +import org.apache.hc.core5.http.ClassicHttpRequest; +import org.apache.hc.core5.http.ClassicHttpResponse; +import org.apache.hc.core5.http.Header; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.HttpStatus; +import org.awaitility.Awaitility; import org.junit.Assert; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; -import java.io.Closeable; +import java.io.ByteArrayInputStream; import java.io.IOException; -import java.io.StringBufferInputStream; +import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.net.URI; import java.net.URISyntaxException; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.nio.charset.StandardCharsets; +import java.util.*; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; import static org.mockito.Mockito.when; public class HttpSplitChangeFetcherTest { private static final TelemetryStorage TELEMETRY_STORAGE = Mockito.mock(InMemoryTelemetryStorage.class); + @Test public void testDefaultURL() throws URISyntaxException { URI rootTarget = URI.create("https://api.split.io"); CloseableHttpClient httpClient = HttpClients.custom().build(); Metrics.NoopMetrics metrics = new Metrics.NoopMetrics(); - HttpSplitChangeFetcher fetcher = HttpSplitChangeFetcher.create(httpClient, rootTarget, TELEMETRY_STORAGE); - Assert.assertThat(fetcher.getTarget().toString(), Matchers.is(Matchers.equalTo("https://api.split.io/api/splitChanges"))); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + 
metadata()); + + HttpSplitChangeFetcher fetcher = HttpSplitChangeFetcher.create(splitHtpClient, rootTarget, TELEMETRY_STORAGE, false); + Assert.assertEquals("https://api.split.io/api/splitChanges", fetcher.getTarget().toString()); } @Test public void testCustomURLNoPathNoBackslash() throws URISyntaxException { URI rootTarget = URI.create("https://kubernetesturl.com/split"); CloseableHttpClient httpClient = HttpClients.custom().build(); - Metrics.NoopMetrics metrics = new Metrics.NoopMetrics(); - HttpSplitChangeFetcher fetcher = HttpSplitChangeFetcher.create(httpClient, rootTarget, TELEMETRY_STORAGE); - Assert.assertThat(fetcher.getTarget().toString(), Matchers.is(Matchers.equalTo("https://kubernetesturl.com/split/api/splitChanges"))); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); + + HttpSplitChangeFetcher fetcher = HttpSplitChangeFetcher.create(splitHtpClient, rootTarget, TELEMETRY_STORAGE, false); + Assert.assertEquals("https://kubernetesturl.com/split/api/splitChanges", fetcher.getTarget().toString()); } @Test public void testCustomURLAppendingPath() throws URISyntaxException { URI rootTarget = URI.create("https://kubernetesturl.com/split/"); CloseableHttpClient httpClient = HttpClients.custom().build(); - Metrics.NoopMetrics metrics = new Metrics.NoopMetrics(); - HttpSplitChangeFetcher fetcher = HttpSplitChangeFetcher.create(httpClient, rootTarget, TELEMETRY_STORAGE); - Assert.assertThat(fetcher.getTarget().toString(), Matchers.is(Matchers.equalTo("https://kubernetesturl.com/split/api/splitChanges"))); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); + 
HttpSplitChangeFetcher fetcher = HttpSplitChangeFetcher.create(splitHtpClient, rootTarget, TELEMETRY_STORAGE, false); + Assert.assertEquals("https://kubernetesturl.com/split/api/splitChanges", fetcher.getTarget().toString()); } @Test public void testCustomURLAppendingPathNoBackslash() throws URISyntaxException { URI rootTarget = URI.create("https://kubernetesturl.com/split"); CloseableHttpClient httpClient = HttpClients.custom().build(); - Metrics.NoopMetrics metrics = new Metrics.NoopMetrics(); - HttpSplitChangeFetcher fetcher = HttpSplitChangeFetcher.create(httpClient, rootTarget, TELEMETRY_STORAGE); - Assert.assertThat(fetcher.getTarget().toString(), Matchers.is(Matchers.equalTo("https://kubernetesturl.com/split/api/splitChanges"))); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); + HttpSplitChangeFetcher fetcher = HttpSplitChangeFetcher.create(splitHtpClient, rootTarget, TELEMETRY_STORAGE, false); + Assert.assertEquals("https://kubernetesturl.com/split/api/splitChanges", fetcher.getTarget().toString()); } @Test - public void testFetcherWithSpecialCharacters() throws URISyntaxException, InvocationTargetException, NoSuchMethodException, IllegalAccessException, IOException { + public void testFetcherWithSpecialCharacters() throws URISyntaxException, InvocationTargetException, + NoSuchMethodException, IllegalAccessException, IOException { URI rootTarget = URI.create("https://api.split.io"); - CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("split-change-special-characters.json", HttpStatus.SC_OK); + CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("split-change-special-characters.json", + HttpStatus.SC_OK); + SplitHttpClient splitHtpClient = 
SplitHttpClientImpl.create(httpClientMock, new RequestDecorator(null), + "qwerty", + metadata()); - Metrics.NoopMetrics metrics = new Metrics.NoopMetrics(); - HttpSplitChangeFetcher fetcher = HttpSplitChangeFetcher.create(httpClientMock, rootTarget, TELEMETRY_STORAGE); + HttpSplitChangeFetcher fetcher = HttpSplitChangeFetcher.create(splitHtpClient, rootTarget, TELEMETRY_STORAGE, false); - SplitChange change = fetcher.fetch(1234567, new FetchOptions.Builder().cacheControlHeaders(true).build()); + SplitChange change = fetcher.fetch(1234567, -1, new FetchOptions.Builder().cacheControlHeaders(true).build()); Assert.assertNotNull(change); - Assert.assertEquals(1, change.splits.size()); - Assert.assertNotNull(change.splits.get(0)); + Assert.assertEquals(1, change.featureFlags.d.size()); + Assert.assertNotNull(change.featureFlags.d.get(0)); - Split split = change.splits.get(0); + Split split = change.featureFlags.d.get(0); Map configs = split.configurations; Assert.assertEquals(2, configs.size()); Assert.assertEquals("{\"test\": \"blue\",\"grüne Straße\": 13}", configs.get("on")); Assert.assertEquals("{\"test\": \"blue\",\"size\": 15}", configs.get("off")); + Assert.assertEquals(2, split.sets.size()); } @Test - public void testFetcherWithCDNBypassOption() throws IOException, URISyntaxException, IllegalAccessException, NoSuchMethodException, InvocationTargetException { + public void testFetcherWithCDNBypassOption() throws IOException, URISyntaxException, IllegalAccessException, + NoSuchMethodException, InvocationTargetException { URI rootTarget = URI.create("https://api.split.io"); HttpEntity entityMock = Mockito.mock(HttpEntity.class); - when(entityMock.getContent()).thenReturn(new StringBufferInputStream("{\"till\": 1}")); + when(entityMock.getContent()) + .thenReturn(new ByteArrayInputStream("{\"ff\":{\"t\": 1,\"s\": -1,\"d\": []},\"rbs\":{\"t\": -1,\"s\": -1,\"d\": []}}". 
+ getBytes(StandardCharsets.UTF_8))); ClassicHttpResponse response = Mockito.mock(ClassicHttpResponse.class); when(response.getCode()).thenReturn(200); when(response.getEntity()).thenReturn(entityMock); @@ -107,26 +129,32 @@ public void testFetcherWithCDNBypassOption() throws IOException, URISyntaxExcept ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(ClassicHttpRequest.class); CloseableHttpClient httpClientMock = Mockito.mock(CloseableHttpClient.class); - when(httpClientMock.execute(requestCaptor.capture())).thenReturn(TestHelper.classicResponseToCloseableMock(response)); + when(httpClientMock.execute(requestCaptor.capture())) + .thenReturn(TestHelper.classicResponseToCloseableMock(response)); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClientMock, new RequestDecorator(null), + "qwerty", metadata()); - HttpSplitChangeFetcher fetcher = HttpSplitChangeFetcher.create(httpClientMock, rootTarget, Mockito.mock(TelemetryRuntimeProducer.class)); + HttpSplitChangeFetcher fetcher = HttpSplitChangeFetcher.create(splitHtpClient, rootTarget, + Mockito.mock(TelemetryRuntimeProducer.class), false); - fetcher.fetch(-1, new FetchOptions.Builder().targetChangeNumber(123).build()); - fetcher.fetch(-1, new FetchOptions.Builder().build()); + fetcher.fetch(-1, -1, new FetchOptions.Builder().targetChangeNumber(123).build()); + // TODO: Fix the test with integration tests update List captured = requestCaptor.getAllValues(); - Assert.assertEquals(captured.size(), 2); + Assert.assertEquals(1, captured.size()); Assert.assertTrue(captured.get(0).getUri().toString().contains("till=123")); - Assert.assertFalse(captured.get(1).getUri().toString().contains("till=")); } @Test public void testRandomNumberGeneration() throws URISyntaxException { URI rootTarget = URI.create("https://api.split.io"); CloseableHttpClient httpClientMock = Mockito.mock(CloseableHttpClient.class); - HttpSplitChangeFetcher fetcher = 
HttpSplitChangeFetcher.create(httpClientMock, rootTarget, Mockito.mock(TelemetryRuntimeProducer.class)); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClientMock, new RequestDecorator(null), + "qwerty", metadata()); + HttpSplitChangeFetcher fetcher = HttpSplitChangeFetcher.create(splitHtpClient, rootTarget, + Mockito.mock(TelemetryRuntimeProducer.class), false); Set seen = new HashSet<>(); - long min = (long)Math.pow(2, 63) * (-1); + long min = (long) Math.pow(2, 63) * (-1); final long total = 10000000; for (long x = 0; x < total; x++) { long r = fetcher.makeRandomTill(); @@ -136,4 +164,137 @@ public void testRandomNumberGeneration() throws URISyntaxException { Assert.assertTrue(seen.size() >= (total * 0.9999)); } + + @Test(expected = IllegalStateException.class) + public void testURLTooLong() throws IOException, URISyntaxException, IllegalAccessException, NoSuchMethodException, + InvocationTargetException { + URI rootTarget = URI.create("https://api.split.io"); + + HttpEntity entityMock = Mockito.mock(HttpEntity.class); + when(entityMock.getContent()) + .thenReturn(new ByteArrayInputStream("{\"till\": 1}".getBytes(StandardCharsets.UTF_8))); + ClassicHttpResponse response = Mockito.mock(ClassicHttpResponse.class); + when(response.getCode()).thenReturn(414); + when(response.getEntity()).thenReturn(entityMock); + when(response.getHeaders()).thenReturn(new Header[0]); + CloseableHttpClient httpClientMock = Mockito.mock(CloseableHttpClient.class); + ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(ClassicHttpRequest.class); + when(httpClientMock.execute(requestCaptor.capture())) + .thenReturn(TestHelper.classicResponseToCloseableMock(response)); + + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClientMock, new RequestDecorator(null), + "qwerty", metadata()); + HttpSplitChangeFetcher fetcher = HttpSplitChangeFetcher.create(splitHtpClient, rootTarget, + 
Mockito.mock(TelemetryRuntimeProducer.class), false); + List sets = new ArrayList(); + for (Integer i = 0; i < 100; i++) { + sets.add("set" + i.toString()); + } + String result = sets.stream() + .map(n -> String.valueOf(n)) + .collect(Collectors.joining(",", "", "")); + fetcher.fetch(-1, -1, new FetchOptions.Builder().flagSetsFilter(result).cacheControlHeaders(false).build()); + } + + @Test + public void testSwitchingToOldSpec() throws URISyntaxException, InvocationTargetException, + NoSuchMethodException, IllegalAccessException, IOException, NoSuchFieldException, InterruptedException { + URI rootTarget = URI.create("https://api.split.io"); + CloseableHttpClient httpClientMock = Mockito.mock(CloseableHttpClient.class); + HttpEntity entityMock = Mockito.mock(HttpEntity.class); + when(entityMock.getContent()) + .thenReturn(new ByteArrayInputStream("{\"till\": -1, \"since\": -1, \"splits\": []}".getBytes(StandardCharsets.UTF_8))); + HttpEntity entityMock2 = Mockito.mock(HttpEntity.class); + when(entityMock2.getContent()) + .thenReturn(new ByteArrayInputStream("{\"till\": 123, \"since\": 122, \"splits\": [{\"name\":\"some\"}, {\"name\":\"some2\"}]}".getBytes(StandardCharsets.UTF_8))); + HttpEntity entityMock3 = Mockito.mock(HttpEntity.class); + when(entityMock3.getContent()) + .thenReturn(new ByteArrayInputStream("{\"till\": 123, \"since\": 122, \"splits\": [{\"name\":\"some\"}, {\"name\":\"some2\"}]}".getBytes(StandardCharsets.UTF_8))); + HttpEntity entityMock4 = Mockito.mock(HttpEntity.class); + when(entityMock4.getContent()) + .thenReturn(new ByteArrayInputStream("{\"ff\":{\"t\": 123, \"s\": 122, \"d\": [{\"name\":\"some\"}, {\"name\":\"some2\"}]}, \"rbs\":{\"t\": -1, \"s\": -1, \"d\": []}}".getBytes(StandardCharsets.UTF_8))); + ClassicHttpResponse response1 = Mockito.mock(ClassicHttpResponse.class); + when(response1.getCode()).thenReturn(HttpStatus.SC_BAD_REQUEST); + when(response1.getEntity()).thenReturn(entityMock); + 
when(response1.getHeaders()).thenReturn(new Header[0]); + + ClassicHttpResponse response2 = Mockito.mock(ClassicHttpResponse.class); + when(response2.getCode()).thenReturn(HttpStatus.SC_OK); + when(response2.getEntity()).thenReturn(entityMock2); + when(response2.getHeaders()).thenReturn(new Header[0]); + + ClassicHttpResponse response3 = Mockito.mock(ClassicHttpResponse.class); + when(response3.getCode()).thenReturn(HttpStatus.SC_OK); + when(response3.getEntity()).thenReturn(entityMock3); + when(response3.getHeaders()).thenReturn(new Header[0]); + + ClassicHttpResponse response4 = Mockito.mock(ClassicHttpResponse.class); + when(response4.getCode()).thenReturn(HttpStatus.SC_OK); + when(response4.getEntity()).thenReturn(entityMock4); + when(response4.getHeaders()).thenReturn(new Header[0]); + + ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(ClassicHttpRequest.class); + + when(httpClientMock.execute(requestCaptor.capture())) + .thenReturn(TestHelper.classicResponseToCloseableMock(response1)) + .thenReturn(TestHelper.classicResponseToCloseableMock(response2)) + .thenReturn(TestHelper.classicResponseToCloseableMock(response1)) + .thenReturn(TestHelper.classicResponseToCloseableMock(response3)) + .thenReturn(TestHelper.classicResponseToCloseableMock(response4)); + + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClientMock, new RequestDecorator(null), + "qwerty", metadata()); + HttpSplitChangeFetcher fetcher = HttpSplitChangeFetcher.create(splitHtpClient, rootTarget, + Mockito.mock(TelemetryRuntimeProducer.class), true); + + SplitChange change = fetcher.fetch(-1, -1, new FetchOptions.Builder().cacheControlHeaders(true).build()); + + List captured = requestCaptor.getAllValues(); + Assert.assertEquals(2, captured.size()); + Assert.assertTrue(captured.get(0).getUri().toString().contains("s=1.3")); + Assert.assertTrue(captured.get(1).getUri().toString().contains("s=1.1")); + Assert.assertEquals(122, change.featureFlags.s); + Assert.assertEquals(123, 
change.featureFlags.t); + Assert.assertEquals(2, change.featureFlags.d.size()); + Assert.assertEquals(Json.fromJson("{\"name\":\"some\"}", Split.class).name, change.featureFlags.d.get(0).name); + Assert.assertEquals(Json.fromJson("{\"name\":\"some2\"}", Split.class).name, change.featureFlags.d.get(1).name); + Assert.assertEquals(0, change.ruleBasedSegments.d.size()); + Assert.assertEquals(-1, change.ruleBasedSegments.s); + Assert.assertEquals(-1, change.ruleBasedSegments.t); + + // Set proxy interval to low number to force check for spec 1.3 + Field proxyInterval = fetcher.getClass().getDeclaredField("PROXY_CHECK_INTERVAL_MILLISECONDS_SS"); + proxyInterval.setAccessible(true); + proxyInterval.set(fetcher, 5); + Awaitility.await() + .atMost(1L, TimeUnit.SECONDS) + .untilAsserted(() -> Assert.assertTrue(proxyInterval.get(fetcher).equals(5))); + + change = fetcher.fetch(-1, -1, new FetchOptions.Builder().cacheControlHeaders(true).build()); + + Assert.assertTrue(captured.get(2).getUri().toString().contains("s=1.3")); + Assert.assertTrue(captured.get(3).getUri().toString().contains("s=1.1")); + Assert.assertEquals(122, change.featureFlags.s); + Assert.assertEquals(123, change.featureFlags.t); + Assert.assertEquals(2, change.featureFlags.d.size()); + Assert.assertEquals(Json.fromJson("{\"name\":\"some\"}", Split.class).name, change.featureFlags.d.get(0).name); + Assert.assertEquals(Json.fromJson("{\"name\":\"some2\"}", Split.class).name, change.featureFlags.d.get(1).name); + + // test if proxy is upgraded and spec 1.3 now works. 
+ Awaitility.await() + .atMost(5L, TimeUnit.SECONDS) + .untilAsserted(() -> Assert.assertTrue(captured.size() >= 4)); + change = fetcher.fetch(-1, -1, new FetchOptions.Builder().cacheControlHeaders(true).build()); + Assert.assertTrue(captured.get(4).getUri().toString().contains("s=1.3")); + Assert.assertEquals(122, change.featureFlags.s); + Assert.assertEquals(123, change.featureFlags.t); + Assert.assertEquals(2, change.featureFlags.d.size()); + Assert.assertEquals(Json.fromJson("{\"name\":\"some\"}", Split.class).name, change.featureFlags.d.get(0).name); + Assert.assertEquals(Json.fromJson("{\"name\":\"some2\"}", Split.class).name, change.featureFlags.d.get(1).name); + } + + private SDKMetadata metadata() { + return new SDKMetadata("java-1.2.3", "1.2.3.4", "someIP"); + } + } diff --git a/client/src/test/java/io/split/client/JsonLocalhostSplitChangeFetcherTest.java b/client/src/test/java/io/split/client/JsonLocalhostSplitChangeFetcherTest.java new file mode 100644 index 000000000..583dddab8 --- /dev/null +++ b/client/src/test/java/io/split/client/JsonLocalhostSplitChangeFetcherTest.java @@ -0,0 +1,223 @@ +package io.split.client; + +import io.split.client.dtos.*; +import io.split.client.utils.FileInputStreamProvider; +import io.split.client.utils.InputStreamProvider; +import io.split.client.utils.StaticContentInputStreamProvider; +import io.split.engine.common.FetchOptions; +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.mockito.Mockito; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +public class JsonLocalhostSplitChangeFetcherTest { + @Rule + public TemporaryFolder folder = new TemporaryFolder(); + + private String TEST_0 = 
"{\"ff\":{\"d\":[{\"trafficTypeName\":\"user\",\"name\":\"SPLIT_1\",\"trafficAllocation\":100,\"trafficAllocationSeed\":-1780071202,\"seed\":-1442762199,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"off\",\"changeNumber\":1675443537882,\"algo\":2,\"configurations\":{},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":null},\"matcherType\":\"ALL_KEYS\",\"negate\":false,\"userDefinedSegmentMatcherData\":null,\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100}],\"label\":\"default rule\"}]}],\"s\":-1,\"t\":-1},\"rbs\":{\"d\":[{\"changeNumber\":5,\"name\":\"sample_rule_based_segment\",\"status\":\"ACTIVE\",\"trafficTypeName\":\"user\",\"excluded\":{\"keys\":[\"mauro@split.io\"],\"segments\":[]},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":\"email\"},\"matcherType\":\"ENDS_WITH\",\"negate\":false,\"whitelistMatcherData\":{\"whitelist\":[\"@split.io\"]}}]}}]}],\"s\":-1,\"t\":-1}}"; + private String TEST_1 = 
"{\"ff\":{\"d\":[{\"trafficTypeName\":\"user\",\"name\":\"SPLIT_1\",\"trafficAllocation\":100,\"trafficAllocationSeed\":-1780071202,\"seed\":-1442762199,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"off\",\"changeNumber\":1675443537882,\"algo\":2,\"configurations\":{},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":null},\"matcherType\":\"ALL_KEYS\",\"negate\":false,\"userDefinedSegmentMatcherData\":null,\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100}],\"label\":\"default rule\"}]},{\"trafficTypeName\":\"user\",\"name\":\"SPLIT_2\",\"trafficAllocation\":100,\"trafficAllocationSeed\":-1780071202,\"seed\":-1442762199,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"off\",\"changeNumber\":1675443537882,\"algo\":2,\"configurations\":{},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":null},\"matcherType\":\"ALL_KEYS\",\"negate\":false,\"userDefinedSegmentMatcherData\":null,\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100}],\"label\":\"default 
rule\"}]}],\"s\":-1,\"t\":-1},\"rbs\":{\"d\":[{\"changeNumber\":5,\"name\":\"sample_rule_based_segment\",\"status\":\"ACTIVE\",\"trafficTypeName\":\"user\",\"excluded\":{\"keys\":[\"mauro@split.io\"],\"segments\":[]},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":\"email\"},\"matcherType\":\"ENDS_WITH\",\"negate\":false,\"whitelistMatcherData\":{\"whitelist\":[\"@split.io\"]}}]}}]}],\"s\":-1,\"t\":-1}}"; + private String TEST_2 = "{\"ff\":{\"d\":[{\"trafficTypeName\":\"user\",\"name\":\"SPLIT_1\",\"trafficAllocation\":100,\"trafficAllocationSeed\":-1780071202,\"seed\":-1442762199,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"off\",\"changeNumber\":1675443537882,\"algo\":2,\"configurations\":{},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":null},\"matcherType\":\"ALL_KEYS\",\"negate\":false,\"userDefinedSegmentMatcherData\":null,\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100}],\"label\":\"default 
rule\"}]},{\"trafficTypeName\":\"user\",\"name\":\"SPLIT_2\",\"trafficAllocation\":100,\"trafficAllocationSeed\":-1780071202,\"seed\":-1442762199,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"off\",\"changeNumber\":1675443537882,\"algo\":2,\"configurations\":{},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":null},\"matcherType\":\"ALL_KEYS\",\"negate\":false,\"userDefinedSegmentMatcherData\":null,\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100}],\"label\":\"default rule\"}]}],\"s\":-1,\"t\":2323},\"rbs\":{\"d\":[{\"changeNumber\":5,\"name\":\"sample_rule_based_segment\",\"status\":\"ACTIVE\",\"trafficTypeName\":\"user\",\"excluded\":{\"keys\":[\"mauro@split.io\"],\"segments\":[]},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":\"email\"},\"matcherType\":\"ENDS_WITH\",\"negate\":false,\"whitelistMatcherData\":{\"whitelist\":[\"@split.io\"]}}]}}]}],\"s\":-1,\"t\":-1}}"; + private String TEST_3 = 
"{\"ff\":{\"d\":[{\"trafficTypeName\":\"user\",\"name\":\"SPLIT_1\",\"trafficAllocation\":100,\"trafficAllocationSeed\":-1780071202,\"seed\":-1442762199,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"off\",\"changeNumber\":1675443537882,\"algo\":2,\"configurations\":{},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":null},\"matcherType\":\"ALL_KEYS\",\"negate\":false,\"userDefinedSegmentMatcherData\":null,\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100}],\"label\":\"default rule\"}]}],\"s\":-1,\"t\":2323},\"rbs\":{\"d\":[{\"changeNumber\":5,\"name\":\"sample_rule_based_segment\",\"status\":\"ACTIVE\",\"trafficTypeName\":\"user\",\"excluded\":{\"keys\":[\"mauro@split.io\",\"gaston@split.io\"],\"segments\":[]},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":\"email\"},\"matcherType\":\"ENDS_WITH\",\"negate\":false,\"whitelistMatcherData\":{\"whitelist\":[\"@split.io\"]}}]}}]}],\"s\":-1,\"t\":1122}}"; + private String TEST_4 = 
"{\"ff\":{\"d\":[{\"trafficTypeName\":\"user\",\"name\":\"SPLIT_1\",\"trafficAllocation\":100,\"trafficAllocationSeed\":-1780071202,\"seed\":-1442762199,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"off\",\"changeNumber\":1675443537882,\"algo\":2,\"configurations\":{},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":null},\"matcherType\":\"ALL_KEYS\",\"negate\":false,\"userDefinedSegmentMatcherData\":null,\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100}],\"label\":\"default rule\"}]}],\"s\":-1,\"t\":445345},\"rbs\":{\"d\":[{\"changeNumber\":5,\"name\":\"sample_rule_based_segment\",\"status\":\"ACTIVE\",\"trafficTypeName\":\"user\",\"excluded\":{\"keys\":[\"mauro@split.io\",\"gaston@split.io\"],\"segments\":[]},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":\"email\"},\"matcherType\":\"ENDS_WITH\",\"negate\":false,\"whitelistMatcherData\":{\"whitelist\":[\"@split.io\"]}}]}}]}],\"s\":-1,\"t\":5566}}"; + private String TEST_5 = 
"{\"ff\":{\"d\":[{\"trafficTypeName\":\"user\",\"name\":\"SPLIT_1\",\"trafficAllocation\":100,\"trafficAllocationSeed\":-1780071202,\"seed\":-1442762199,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"off\",\"changeNumber\":1675443537882,\"algo\":2,\"configurations\":{},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":null},\"matcherType\":\"ALL_KEYS\",\"negate\":false,\"userDefinedSegmentMatcherData\":null,\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100}],\"label\":\"default rule\"}]},{\"trafficTypeName\":\"user\",\"name\":\"SPLIT_2\",\"trafficAllocation\":100,\"trafficAllocationSeed\":-1780071202,\"seed\":-1442762199,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"off\",\"changeNumber\":1675443537882,\"algo\":2,\"configurations\":{},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":null},\"matcherType\":\"ALL_KEYS\",\"negate\":false,\"userDefinedSegmentMatcherData\":null,\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100}],\"label\":\"default 
rule\"}]}],\"s\":-1,\"t\":-1},\"rbs\":{\"d\":[{\"changeNumber\":5,\"name\":\"sample_rule_based_segment\",\"status\":\"ACTIVE\",\"trafficTypeName\":\"user\",\"excluded\":{\"keys\":[\"mauro@split.io\"],\"segments\":[]},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":\"email\"},\"matcherType\":\"ENDS_WITH\",\"negate\":false,\"whitelistMatcherData\":{\"whitelist\":[\"@split.io\"]}}]}}]}],\"s\":-1,\"t\":-1}}"; + + @Test + public void testParseSplitChange() throws FileNotFoundException { + InputStream inputStream = new FileInputStream("src/test/resources/split_init.json"); + InputStreamProvider inputStreamProvider = new StaticContentInputStreamProvider(inputStream); + JsonLocalhostSplitChangeFetcher localhostSplitChangeFetcher = new JsonLocalhostSplitChangeFetcher(inputStreamProvider); + FetchOptions fetchOptions = Mockito.mock(FetchOptions.class); + + SplitChange splitChange = localhostSplitChangeFetcher.fetch(-1L, -1, fetchOptions); + + List split = splitChange.featureFlags.d; + Assert.assertEquals(7, split.size()); + Assert.assertEquals(1660326991072L, splitChange.featureFlags.t); + Assert.assertEquals(-1L, splitChange.featureFlags.s); + } + + @Test + public void testSinceAndTillSanitization() throws FileNotFoundException { + InputStream inputStream = new FileInputStream("src/test/resources/sanitizer/splitChangeTillSanitization.json"); + InputStreamProvider inputStreamProvider = new StaticContentInputStreamProvider(inputStream); + JsonLocalhostSplitChangeFetcher localhostSplitChangeFetcher = new JsonLocalhostSplitChangeFetcher(inputStreamProvider); + FetchOptions fetchOptions = Mockito.mock(FetchOptions.class); + + SplitChange splitChange = localhostSplitChangeFetcher.fetch(-1L, -1, fetchOptions); + + Assert.assertEquals(-1L, splitChange.featureFlags.t); + Assert.assertEquals(-1L, splitChange.featureFlags.s); + + Assert.assertEquals(-1L, 
splitChange.ruleBasedSegments.t); + Assert.assertEquals(-1L, splitChange.ruleBasedSegments.s); + + } + + @Test + public void testSplitChangeWithoutSplits() throws FileNotFoundException { + InputStream inputStream = new FileInputStream("src/test/resources/sanitizer/splitChangeWithoutSplits.json"); + InputStreamProvider inputStreamProvider = new StaticContentInputStreamProvider(inputStream); + JsonLocalhostSplitChangeFetcher localhostSplitChangeFetcher = new JsonLocalhostSplitChangeFetcher(inputStreamProvider); + FetchOptions fetchOptions = Mockito.mock(FetchOptions.class); + + SplitChange splitChange = localhostSplitChangeFetcher.fetch(-1L, -1, fetchOptions); + + Assert.assertEquals(0, splitChange.featureFlags.d.size()); + Assert.assertEquals(0, splitChange.ruleBasedSegments.d.size()); + } + + @Test + public void testSplitChangeSplitsToSanitize() throws FileNotFoundException { + InputStream inputStream = new FileInputStream("src/test/resources/sanitizer/splitChangeSplitsToSanitize.json"); + InputStreamProvider inputStreamProvider = new StaticContentInputStreamProvider(inputStream); + JsonLocalhostSplitChangeFetcher localhostSplitChangeFetcher = new JsonLocalhostSplitChangeFetcher(inputStreamProvider); + FetchOptions fetchOptions = Mockito.mock(FetchOptions.class); + + SplitChange splitChange = localhostSplitChangeFetcher.fetch(-1L, -1, fetchOptions); + + Assert.assertEquals(1, splitChange.featureFlags.d.size()); + Split split = splitChange.featureFlags.d.get(0); + Assert.assertEquals(Optional.of(100), Optional.of(split.trafficAllocation)); + Assert.assertEquals(Status.ACTIVE, split.status); + Assert.assertEquals("control", split.defaultTreatment); + Assert.assertEquals(ConditionType.ROLLOUT, split.conditions.get(split.conditions.size() - 1).conditionType); + + Assert.assertEquals(1, splitChange.ruleBasedSegments.d.size()); + RuleBasedSegment ruleBasedSegment = splitChange.ruleBasedSegments.d.get(0); + Assert.assertEquals(Status.ACTIVE, split.status); + 
Assert.assertEquals(ConditionType.ROLLOUT, ruleBasedSegment.conditions.get(ruleBasedSegment.conditions.size() - 1).conditionType); + Assert.assertEquals(new ArrayList<>(), ruleBasedSegment.excluded.segments); + } + + @Test + public void testSplitChangeSplitsToSanitizeMatchersNull() throws FileNotFoundException { + InputStream inputStream = new FileInputStream("src/test/resources/sanitizer/splitChangerMatchersNull.json"); + InputStreamProvider inputStreamProvider = new StaticContentInputStreamProvider(inputStream); + JsonLocalhostSplitChangeFetcher localhostSplitChangeFetcher = new JsonLocalhostSplitChangeFetcher(inputStreamProvider); + FetchOptions fetchOptions = Mockito.mock(FetchOptions.class); + + SplitChange splitChange = localhostSplitChangeFetcher.fetch(-1L, -1, fetchOptions); + + Assert.assertEquals(1, splitChange.featureFlags.d.size()); + Split split = splitChange.featureFlags.d.get(0); + Assert.assertEquals(Optional.of(100), Optional.of(split.trafficAllocation)); + Assert.assertEquals(Status.ACTIVE, split.status); + Assert.assertEquals("off", split.defaultTreatment); + Assert.assertEquals(ConditionType.ROLLOUT, split.conditions.get(split.conditions.size() - 1).conditionType); + } + + @Test + public void testSplitChangeSplitsDifferentScenarios() throws IOException { + + File file = folder.newFile("test_0.json"); + + byte[] test = TEST_0.getBytes(); + com.google.common.io.Files.write(test, file); + + InputStreamProvider inputStreamProvider = new FileInputStreamProvider(file.getAbsolutePath()); + JsonLocalhostSplitChangeFetcher localhostSplitChangeFetcher = new JsonLocalhostSplitChangeFetcher(inputStreamProvider); + FetchOptions fetchOptions = Mockito.mock(FetchOptions.class); + + // 0) The CN from storage is -1, till and since are -1, and sha doesn't exist in the hash. It's going to return a split change with updates. 
+ SplitChange splitChange = localhostSplitChangeFetcher.fetch(-1L, -1, fetchOptions); + Assert.assertEquals(1, splitChange.featureFlags.d.size()); + Assert.assertEquals(-1, splitChange.featureFlags.t); + Assert.assertEquals(-1, splitChange.featureFlags.s); + Assert.assertEquals(1, splitChange.ruleBasedSegments.d.size()); + Assert.assertEquals(-1, splitChange.ruleBasedSegments.t); + Assert.assertEquals(-1, splitChange.ruleBasedSegments.s); + + test = TEST_1.getBytes(); + com.google.common.io.Files.write(test, file); + + // 1) The CN from storage is -1, till and since are -1, and sha is different than before. It's going to return a split change with updates. + splitChange = localhostSplitChangeFetcher.fetch(-1L, -1, fetchOptions); + Assert.assertEquals(2, splitChange.featureFlags.d.size()); + Assert.assertEquals(-1, splitChange.featureFlags.t); + Assert.assertEquals(-1, splitChange.featureFlags.s); + + test = TEST_2.getBytes(); + com.google.common.io.Files.write(test, file); + + // 2) The CN from storage is -1, till is 2323, and since is -1, and sha is the same as before. It's going to return a split change with the same data. + splitChange = localhostSplitChangeFetcher.fetch(-1L, -1, fetchOptions); + Assert.assertEquals(2, splitChange.featureFlags.d.size()); + Assert.assertEquals(-1, splitChange.featureFlags.t); + Assert.assertEquals(-1, splitChange.featureFlags.s); + + test = TEST_3.getBytes(); + com.google.common.io.Files.write(test, file); + + // 3) The CN from storage is -1, till is 2323, and since is -1, sha is different than before. It's going to return a split change with updates. 
+ splitChange = localhostSplitChangeFetcher.fetch(-1L, -1L, fetchOptions); + Assert.assertEquals(1, splitChange.featureFlags.d.size()); + Assert.assertEquals(2323, splitChange.featureFlags.t); + Assert.assertEquals(-1, splitChange.featureFlags.s); + Assert.assertEquals(1, splitChange.ruleBasedSegments.d.size()); + Assert.assertEquals(1122, splitChange.ruleBasedSegments.t); + Assert.assertEquals(-1, splitChange.ruleBasedSegments.s); + + test = TEST_4.getBytes(); + com.google.common.io.Files.write(test, file); + + // 4) The CN from storage is 2323, till is 445345, and since is -1, and sha is the same as before. It's going to return a split change with same data. + splitChange = localhostSplitChangeFetcher.fetch(2323, 1122, fetchOptions); + Assert.assertEquals(1, splitChange.featureFlags.d.size()); + Assert.assertEquals(2323, splitChange.featureFlags.t); + Assert.assertEquals(2323, splitChange.featureFlags.s); + Assert.assertEquals(1, splitChange.ruleBasedSegments.d.size()); + Assert.assertEquals(1122, splitChange.ruleBasedSegments.t); + Assert.assertEquals(1122, splitChange.ruleBasedSegments.s); + + test = TEST_5.getBytes(); + com.google.common.io.Files.write(test, file); + + // 5) The CN from storage is 2323, till and since are -1, and sha is different than before. It's going to return a split change with updates. 
+ splitChange = localhostSplitChangeFetcher.fetch(2323, 1122, fetchOptions); + Assert.assertEquals(2, splitChange.featureFlags.d.size()); + Assert.assertEquals(2323, splitChange.featureFlags.t); + Assert.assertEquals(2323, splitChange.featureFlags.s); + Assert.assertEquals(1, splitChange.ruleBasedSegments.d.size()); + Assert.assertEquals(1122, splitChange.ruleBasedSegments.t); + Assert.assertEquals(1122, splitChange.ruleBasedSegments.s); + } + + @Test(expected = IllegalStateException.class) + public void processTestForException() { + InputStreamProvider inputStreamProvider = new FileInputStreamProvider("src/test/resources/notExist.json"); + JsonLocalhostSplitChangeFetcher localhostSplitChangeFetcher = new JsonLocalhostSplitChangeFetcher(inputStreamProvider); + FetchOptions fetchOptions = Mockito.mock(FetchOptions.class); + + SplitChange splitChange = localhostSplitChangeFetcher.fetch(-1L, -1, fetchOptions); + } + + @Test + public void testParseOldSpec() throws FileNotFoundException { + InputStream inputStream = new FileInputStream("src/test/resources/split_old_spec.json"); + InputStreamProvider inputStreamProvider = new StaticContentInputStreamProvider(inputStream); + JsonLocalhostSplitChangeFetcher localhostSplitChangeFetcher = new JsonLocalhostSplitChangeFetcher(inputStreamProvider); + FetchOptions fetchOptions = Mockito.mock(FetchOptions.class); + + SplitChange splitChange = localhostSplitChangeFetcher.fetch(-1L, -1, fetchOptions); + + List split = splitChange.featureFlags.d; + Assert.assertEquals(7, split.size()); + Assert.assertEquals(1660326991072L, splitChange.featureFlags.t); + Assert.assertEquals(-1L, splitChange.featureFlags.s); + + Assert.assertEquals(new ArrayList<>(), splitChange.ruleBasedSegments.d); + Assert.assertEquals(-1L, splitChange.ruleBasedSegments.t); + Assert.assertEquals(-1L, splitChange.ruleBasedSegments.s); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/JsonLocalhostSplitFactoryTest.java 
b/client/src/test/java/io/split/client/JsonLocalhostSplitFactoryTest.java new file mode 100644 index 000000000..2df64c08a --- /dev/null +++ b/client/src/test/java/io/split/client/JsonLocalhostSplitFactoryTest.java @@ -0,0 +1,61 @@ +package io.split.client; + +import io.split.client.dtos.FallbackTreatment; +import io.split.client.dtos.FallbackTreatmentsConfiguration; +import org.junit.Assert; +import org.junit.Test; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.concurrent.TimeoutException; + +public class JsonLocalhostSplitFactoryTest { + + @Test + public void works() throws IOException, URISyntaxException, InterruptedException, TimeoutException { + FallbackTreatmentsConfiguration fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration(new FallbackTreatment("on-global"), + new HashMap() {{ put("feature", new FallbackTreatment("off-local", "{\"prop2\", \"val2\"}")); }}); + + SplitClientConfig config = SplitClientConfig.builder() + .splitFile("src/test/resources/splits_localhost.json") + .segmentDirectory("src/test/resources") + .setBlockUntilReadyTimeout(10000) + .fallbackTreatments(fallbackTreatmentsConfiguration) + .build(); + SplitFactory splitFactory = SplitFactoryBuilder.build("localhost", config); + SplitClient client = splitFactory.client(); + client.blockUntilReady(); + + Assert.assertEquals("on", client.getTreatment("bilal@@split.io", "rbs_flag", new HashMap() {{ + put("email", "bilal@@split.io"); + }})); + Assert.assertEquals("off", client.getTreatment("mauro@split.io", "rbs_flag", new HashMap() {{ + put("email", "mauro@split.io"); + }})); + Assert.assertEquals("off", client.getTreatment("bilal", "test_split")); + Assert.assertEquals("on", client.getTreatment("bilal", "push_test")); + Assert.assertEquals("on_whitelist", client.getTreatment("admin", "push_test")); + Assert.assertEquals("off-local", client.getTreatment("bilal", "feature")); + Assert.assertEquals("on-global", 
client.getTreatment("bilal", "feature2")); + + client.destroy(); + } + + @Test + public void testOldSpec() throws IOException, URISyntaxException, InterruptedException, TimeoutException { + SplitClientConfig config = SplitClientConfig.builder() + .splitFile("src/test/resources/split_old_spec.json") + .segmentDirectory("src/test/resources") + .setBlockUntilReadyTimeout(10000) + .build(); + SplitFactory splitFactory = SplitFactoryBuilder.build("localhost", config); + SplitClient client = splitFactory.client(); + client.blockUntilReady(); + + Assert.assertEquals("on", client.getTreatment("bilal", "split_1")); + Assert.assertEquals("off", client.getTreatment("bilal", "split_2")); + Assert.assertEquals("v5", client.getTreatment("admin", "split_2")); + client.destroy(); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/LegacyLocalhostSplitChangeFetcherTest.java b/client/src/test/java/io/split/client/LegacyLocalhostSplitChangeFetcherTest.java new file mode 100644 index 000000000..affee8010 --- /dev/null +++ b/client/src/test/java/io/split/client/LegacyLocalhostSplitChangeFetcherTest.java @@ -0,0 +1,41 @@ +package io.split.client; + +import com.google.common.collect.Maps; +import io.split.client.dtos.SplitChange; +import io.split.client.utils.LocalhostUtils; +import io.split.engine.common.FetchOptions; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.mockito.Mockito; + +import java.io.File; +import java.io.IOException; +import java.util.Map; + +public class LegacyLocalhostSplitChangeFetcherTest { + @Rule + public TemporaryFolder folder = new TemporaryFolder(); + + @Test + public void testParseSplitChange() throws IOException { + File file = folder.newFile(LegacyLocalhostSplitChangeFetcher.FILENAME); + + Map map = Maps.newHashMap(); + map.put(SplitAndKey.of("onboarding"), LocalhostSplit.of("on")); + map.put(SplitAndKey.of("onboarding", "user1"), 
LocalhostSplit.of("off")); + map.put(SplitAndKey.of("onboarding", "user2"), LocalhostSplit.of("off")); + map.put(SplitAndKey.of("test"), LocalhostSplit.of("a")); + + LocalhostUtils.writeFile(file, map); + + LegacyLocalhostSplitChangeFetcher localhostSplitChangeFetcher = new LegacyLocalhostSplitChangeFetcher(folder.getRoot().getAbsolutePath()); + FetchOptions fetchOptions = Mockito.mock(FetchOptions.class); + SplitChange splitChange = localhostSplitChangeFetcher.fetch(-1L, -1, fetchOptions); + + Assert.assertEquals(2, splitChange.featureFlags.d.size()); + Assert.assertEquals(-1, splitChange.featureFlags.s); + Assert.assertEquals(-1, splitChange.featureFlags.t); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/LocalhostSplitFactoryTest.java b/client/src/test/java/io/split/client/LocalhostSplitFactoryTest.java index f4ae2e4bf..983bf4cc4 100644 --- a/client/src/test/java/io/split/client/LocalhostSplitFactoryTest.java +++ b/client/src/test/java/io/split/client/LocalhostSplitFactoryTest.java @@ -1,21 +1,22 @@ package io.split.client; import com.google.common.collect.Maps; +import io.split.client.dtos.FallbackTreatment; +import io.split.client.dtos.FallbackTreatmentsConfiguration; +import io.split.client.utils.LocalhostUtils; import io.split.grammar.Treatments; +import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; -import java.io.BufferedWriter; import java.io.File; -import java.io.FileWriter; import java.io.IOException; import java.net.URISyntaxException; +import java.util.HashMap; import java.util.Map; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertEquals; /** * Tests for LocalhostSplitFactory @@ -23,13 +24,12 @@ * @author adil */ public class LocalhostSplitFactoryTest { - @Rule public TemporaryFolder folder = new TemporaryFolder(); @Test public void works() 
throws IOException, URISyntaxException, InterruptedException { - File file = folder.newFile(LocalhostSplitFactory.FILENAME); + File file = folder.newFile(LegacyLocalhostSplitChangeFetcher.FILENAME); Map map = Maps.newHashMap(); map.put(SplitAndKey.of("onboarding"), LocalhostSplit.of("on")); @@ -37,56 +37,47 @@ public void works() throws IOException, URISyntaxException, InterruptedException map.put(SplitAndKey.of("onboarding", "user2"), LocalhostSplit.of("off")); map.put(SplitAndKey.of("test"), LocalhostSplit.of("a")); - writeFile(file, map); - - LocalhostSplitFactory factory = new LocalhostSplitFactory(folder.getRoot().getAbsolutePath(), LocalhostSplitFactory.FILENAME); - SplitClient client = factory.client(); - - assertThat(client.getTreatment(null, "foo"), is(equalTo(Treatments.CONTROL))); - assertThat(client.getTreatment("user1", "foo"), is(equalTo(Treatments.CONTROL))); - assertThat(client.getTreatment("user1", "onboarding"), is(equalTo("off"))); - assertThat(client.getTreatment("user2", "onboarding"), is(equalTo("off"))); - assertThat(client.getTreatment("user3", "onboarding"), is(equalTo("on"))); - assertThat(client.getTreatment("user1", "test"), is(equalTo("a"))); - assertThat(client.getTreatment("user2", "test"), is(equalTo("a"))); + LocalhostUtils.writeFile(file, map); + + SplitClientConfig config = SplitClientConfig.builder() + .splitFile(folder.getRoot().getAbsolutePath()) + .build(); + SplitFactory splitFactory = SplitFactoryBuilder.build("localhost", config); + SplitClient client = splitFactory.client(); + + assertEquals(Treatments.CONTROL, client.getTreatment(null, "foo")); + assertEquals(Treatments.CONTROL, client.getTreatment("user1", "foo")); + assertEquals("off", client.getTreatment("user1", "onboarding")); + assertEquals("off", client.getTreatment("user1", "onboarding")); + assertEquals("off", client.getTreatment("user2", "onboarding")); + assertEquals("on", client.getTreatment("user3", "onboarding")); + assertEquals("a", 
client.getTreatment("user1", "test")); + assertEquals("a", client.getTreatment("user2", "test")); + } - // now update it. - map.clear(); + @Test + public void testFallbackTreatments() throws IOException, URISyntaxException, InterruptedException { + File file = folder.newFile(LegacyLocalhostSplitChangeFetcher.FILENAME); + Map map = Maps.newHashMap(); map.put(SplitAndKey.of("onboarding"), LocalhostSplit.of("on")); + map.put(SplitAndKey.of("onboarding", "user1"), LocalhostSplit.of("off")); + map.put(SplitAndKey.of("onboarding", "user2"), LocalhostSplit.of("off")); + map.put(SplitAndKey.of("test"), LocalhostSplit.of("a")); - factory.updateFeatureToTreatmentMap(map); - - assertThat(client.getTreatment("user1", "onboarding"), is(equalTo("on"))); - assertThat(client.getTreatment("user2", "onboarding"), is(equalTo("on"))); - assertThat(client.getTreatment("user3", "onboarding"), is(equalTo("on"))); - assertThat(client.getTreatment("user1", "test"), is(equalTo(Treatments.CONTROL))); - } - - private void writeFile(File f, Map map) throws IOException { - BufferedWriter writer = new BufferedWriter(new FileWriter(f)); + LocalhostUtils.writeFile(file, map); - for (Map.Entry entry : map.entrySet()) { - String line = toString(entry); - writer.write(line); - } + FallbackTreatmentsConfiguration fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration(new FallbackTreatment("on-global"), + new HashMap() {{ put("feature", new FallbackTreatment("off-local", "{\"prop2\", \"val2\"}")); }}); - writer.flush(); - writer.close(); - } + SplitClientConfig config = SplitClientConfig.builder() + .splitFile(folder.getRoot().getAbsolutePath()) + .fallbackTreatments(fallbackTreatmentsConfiguration) + .build(); + SplitFactory splitFactory = SplitFactoryBuilder.build("localhost", config); + SplitClient client = splitFactory.client(); - private String toString(Map.Entry entry) { - StringBuilder bldr = new StringBuilder(); - bldr.append(entry.getKey().split()); - bldr.append(' '); - 
bldr.append(entry.getValue().treatment); - if (entry.getKey().key() != null) { - bldr.append(' '); - bldr.append(entry.getKey().key()); - } - bldr.append('\n'); - return bldr.toString(); + assertEquals("off-local", client.getTreatment("user1", "feature")); + assertEquals("on-global", client.getTreatment("user1", "feature2")); } - - -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/LocalhostSplitFactoryYamlCompactSampleTest.java b/client/src/test/java/io/split/client/LocalhostSplitFactoryYamlCompactSampleTest.java index b54e7f489..97110cc8d 100644 --- a/client/src/test/java/io/split/client/LocalhostSplitFactoryYamlCompactSampleTest.java +++ b/client/src/test/java/io/split/client/LocalhostSplitFactoryYamlCompactSampleTest.java @@ -1,12 +1,10 @@ package io.split.client; -import com.google.common.collect.Maps; import io.split.grammar.Treatments; import org.junit.Test; import java.io.IOException; import java.net.URISyntaxException; -import java.util.Map; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; @@ -25,8 +23,9 @@ public void works() throws IOException, URISyntaxException { String file = getClass().getClassLoader().getResource("split_compact.yaml").getFile(); - LocalhostSplitFactory factory = new LocalhostSplitFactory("", file); - SplitClient client = factory.client(); + SplitClientConfig config = SplitClientConfig.builder().splitFile(file).build(); + SplitFactory splitFactory = SplitFactoryBuilder.build("localhost", config); + SplitClient client = splitFactory.client(); assertThat(client.getTreatment(null, "foo"), is(equalTo(Treatments.CONTROL))); assertThat(client.getTreatment("user_c", "foo"), is(equalTo(Treatments.CONTROL))); @@ -42,15 +41,30 @@ public void works() throws IOException, URISyntaxException { assertThat(client.getTreatment("user_e", "split_2"), is(equalTo("off"))); assertThat(client.getTreatmentWithConfig("user_e", "split_2").treatment(), is(equalTo("off"))); 
assertThat(client.getTreatmentWithConfig("user_e", "split_2").config(), is(equalTo("{ \"size\" : 55 }"))); + } - // Update + @Test + public void worksYML() throws IOException, URISyntaxException { - Map update = Maps.newHashMap(); - update.put(SplitAndKey.of("split_2", "user_a"), LocalhostSplit.of("on")); + String file = getClass().getClassLoader().getResource("split_compact.yml").getFile(); - factory.updateFeatureToTreatmentMap(update); + SplitClientConfig config = SplitClientConfig.builder().splitFile(file).build(); + SplitFactory splitFactory = SplitFactoryBuilder.build("localhost", config); + SplitClient client = splitFactory.client(); - assertThat(client.getTreatment("user_a", "split_2"), is(equalTo("on"))); - } + assertThat(client.getTreatment(null, "foo"), is(equalTo(Treatments.CONTROL))); + assertThat(client.getTreatment("user_c", "foo"), is(equalTo(Treatments.CONTROL))); + + assertThat(client.getTreatment("user_c", "split_1"), is(equalTo("off"))); + assertThat(client.getTreatmentWithConfig("user_c", "split_1").treatment(), is(equalTo("off"))); + assertThat(client.getTreatmentWithConfig("user_c", "split_1").config(), is(equalTo("{ \"size\" : 10 }"))); -} + assertThat(client.getTreatment("user_d", "split_1"), is(equalTo("on"))); + assertThat(client.getTreatmentWithConfig("user_d", "split_1").treatment(), is(equalTo("on"))); + assertThat(client.getTreatmentWithConfig("user_d", "split_1").config(), is(nullValue())); + + assertThat(client.getTreatment("user_e", "split_2"), is(equalTo("off"))); + assertThat(client.getTreatmentWithConfig("user_e", "split_2").treatment(), is(equalTo("off"))); + assertThat(client.getTreatmentWithConfig("user_e", "split_2").config(), is(equalTo("{ \"size\" : 55 }"))); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/LocalhostSplitFactoryYamlSampleTest.java b/client/src/test/java/io/split/client/LocalhostSplitFactoryYamlSampleTest.java index 933d19039..c77aef3a2 100644 --- 
a/client/src/test/java/io/split/client/LocalhostSplitFactoryYamlSampleTest.java +++ b/client/src/test/java/io/split/client/LocalhostSplitFactoryYamlSampleTest.java @@ -1,12 +1,10 @@ package io.split.client; -import com.google.common.collect.Maps; import io.split.grammar.Treatments; import org.junit.Test; import java.io.IOException; import java.net.URISyntaxException; -import java.util.Map; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; @@ -25,8 +23,9 @@ public void works() throws IOException, URISyntaxException { String file = getClass().getClassLoader().getResource(SplitClientConfig.LOCALHOST_DEFAULT_FILE).getFile(); - LocalhostSplitFactory factory = new LocalhostSplitFactory("", file); - SplitClient client = factory.client(); + SplitClientConfig config = SplitClientConfig.builder().splitFile(file).build(); + SplitFactory splitFactory = SplitFactoryBuilder.build("localhost", config); + SplitClient client = splitFactory.client(); assertThat(client.getTreatment(null, "foo"), is(equalTo(Treatments.CONTROL))); assertThat(client.getTreatment("user_a", "foo"), is(equalTo(Treatments.CONTROL))); @@ -58,15 +57,5 @@ public void works() throws IOException, URISyntaxException { assertThat(client.getTreatment("user_random", "splitWithNoKeys"), is(equalTo("v2"))); assertThat(client.getTreatmentWithConfig("user_random", "splitWithNoKeys").treatment(), is(equalTo("v2"))); assertThat(client.getTreatmentWithConfig("user_random", "splitWithNoKeys").config(), is(equalTo("{ \"size\" : 999 }"))); - - // Update - - Map update = Maps.newHashMap(); - update.put(SplitAndKey.of("split_2", "user_a"), LocalhostSplit.of("on")); - - factory.updateFeatureToTreatmentMap(update); - - assertThat(client.getTreatment("user_a", "split_2"), is(equalTo("on"))); } - } diff --git a/client/src/test/java/io/split/client/LocalhostSplitFactoryYamlTest.java b/client/src/test/java/io/split/client/LocalhostSplitFactoryYamlTest.java index c0be15838..989da1a1e 100644 
--- a/client/src/test/java/io/split/client/LocalhostSplitFactoryYamlTest.java +++ b/client/src/test/java/io/split/client/LocalhostSplitFactoryYamlTest.java @@ -1,22 +1,19 @@ package io.split.client; -import com.google.common.collect.Maps; +import io.split.client.dtos.FallbackTreatment; +import io.split.client.dtos.FallbackTreatmentsConfiguration; +import io.split.client.utils.LocalhostUtils; import io.split.grammar.Treatments; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.yaml.snakeyaml.Yaml; -import java.io.BufferedWriter; import java.io.File; -import java.io.FileWriter; import java.io.IOException; import java.io.StringWriter; import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; @@ -39,7 +36,6 @@ * @author patricioe */ public class LocalhostSplitFactoryYamlTest { - @Rule public TemporaryFolder folder = new TemporaryFolder(); @@ -83,10 +79,11 @@ public void works() throws IOException, URISyntaxException { assertEquals(expectedYaml, writer.toString()); - writeFile(file, writer); + LocalhostUtils.writeFile(file, writer); - LocalhostSplitFactory factory = new LocalhostSplitFactory("", file.getAbsolutePath()); - SplitClient client = factory.client(); + SplitClientConfig config = SplitClientConfig.builder().splitFile(file.getAbsolutePath()).build(); + SplitFactory splitFactory = SplitFactoryBuilder.build("localhost", config); + SplitClient client = splitFactory.client(); assertThat(client.getTreatment(null, "foo"), is(equalTo(Treatments.CONTROL))); assertThat(client.getTreatment("user_a", "foo"), is(equalTo(Treatments.CONTROL))); @@ -103,50 +100,64 @@ public void works() throws IOException, URISyntaxException { assertThat(client.getTreatmentWithConfig("user_a", "split_2").treatment(), is(equalTo("off"))); 
assertThat(client.getTreatmentWithConfig("user_a", "split_2").config(), is(equalTo("{ \"size\" : 20 }"))); - // Update - Map update = Maps.newHashMap(); - update.put(SplitAndKey.of("split_2", "user_a"), LocalhostSplit.of("on")); - factory.updateFeatureToTreatmentMap(update); - - assertThat(client.getTreatment("user_a", "split_2"), is(equalTo("on"))); - - // Make split_1 "legacy" treatment for all keys mines the whitelisted ones. - update = Maps.newHashMap(); - update.put(SplitAndKey.of("split_1", "user_a"), LocalhostSplit.of("off")); - update.put(SplitAndKey.of("split_1", "user_b"), LocalhostSplit.of("on")); - update.put(SplitAndKey.of("split_1"), LocalhostSplit.of("legacy")); - factory.updateFeatureToTreatmentMap(update); - // unchanged assertThat(client.getTreatment("user_a", "split_1"), is(equalTo("off"))); // unchanged assertThat(client.getTreatment("user_b", "split_1"), is(equalTo("on"))); + } - // "legacy" for any other user - assertThat(client.getTreatment("user_blah", "split_1"), is(equalTo("legacy"))); + @Test + public void testFallbackTreatment() throws IOException, URISyntaxException { + File file = folder.newFile(SplitClientConfig.LOCALHOST_DEFAULT_FILE); - factory.updateFeatureToTreatmentMap(update); - } + List> allSplits = new ArrayList(); - private void writeFile(File f, StringWriter content) throws IOException { - BufferedWriter writer = new BufferedWriter(new FileWriter(f)); - writer.write(content.toString()); - writer.flush(); - writer.close(); - } + Map split1_user_a = new LinkedHashMap<>(); + Map split1_user_a_data = new LinkedHashMap<>(); + split1_user_a_data.put("keys", "user_a"); + split1_user_a_data.put("treatment", "off"); + split1_user_a_data.put("config", "{ \"size\" : 20 }"); + split1_user_a.put("split_1", split1_user_a_data); + allSplits.add(split1_user_a); - private String toString(Map.Entry entry) { - StringBuilder bldr = new StringBuilder(); - bldr.append(entry.getKey().split()); - bldr.append(' '); - bldr.append(entry.getValue()); - 
if (entry.getKey().key() != null) { - bldr.append(' '); - bldr.append(entry.getKey().key()); - } - bldr.append('\n'); - return bldr.toString(); - } + Map split1_user_b = new LinkedHashMap<>(); + Map split1_user_b_data = new LinkedHashMap<>(); + split1_user_b_data.put("keys", "user_b"); + split1_user_b_data.put("treatment", "on"); + split1_user_b.put("split_1", split1_user_b_data); + allSplits.add(split1_user_b); + + Map split2_user_a = new LinkedHashMap<>(); + Map split2_user_a_data = new LinkedHashMap<>(); + split2_user_a_data.put("keys", "user_a"); + split2_user_a_data.put("treatment", "off"); + split2_user_a_data.put("config", "{ \"size\" : 20 }"); + split2_user_a.put("split_2", split2_user_a_data); + allSplits.add(split2_user_a); + + + Yaml yaml = new Yaml(); + StringWriter writer = new StringWriter(); + yaml.dump(allSplits, writer); + String expectedYaml = "- split_1: {keys: user_a, treatment: 'off', config: '{ \"size\" : 20 }'}\n" + + "- split_1: {keys: user_b, treatment: 'on'}\n" + + "- split_2: {keys: user_a, treatment: 'off', config: '{ \"size\" : 20 }'}\n"; -} + assertEquals(expectedYaml, writer.toString()); + + LocalhostUtils.writeFile(file, writer); + FallbackTreatmentsConfiguration fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration(new FallbackTreatment("on-global"), + new HashMap() {{ put("feature", new FallbackTreatment("off-local", "{\"prop2\", \"val2\"}")); }}); + + SplitClientConfig config = SplitClientConfig.builder() + .splitFile(file.getAbsolutePath()) + .fallbackTreatments(fallbackTreatmentsConfiguration) + .build(); + SplitFactory splitFactory = SplitFactoryBuilder.build("localhost", config); + SplitClient client = splitFactory.client(); + + assertEquals("off-local", client.getTreatment("user1", "feature")); + assertEquals("on-global", client.getTreatment("user1", "feature2")); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/SplitClientConfigTest.java 
b/client/src/test/java/io/split/client/SplitClientConfigTest.java index 1fa3e6ab1..69c95d032 100644 --- a/client/src/test/java/io/split/client/SplitClientConfigTest.java +++ b/client/src/test/java/io/split/client/SplitClientConfigTest.java @@ -1,12 +1,27 @@ package io.split.client; +import com.google.common.util.concurrent.ThreadFactoryBuilder; +import io.split.client.dtos.RequestContext; +import io.split.client.dtos.FallbackTreatmentsConfiguration; +import io.split.client.dtos.FallbackTreatment; +import io.split.client.dtos.ProxyConfiguration; import io.split.client.impressions.Impression; import io.split.client.impressions.ImpressionListener; import io.split.client.impressions.ImpressionsManager; import io.split.integrations.IntegrationsConfig; -import org.hamcrest.Matchers; import org.junit.Assert; import org.junit.Test; +import org.mockito.Mockito; + +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; @@ -15,14 +30,14 @@ public class SplitClientConfigTest { @Test(expected = IllegalArgumentException.class) - public void cannot_set_feature_refresh_rate_to_less_than_5() { + public void cannotSetFeatureRefreshRateToLessThan5() { SplitClientConfig.builder() .featuresRefreshRate(4) .build(); } @Test(expected = IllegalArgumentException.class) - public void cannot_set_segment_refresh_rate_to_less_than_30() { + public void cannotSetSegmentRefreshRateToLessThan30() { SplitClientConfig.builder() .segmentsRefreshRate(29) .build(); @@ -77,67 +92,62 @@ public void testImpressionRefreshRateConstraints() { } @Test - public void set_impression_refresh_rate_works() { - SplitClientConfig.builder() - .impressionsRefreshRate(1) + public void 
setImpressionRefreshRateWorks() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsRefreshRate(65) .build(); + Assert.assertEquals(65, config.impressionsRefreshRate()); } @Test(expected = IllegalArgumentException.class) - public void cannot_set_events_flush_rate_to_equal_to_1000() { + public void cannotSetEventsFlushRateToEqualTo1000() { SplitClientConfig.builder() .eventFlushIntervalInMillis(999) .build(); } @Test - public void events_flush_rate_works() { - SplitClientConfig.builder() + public void eventsFlushRateWorks() { + SplitClientConfig config = SplitClientConfig.builder() .eventFlushIntervalInMillis(1000) .build(); + Assert.assertEquals(1000, config.eventSendIntervalInMillis()); } @Test(expected = IllegalArgumentException.class) - public void cannot_set_metrics_refresh_rate_to_less_than_30() { + public void cannotSetMetricsRefreshRateToLessThan30() { SplitClientConfig.builder() .metricsRefreshRate(29) .build(); } @Test - public void can_set_refresh_rates_to__30() { - SplitClientConfig.builder() + public void canSetRefreshRatesTo30() { + SplitClientConfig cfg = SplitClientConfig.builder() .featuresRefreshRate(30) .segmentsRefreshRate(30) - .impressionsRefreshRate(30) - .metricsRefreshRate(30) - .build(); - } - - @Test(expected = IllegalArgumentException.class) - public void cannot_set_zero_capacity_on_impression_listener() throws InterruptedException { - SplitClientConfig.builder() - .integrations(IntegrationsConfig.builder() - .impressionsListener(new ImpressionListener.NoopImpressionListener(), 0) - .build()) + .impressionsRefreshRate(65) + .metricsRefreshRate(65) .build(); + Assert.assertEquals(30, cfg.featuresRefreshRate()); + Assert.assertEquals(30, cfg.segmentsRefreshRate()); + Assert.assertEquals(65, cfg.impressionsRefreshRate()); + Assert.assertEquals(65, cfg.metricsRefreshRate()); } @Test - public void config_does_not_crash_if_new_relic_class_not_present() { + public void configDoesNotCrashIfNewRelicClassNotPresent() { 
SplitClientConfig cfg = SplitClientConfig.builder() .integrations(IntegrationsConfig.builder() .newRelicImpressionListener() .build()) .build(); - Assert.assertThat( - cfg.integrationsConfig().getImpressionsListeners(IntegrationsConfig.Execution.SYNC).size(), - is(equalTo(0))); + Assert.assertEquals(0, cfg.integrationsConfig().getImpressionsListeners(IntegrationsConfig.Execution.SYNC).size()); } @Test - public void old_impression_listener_config_still_works() { + public void oldImpressionListenerConfigStillWorks() { SplitClientConfig cfg = SplitClientConfig.builder() .impressionListener(new ImpressionListener() { @Override @@ -148,9 +158,7 @@ public void close() { /* noop */ } }, 1000) .build(); - Assert.assertThat( - cfg.integrationsConfig().getImpressionsListeners(IntegrationsConfig.Execution.ASYNC).size(), - is(equalTo(1))); + Assert.assertEquals(1, cfg.integrationsConfig().getImpressionsListeners(IntegrationsConfig.Execution.ASYNC).size()); } @Test @@ -158,8 +166,8 @@ public void testVersion() { SplitClientConfig config = SplitClientConfig.builder() .build(); - Assert.assertThat(config.splitSdkVersion, Matchers.not(Matchers.equalTo("undefined"))); - Assert.assertThat(config.splitSdkVersion, Matchers.startsWith("java-")); + Assert.assertNotEquals("undefined", config.splitSdkVersion); + Assert.assertTrue(config.splitSdkVersion.startsWith("java-")); } @Test(expected = IllegalArgumentException.class) @@ -188,5 +196,192 @@ public void streamingReconnectBackoffBaseAllowed() { SplitClientConfig cfg = SplitClientConfig.builder() .streamingReconnectBackoffBase(1) .build(); + Assert.assertEquals(1, cfg.streamingReconnectBackoffBase()); + } + + @Test + public void checkDefaultRateForFeatureAndSegment() { + SplitClientConfig config = SplitClientConfig.builder().build(); + Assert.assertEquals(60, config.featuresRefreshRate()); + Assert.assertEquals(60, config.segmentsRefreshRate()); + } + + @Test + public void checkSetFlagSetsFilter() { + List sets = Stream.of("test1", 
"test2", "TEST3", "test-4").collect(Collectors.toList()); + SplitClientConfig config = SplitClientConfig.builder().flagSetsFilter(sets).build(); + Assert.assertNotNull(config.getSetsFilter()); + Assert.assertEquals(3, config.getSetsFilter().size()); + } + + @Test + public void threadFactoryNull() { + SplitClientConfig config = SplitClientConfig.builder().build(); + Assert.assertNull(config.getThreadFactory()); + } + + @Test + public void threadFactoryNotNull() { + SplitClientConfig config = SplitClientConfig.builder().threadFactory(new ThreadFactoryBuilder().build()).build(); + Assert.assertNotNull(config.getThreadFactory()); + } + + @Test + public void IntegrationConfigSyncNotNull() { + SplitClientConfig config = SplitClientConfig.builder().integrations(IntegrationsConfig.builder() + .impressionsListener(Mockito.mock(ImpressionListener.class), 500, IntegrationsConfig.Execution.SYNC) + .build()).build(); + Assert.assertNotNull(config.integrationsConfig()); + Assert.assertEquals(1, config.integrationsConfig().getImpressionsListeners(IntegrationsConfig.Execution.SYNC).size()); + Assert.assertEquals(0, config.integrationsConfig().getImpressionsListeners(IntegrationsConfig.Execution.ASYNC).size()); + } + + @Test + public void IntegrationConfigAsyncNotNull() { + SplitClientConfig config = SplitClientConfig.builder().integrations(IntegrationsConfig.builder() + .impressionsListener(Mockito.mock(ImpressionListener.class), 500, IntegrationsConfig.Execution.ASYNC) + .build()).build(); + Assert.assertNotNull(config.integrationsConfig()); + Assert.assertEquals(0, config.integrationsConfig().getImpressionsListeners(IntegrationsConfig.Execution.SYNC).size()); + Assert.assertEquals(1, config.integrationsConfig().getImpressionsListeners(IntegrationsConfig.Execution.ASYNC).size()); + } + + @Test + public void checkUserCustomdHeaderDecorator() { + CustomHeaderDecorator ucd = new CustomHeaderDecorator() { + @Override + public Map> getHeaderOverrides(RequestContext context) { + return 
null; + } + }; + SplitClientConfig config = SplitClientConfig.builder().customHeaderDecorator(ucd).build(); + Assert.assertNotNull(config.customHeaderDecorator()); + Assert.assertEquals(ucd, config.customHeaderDecorator()); + + SplitClientConfig config2 = SplitClientConfig.builder().build(); + Assert.assertNull(config2.customHeaderDecorator()); + } + + @Test + public void checkProxyParams() throws MalformedURLException, FileNotFoundException { + SplitClientConfig config = SplitClientConfig.builder() + .proxyConfiguration(new ProxyConfiguration.Builder() + .url(new URL("https://proxy-host:8888")) + .build()) + .build(); + Assert.assertEquals("proxy-host", config.proxyConfiguration().getHost().getHostName()); + Assert.assertEquals(8888, config.proxyConfiguration().getHost().getPort()); + Assert.assertEquals("https", config.proxyConfiguration().getHost().getSchemeName()); + + config = SplitClientConfig.builder() + .proxyConfiguration(new ProxyConfiguration.Builder() + .url(new URL("https://proxy-host:888")) + .credentialsProvider(new io.split.client.dtos.BasicCredentialsProvider() { + @Override + public String getUsername() { + return "user"; + } + + @Override + public String getPassword() { + return "pass"; + } + }) + .build()) + .build(); + io.split.client.dtos.BasicCredentialsProvider basicAuth = (io.split.client.dtos.BasicCredentialsProvider) config.proxyConfiguration().getProxyCredentialsProvider(); + Assert.assertEquals("user", basicAuth.getUsername()); + Assert.assertEquals("pass", basicAuth.getPassword()); + + io.split.client.dtos.BearerCredentialsProvider bearerCredentialsProvider = new io.split.client.dtos.BearerCredentialsProvider() { + @Override + public String getToken() { + return "my-token"; + } + }; + + config = SplitClientConfig.builder() + .proxyConfiguration(new ProxyConfiguration.Builder() + .url(new 
URL("https://proxy-host:888")) + .credentialsProvider(bearerCredentialsProvider) + .build()) + .build(); + Assert.assertEquals(bearerCredentialsProvider, config.proxyConfiguration().getProxyCredentialsProvider()); + FileInputStream p12File = new FileInputStream("src/test/resources/keyStore.p12"); + config = SplitClientConfig.builder() + .proxyConfiguration(new ProxyConfiguration.Builder() + .url(new URL("https://proxy-host:888")) + .mtls(p12File, "pass-key") + .build()) + .build(); + Assert.assertEquals(p12File, config.proxyConfiguration().getP12File()); + Assert.assertEquals("pass-key", config.proxyConfiguration().getPassKey()); + } + + @Test(expected = IllegalArgumentException.class) + public void cannotUseInvalidHttpScheme() throws MalformedURLException { + SplitClientConfig.builder() + .proxyConfiguration(new ProxyConfiguration.Builder() + .url(new URL("ftp://proxy-host:888")) + .build()) + .build(); + } + + @Test(expected = MalformedURLException.class) + public void cannotUseInvalidUrl() throws MalformedURLException { + SplitClientConfig.builder() + .proxyConfiguration(new ProxyConfiguration.Builder() + .url(new URL("")) + .build()) + .build(); + } + + @Test(expected = IllegalArgumentException.class) + public void mustUseUrl() throws MalformedURLException { + SplitClientConfig.builder() + .proxyConfiguration(new ProxyConfiguration.Builder() + .build()) + .build(); + } + + @Test(expected = IllegalArgumentException.class) + public void mustUseP12FileWithProxyMtls() throws MalformedURLException { + SplitClientConfig.builder() + .proxyConfiguration(new ProxyConfiguration.Builder() + .url(new URL("https://proxy-host:888")) + .mtls(null, "pass-key") + .build()) + .build(); + } + + @Test(expected = IllegalArgumentException.class) + public void mustUseP12PassKeyWithProxyMtls() throws MalformedURLException, FileNotFoundException { + 
SplitClientConfig.builder() + .proxyConfiguration(new ProxyConfiguration.Builder() + .url(new URL("https://proxy-host:888")) + .mtls(new FileInputStream("src/test/resources/keyStore.p12"), null) + .build()) + .build(); + } + + @Test + public void fallbackTreatmentCheckRegex() { + SplitClientConfig config = SplitClientConfig.builder() + .fallbackTreatments(new FallbackTreatmentsConfiguration(new FallbackTreatment("12#2"))) + .build(); + Assert.assertEquals(null, config.fallbackTreatments().getGlobalFallbackTreatment().getTreatment()); + + config = SplitClientConfig.builder() + .fallbackTreatments(new FallbackTreatmentsConfiguration(new HashMap() {{ put("flag", new FallbackTreatment("12#2")); }} )) + .build(); + Assert.assertEquals(0, config.fallbackTreatments().getByFlagFallbackTreatment().size()); + + config = SplitClientConfig.builder() + .fallbackTreatments(new FallbackTreatmentsConfiguration( + "on", + new HashMap() {{ put("flag", new FallbackTreatment("off")); }} )) + .build(); + Assert.assertEquals("on", config.fallbackTreatments().getGlobalFallbackTreatment().getTreatment()); + Assert.assertEquals("off", config.fallbackTreatments().getByFlagFallbackTreatment().get("flag").getTreatment()); } -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/SplitClientImplTest.java b/client/src/test/java/io/split/client/SplitClientImplTest.java index e2d00d9c5..26a850574 100644 --- a/client/src/test/java/io/split/client/SplitClientImplTest.java +++ b/client/src/test/java/io/split/client/SplitClientImplTest.java @@ -2,28 +2,28 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.google.common.reflect.TypeToken; import io.split.client.api.Key; import io.split.client.api.SplitResult; -import io.split.client.dtos.ConditionType; -import io.split.client.dtos.DataType; -import io.split.client.dtos.Event; -import io.split.client.dtos.Partition; +import 
io.split.client.dtos.*; import io.split.client.events.EventsStorageProducer; import io.split.client.events.NoopEventsStorageImp; import io.split.client.impressions.Impression; import io.split.client.impressions.ImpressionsManager; +import io.split.client.interceptors.FlagSetsFilter; +import io.split.client.interceptors.FlagSetsFilterImpl; +import io.split.engine.matchers.PrerequisitesMatcher; +import io.split.engine.matchers.CombiningMatcher; +import io.split.engine.matchers.EqualToMatcher; +import io.split.engine.matchers.GreaterThanOrEqualToMatcher; +import io.split.engine.matchers.AllKeysMatcher; +import io.split.engine.matchers.DependencyMatcher; +import io.split.storages.RuleBasedSegmentCacheConsumer; import io.split.storages.SegmentCacheConsumer; import io.split.storages.SplitCacheConsumer; import io.split.engine.evaluator.EvaluatorImp; import io.split.engine.SDKReadinessGates; import io.split.engine.experiments.ParsedCondition; import io.split.engine.experiments.ParsedSplit; -import io.split.engine.matchers.AllKeysMatcher; -import io.split.engine.matchers.CombiningMatcher; -import io.split.engine.matchers.DependencyMatcher; -import io.split.engine.matchers.EqualToMatcher; -import io.split.engine.matchers.GreaterThanOrEqualToMatcher; import io.split.engine.matchers.collections.ContainsAnyOfSetMatcher; import io.split.engine.matchers.strings.WhitelistMatcher; import io.split.grammar.Treatments; @@ -47,13 +47,11 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeoutException; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import static org.mockito.Matchers.anyList; 
import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; @@ -70,7 +68,9 @@ public class SplitClientImplTest { private static TelemetryStorage TELEMETRY_STORAGE = Mockito.mock(InMemoryTelemetryStorage.class); - private SplitClientConfig config = SplitClientConfig.builder().setBlockUntilReadyTimeout(100).build(); + private SplitClientConfig config = SplitClientConfig.builder().setBlockUntilReadyTimeout(100).flagSetsFilter(new ArrayList<>( + Arrays.asList("set1", "set2", "set3"))).build(); + private FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(new HashSet<>(Arrays.asList("set1", "set2", "set3"))); @Before public void updateTelemetryStorage() { @@ -78,15 +78,18 @@ public void updateTelemetryStorage() { } @Test - public void null_key_results_in_control() { + public void nullKeyResultsInControl() { String test = "test1"; - ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100))); + ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), + Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(rollOutToEveryone); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, + null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SplitClientImpl client = new SplitClientImpl( @@ 
-96,24 +99,28 @@ public void null_key_results_in_control() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - - assertThat(client.getTreatment(null, "test1"), is(equalTo(Treatments.CONTROL))); + assertEquals(Treatments.CONTROL, client.getTreatment(null, "test1")); verifyZeroInteractions(splitCacheConsumer); } @Test - public void null_test_results_in_control() { + public void nullTestResultsInControl() { String test = "test1"; - ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100))); + ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), + Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(rollOutToEveryone); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, + null, 1, 1, new HashSet<>(), true, null); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SplitClientImpl client = new SplitClientImpl( @@ -123,19 +130,21 @@ public void null_test_results_in_control() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, 
segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - - assertThat(client.getTreatment("adil@relateiq.com", null), is(equalTo(Treatments.CONTROL))); + assertEquals(Treatments.CONTROL, client.getTreatment("adil@relateiq.com", null)); verifyZeroInteractions(splitCacheConsumer); } @Test - public void exceptions_result_in_control() { + public void exceptionsResultInControl() { SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(anyString())).thenThrow(RuntimeException.class); SplitClientImpl client = new SplitClientImpl( @@ -145,9 +154,11 @@ public void exceptions_result_in_control() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - assertThat(client.getTreatment("adil@relateiq.com", "test1"), is(equalTo(Treatments.CONTROL))); + assertEquals(Treatments.CONTROL, client.getTreatment("adil@relateiq.com", "test1")); verify(splitCacheConsumer).get("test1"); } @@ -156,13 +167,16 @@ public void exceptions_result_in_control() { public void works() { String test = "test1"; - ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100))); + ParsedCondition rollOutToEveryone = 
ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), + Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(rollOutToEveryone); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, + null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); when(gates.isSDKReady()).thenReturn(true); @@ -173,13 +187,15 @@ public void works() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); int numKeys = 5; for (int i = 0; i < numKeys; i++) { String randomKey = RandomStringUtils.random(10); - assertThat(client.getTreatment(randomKey, test), is(equalTo("on"))); + Assert.assertEquals("on", client.getTreatment(randomKey, test)); } verify(splitCacheConsumer, times(numKeys)).get(test); @@ -190,16 +206,17 @@ public void works() { * There is no config for this treatment */ @Test - public void works_null_config() { + public void worksNullConfig() { String test = "test1"; ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100))); List conditions = 
Lists.newArrayList(rollOutToEveryone); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SplitClientImpl client = new SplitClientImpl( @@ -209,15 +226,14 @@ public void works_null_config() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - - String randomKey = RandomStringUtils.random(10); SplitResult result = client.getTreatmentWithConfig(randomKey, test); - assertThat(result.treatment(), is(equalTo(Treatments.ON))); - assertThat(result.config(), is(nullValue())); - + assertEquals(Treatments.ON, result.treatment()); + assertNull(result.config()); verify(splitCacheConsumer).get(test); } @@ -232,11 +248,12 @@ public void worksAndHasConfig() { Map configurations = new HashMap<>(); configurations.put(Treatments.ON, "{\"size\" : 30}"); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1, configurations); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1, configurations, new HashSet<>(), true, new 
PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SplitClientImpl client = new SplitClientImpl( @@ -246,15 +263,17 @@ public void worksAndHasConfig() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); int numKeys = 5; for (int i = 0; i < numKeys; i++) { Map attributes = new HashMap<>(); String randomKey = RandomStringUtils.random(10); - assertThat(client.getTreatment(randomKey, test), is(equalTo("on"))); - assertThat(client.getTreatmentWithConfig(randomKey, test, attributes).config(), is(equalTo(configurations.get("on")))); + assertEquals("on", client.getTreatment(randomKey, test)); + assertEquals(configurations.get("on"), client.getTreatmentWithConfig(randomKey, test, attributes).config()); } // Times 2 because we are calling getTreatment twice. 
Once for getTreatment and one for getTreatmentWithConfig @@ -262,16 +281,17 @@ public void worksAndHasConfig() { } @Test - public void last_condition_is_always_default() { + public void lastConditionIsAlwaysDefault() { String test = "test1"; ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new WhitelistMatcher(Lists.newArrayList("adil@codigo.com"))), Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(rollOutToEveryone); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, "user", 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, "user", 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SplitClientImpl client = new SplitClientImpl( @@ -281,10 +301,12 @@ public void last_condition_is_always_default() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - assertThat(client.getTreatment("pato@codigo.com", test), is(equalTo(Treatments.OFF))); + assertEquals(Treatments.OFF, client.getTreatment("pato@codigo.com", test)); verify(splitCacheConsumer).get(test); } @@ -293,21 +315,24 @@ public void last_condition_is_always_default() { * Tests that we retrieve configs from the default treatment */ @Test 
- public void last_condition_is_always_default_but_with_treatment() { + public void lastConditionIsAlwaysDefaultButWithTreatment() { String test = "test1"; - ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new WhitelistMatcher(Lists.newArrayList("adil@codigo.com"))), Lists.newArrayList(partition("on", 100))); + ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new WhitelistMatcher( + Lists.newArrayList("adil@codigo.com"))), Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(rollOutToEveryone); // Add config for only one treatment(default) Map configurations = new HashMap<>(); configurations.put(Treatments.OFF, "{\"size\" : 30}"); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, "user", 1, 1, configurations); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, + "user", 1, 1, configurations, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SplitClientImpl client = new SplitClientImpl( @@ -317,18 +342,20 @@ public void last_condition_is_always_default_but_with_treatment() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); SplitResult result = 
client.getTreatmentWithConfig("pato@codigo.com", test); - assertThat(result.treatment(), is(equalTo(Treatments.OFF))); - assertThat(result.config(), is(equalTo("{\"size\" : 30}"))); + assertEquals(Treatments.OFF, result.treatment()); + assertEquals("{\"size\" : 30}", result.config()); verify(splitCacheConsumer).get(test); } @Test - public void multiple_conditions_work() { + public void multipleConditionsWork() { String test = "test1"; ParsedCondition adil_is_always_on = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new WhitelistMatcher(Lists.newArrayList("adil@codigo.com"))), Lists.newArrayList(partition("on", 100))); @@ -336,11 +363,12 @@ public void multiple_conditions_work() { ParsedCondition trevor_is_always_shown = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new WhitelistMatcher(Lists.newArrayList("trevor@codigo.com"))), Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(adil_is_always_on, pato_is_never_shown, trevor_is_always_shown); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); when(gates.isSDKReady()).thenReturn(false); @@ -351,12 +379,14 @@ public void multiple_conditions_work() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, 
segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - assertThat(client.getTreatment("adil@codigo.com", test), is(equalTo("on"))); - assertThat(client.getTreatment("pato@codigo.com", test), is(equalTo("off"))); - assertThat(client.getTreatment("trevor@codigo.com", test), is(equalTo("on"))); + assertEquals("on", client.getTreatment("adil@codigo.com", test)); + assertEquals("off", client.getTreatment("pato@codigo.com", test)); + assertEquals("on", client.getTreatment("adil@codigo.com", test)); verify(splitCacheConsumer, times(3)).get(test); verify(TELEMETRY_STORAGE, times(3)).recordNonReadyUsage(); @@ -364,16 +394,17 @@ public void multiple_conditions_work() { @Test - public void killed_test_always_goes_to_default() { + public void killedTestAlwaysGoesToDefault() { String test = "test1"; ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new WhitelistMatcher(Lists.newArrayList("adil@codigo.com"))), Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(rollOutToEveryone); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, true, Treatments.OFF, conditions, "user", 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, true, Treatments.OFF, conditions, "user", 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SplitClientImpl client = new SplitClientImpl( @@ -383,10 +414,12 @@ public void killed_test_always_goes_to_default() { 
NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - assertThat(client.getTreatment("adil@codigo.com", test), is(equalTo(Treatments.OFF))); + assertEquals(Treatments.OFF, client.getTreatment("adil@codigo.com", test)); verify(splitCacheConsumer).get(test); } @@ -395,21 +428,24 @@ public void killed_test_always_goes_to_default() { * when killed, the evaluator follows a slightly different path. So testing that when there is a config. */ @Test - public void killed_test_always_goes_to_default_has_config() { + public void killedTestAlwaysGoesToDefaultHasConfig() { String test = "test1"; - ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new WhitelistMatcher(Lists.newArrayList("adil@codigo.com"))), Lists.newArrayList(partition("on", 100))); + ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new WhitelistMatcher( + Lists.newArrayList("adil@codigo.com"))), Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(rollOutToEveryone); // Add config for only one treatment(default) Map configurations = new HashMap<>(); configurations.put(Treatments.OFF, "{\"size\" : 30}"); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, true, Treatments.OFF, conditions, "user", 1, 1, configurations); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, true, Treatments.OFF, conditions, + "user", 1, 1, configurations, new HashSet<>(), true, null); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = 
mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SplitClientImpl client = new SplitClientImpl( @@ -419,32 +455,35 @@ public void killed_test_always_goes_to_default_has_config() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); SplitResult result = client.getTreatmentWithConfig("adil@codigo.com", test); - assertThat(result.treatment(), is(equalTo(Treatments.OFF))); - assertThat(result.config(), is(equalTo("{\"size\" : 30}"))); + assertEquals(Treatments.OFF, result.treatment()); + assertEquals("{\"size\" : 30}", result.config()); verify(splitCacheConsumer).get(test); } @Test - public void dependency_matcher_on() { + public void dependencyMatcherOn() { String parent = "parent"; String dependent = "dependent"; ParsedCondition parent_is_on = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition(Treatments.ON, 100))); List parent_conditions = Lists.newArrayList(parent_is_on); - ParsedSplit parentSplit = ParsedSplit.createParsedSplitForTests(parent, 123, false, Treatments.OFF, parent_conditions, null, 1, 1); + ParsedSplit parentSplit = ParsedSplit.createParsedSplitForTests(parent, 123, false, Treatments.OFF, parent_conditions, null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); ParsedCondition dependent_needs_parent = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new DependencyMatcher(parent, Lists.newArrayList(Treatments.ON))), Lists.newArrayList(partition(Treatments.ON, 100))); List dependent_conditions = 
Lists.newArrayList(dependent_needs_parent); - ParsedSplit dependentSplit = ParsedSplit.createParsedSplitForTests(dependent, 123, false, Treatments.OFF, dependent_conditions, null, 1, 1); + ParsedSplit dependentSplit = ParsedSplit.createParsedSplitForTests(dependent, 123, false, Treatments.OFF, dependent_conditions, null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(parent)).thenReturn(parentSplit); when(splitCacheConsumer.get(dependent)).thenReturn(dependentSplit); @@ -455,32 +494,35 @@ public void dependency_matcher_on() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - assertThat(client.getTreatment("key", parent), is(equalTo(Treatments.ON))); - assertThat(client.getTreatment("key", dependent), is(equalTo(Treatments.ON))); + assertEquals(Treatments.ON, client.getTreatment("key", parent)); + assertEquals(Treatments.ON, client.getTreatment("key", dependent)); } @Test - public void dependency_matcher_off() { + public void dependencyMatcherOff() { String parent = "parent"; String dependent = "dependent"; ParsedCondition parent_is_on = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition(Treatments.ON, 100))); List parent_conditions = Lists.newArrayList(parent_is_on); - ParsedSplit parentSplit = ParsedSplit.createParsedSplitForTests(parent, 123, false, 
Treatments.OFF, parent_conditions, null, 1, 1); + ParsedSplit parentSplit = ParsedSplit.createParsedSplitForTests(parent, 123, false, Treatments.OFF, parent_conditions, null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); ParsedCondition dependent_needs_parent = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new DependencyMatcher(parent, Lists.newArrayList(Treatments.OFF))), Lists.newArrayList(partition(Treatments.ON, 100))); List dependent_conditions = Lists.newArrayList(dependent_needs_parent); - ParsedSplit dependentSplit = ParsedSplit.createParsedSplitForTests(dependent, 123, false, Treatments.OFF, dependent_conditions, null, 1, 1); + ParsedSplit dependentSplit = ParsedSplit.createParsedSplitForTests(dependent, 123, false, Treatments.OFF, dependent_conditions, null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(parent)).thenReturn(parentSplit); when(splitCacheConsumer.get(dependent)).thenReturn(dependentSplit); - + FallbackTreatmentCalculatorImp fallbackTreatmentCalculatorImp = new FallbackTreatmentCalculatorImp(null); SplitClientImpl client = new SplitClientImpl( mock(SplitFactory.class), splitCacheConsumer, @@ -488,25 +530,29 @@ public void dependency_matcher_off() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, fallbackTreatmentCalculatorImp), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + fallbackTreatmentCalculatorImp ); - assertThat(client.getTreatment("key", parent), 
is(equalTo(Treatments.ON))); - assertThat(client.getTreatment("key", dependent), is(equalTo(Treatments.OFF))); + assertEquals(Treatments.ON, client.getTreatment("key", parent)); + assertEquals(Treatments.OFF, client.getTreatment("key", dependent)); } @Test - public void dependency_matcher_control() { + public void dependencyMatcherControl() { String dependent = "dependent"; ParsedCondition dependent_needs_parent = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new DependencyMatcher("not-exists", Lists.newArrayList(Treatments.OFF))), Lists.newArrayList(partition(Treatments.OFF, 100))); List dependent_conditions = Lists.newArrayList(dependent_needs_parent); - ParsedSplit dependentSplit = ParsedSplit.createParsedSplitForTests(dependent, 123, false, Treatments.ON, dependent_conditions, null, 1, 1); + ParsedSplit dependentSplit = ParsedSplit.createParsedSplitForTests(dependent, 123, false, Treatments.ON, dependent_conditions, null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(dependent)).thenReturn(dependentSplit); + FallbackTreatmentCalculatorImp fallbackTreatmentCalculatorImp = new FallbackTreatmentCalculatorImp(null); SplitClientImpl client = new SplitClientImpl( mock(SplitFactory.class), @@ -515,25 +561,28 @@ public void dependency_matcher_control() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, fallbackTreatmentCalculatorImp), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + 
fallbackTreatmentCalculatorImp ); - assertThat(client.getTreatment("key", dependent), is(equalTo(Treatments.ON))); + assertEquals(Treatments.ON, client.getTreatment("key", dependent)); } @Test - public void attributes_work() { + public void attributesWork() { String test = "test1"; ParsedCondition adil_is_always_on = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new WhitelistMatcher(Lists.newArrayList("adil@codigo.com"))), Lists.newArrayList(partition(Treatments.ON, 100))); ParsedCondition users_with_age_greater_than_10_are_on = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of("age", new GreaterThanOrEqualToMatcher(10, DataType.NUMBER)), Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(adil_is_always_on, users_with_age_greater_than_10_are_on); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SplitClientImpl client = new SplitClientImpl( @@ -543,31 +592,33 @@ public void attributes_work() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - assertThat(client.getTreatment("adil@codigo.com", 
test), is(equalTo("on"))); - assertThat(client.getTreatment("adil@codigo.com", test, null), is(equalTo("on"))); - assertThat(client.getTreatment("adil@codigo.com", test, ImmutableMap.of()), is(equalTo("on"))); - - assertThat(client.getTreatment("pato@codigo.com", test, ImmutableMap.of("age", 10)), is(equalTo("on"))); - assertThat(client.getTreatment("pato@codigo.com", test, ImmutableMap.of("age", 9)), is(equalTo("off"))); + assertEquals("on", client.getTreatment("adil@codigo.com", test)); + assertEquals("on", client.getTreatment("adil@codigo.com", test, new HashMap<>())); + assertEquals("on", client.getTreatment("adil@codigo.com", test, ImmutableMap.of())); + assertEquals("on", client.getTreatment("pato@codigo.com", test, ImmutableMap.of("age", 10))); + assertEquals("off", client.getTreatment("pato@codigo.com", test, ImmutableMap.of("age", 9))); verify(splitCacheConsumer, times(5)).get(test); } @Test - public void attributes_work_2() { + public void attributesWork2() { String test = "test1"; ParsedCondition age_equal_to_0_should_be_on = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of("age", new EqualToMatcher(0, DataType.NUMBER)), Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(age_equal_to_0_should_be_on); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, "user", 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, "user", 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SplitClientImpl client = new 
SplitClientImpl( @@ -577,31 +628,34 @@ public void attributes_work_2() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - assertThat(client.getTreatment("adil@codigo.com", test), is(equalTo("off"))); - assertThat(client.getTreatment("adil@codigo.com", test, null), is(equalTo("off"))); - assertThat(client.getTreatment("adil@codigo.com", test, ImmutableMap.of()), is(equalTo("off"))); + assertEquals("off", client.getTreatment("adil@codigo.com", test)); + assertEquals("off", client.getTreatment("adil@codigo.com", test, new HashMap<>())); + assertEquals("off", client.getTreatment("adil@codigo.com", test, ImmutableMap.of())); - assertThat(client.getTreatment("pato@codigo.com", test, ImmutableMap.of("age", 10)), is(equalTo("off"))); - assertThat(client.getTreatment("pato@codigo.com", test, ImmutableMap.of("age", 0)), is(equalTo("on"))); + assertEquals("off", client.getTreatment("pato@codigo.com", test, ImmutableMap.of("age", 10))); + assertEquals("on", client.getTreatment("pato@codigo.com", test, ImmutableMap.of("age", 0))); verify(splitCacheConsumer, times(5)).get(test); } @Test - public void attributes_greater_than_negative_number() { + public void attributesGreaterThanNegativeNumber() { String test = "test1"; ParsedCondition age_equal_to_0_should_be_on = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of("age", new EqualToMatcher(-20, DataType.NUMBER)), Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(age_equal_to_0_should_be_on); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, 
false, Treatments.OFF, conditions, null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SplitClientImpl client = new SplitClientImpl( @@ -611,34 +665,36 @@ public void attributes_greater_than_negative_number() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - assertThat(client.getTreatment("adil@codigo.com", test), is(equalTo("off"))); - assertThat(client.getTreatment("adil@codigo.com", test, null), is(equalTo("off"))); - assertThat(client.getTreatment("adil@codigo.com", test, ImmutableMap.of()), is(equalTo("off"))); - - assertThat(client.getTreatment("pato@codigo.com", test, ImmutableMap.of("age", 10)), is(equalTo("off"))); - assertThat(client.getTreatment("pato@codigo.com", test, ImmutableMap.of("age", -20)), is(equalTo("on"))); - assertThat(client.getTreatment("pato@codigo.com", test, ImmutableMap.of("age", 20)), is(equalTo("off"))); - assertThat(client.getTreatment("pato@codigo.com", test, ImmutableMap.of("age", -21)), is(equalTo("off"))); + assertEquals("off", client.getTreatment("adil@codigo.com", test)); + assertEquals("off", client.getTreatment("adil@codigo.com", test, new HashMap<>())); + assertEquals("off", client.getTreatment("adil@codigo.com", test, ImmutableMap.of())); + assertEquals("off", client.getTreatment("pato@codigo.com", test, ImmutableMap.of("age", 10))); + 
assertEquals("on", client.getTreatment("pato@codigo.com", test, ImmutableMap.of("age", -20))); + assertEquals("off", client.getTreatment("pato@codigo.com", test, ImmutableMap.of("age", 20))); + assertEquals("off", client.getTreatment("pato@codigo.com", test, ImmutableMap.of("age", -21))); verify(splitCacheConsumer, times(7)).get(test); } @Test - public void attributes_for_sets() { + public void attributesForSets() { String test = "test1"; ParsedCondition any_of_set = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of("products", new ContainsAnyOfSetMatcher(Lists.newArrayList("sms", "video"))), Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(any_of_set); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SplitClientImpl client = new SplitClientImpl( @@ -648,25 +704,27 @@ public void attributes_for_sets() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer ,segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer ,segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - assertThat(client.getTreatment("adil@codigo.com", test), is(equalTo("off"))); - assertThat(client.getTreatment("adil@codigo.com", test, null), 
is(equalTo("off"))); - assertThat(client.getTreatment("adil@codigo.com", test, ImmutableMap.of()), is(equalTo("off"))); + assertEquals("off", client.getTreatment("adil@codigo.com", test)); + assertEquals("off", client.getTreatment("adil@codigo.com", test, new HashMap<>())); - assertThat(client.getTreatment("pato@codigo.com", test, ImmutableMap.of("products", Lists.newArrayList())), is(equalTo("off"))); - assertThat(client.getTreatment("pato@codigo.com", test, ImmutableMap.of("products", Lists.newArrayList(""))), is(equalTo("off"))); - assertThat(client.getTreatment("pato@codigo.com", test, ImmutableMap.of("products", Lists.newArrayList("talk"))), is(equalTo("off"))); - assertThat(client.getTreatment("pato@codigo.com", test, ImmutableMap.of("products", Lists.newArrayList("sms"))), is(equalTo("on"))); - assertThat(client.getTreatment("pato@codigo.com", test, ImmutableMap.of("products", Lists.newArrayList("sms", "video"))), is(equalTo("on"))); - assertThat(client.getTreatment("pato@codigo.com", test, ImmutableMap.of("products", Lists.newArrayList("video"))), is(equalTo("on"))); + assertEquals("off", client.getTreatment("adil@codigo.com", test, ImmutableMap.of())); + assertEquals("off", client.getTreatment("pato@codigo.com", test, ImmutableMap.of("products", Lists.newArrayList()))); + assertEquals("off", client.getTreatment("pato@codigo.com", test, ImmutableMap.of("products", Lists.newArrayList("")))); + assertEquals("off", client.getTreatment("pato@codigo.com", test, ImmutableMap.of("products", Lists.newArrayList("talk")))); + assertEquals("on", client.getTreatment("pato@codigo.com", test, ImmutableMap.of("products", Lists.newArrayList("sms")))); + assertEquals("on", client.getTreatment("pato@codigo.com", test, ImmutableMap.of("products", Lists.newArrayList("sms", "video")))); + assertEquals("on", client.getTreatment("pato@codigo.com", test, ImmutableMap.of("products", Lists.newArrayList("video")))); verify(splitCacheConsumer, times(9)).get(test); } @Test - public 
void labels_are_populated() { + public void labelsArePopulated() { String test = "test1"; ParsedCondition age_equal_to_0_should_be_on = new ParsedCondition(ConditionType.ROLLOUT, @@ -676,10 +734,11 @@ public void labels_are_populated() { ); List conditions = Lists.newArrayList(age_equal_to_0_should_be_on); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SDKReadinessGates gates = mock(SDKReadinessGates.class); @@ -691,29 +750,31 @@ public void labels_are_populated() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); Map attributes = ImmutableMap.of("age", -20, "acv", "1000000"); - assertThat(client.getTreatment("pato@codigo.com", test, attributes), is(equalTo("on"))); + assertEquals("on", client.getTreatment("pato@codigo.com", test, attributes)); ArgumentCaptor impressionCaptor = ArgumentCaptor.forClass(List.class); verify(impressionsManager).track(impressionCaptor.capture()); - List impressions = impressionCaptor.getValue(); + List impressions = impressionCaptor.getValue(); assertNotNull(impressions); assertEquals(1, impressions.size()); - Impression impression = impressions.get(0); + Impression impression = 
impressions.get(0).impression(); - assertThat(impression.appliedRule(), is(equalTo("foolabel"))); + assertEquals("foolabel", impression.appliedRule()); - assertThat(impression.attributes(), is(attributes)); + assertEquals(attributes, impression.attributes()); } @Test - public void not_in_split_if_no_allocation() { - traffic_allocation("pato@split.io", 0, 123, "off", "not in split"); + public void notInSplitIfNoAllocation() { + trafficAllocation("pato@split.io", 0, 123, "off", "not in split"); } /** @@ -727,57 +788,62 @@ public void not_in_split_if_no_allocation() { * @author adil */ @Test - public void not_in_split_if_10_percent_allocation() { + public void notInSplitIf10PercentAllocation() { String key = "pato@split.io"; int i = 0; for (; i <= 9; i++) { - traffic_allocation(key, i, 123, "off", "not in split"); + trafficAllocation(key, i, 123, "off", "not in split"); } for (; i <= 100; i++) { - traffic_allocation(key, i, 123, "on", "in segment all"); + trafficAllocation(key, i, 123, "on", "in segment all"); } } @Test - public void traffic_allocation_one_percent() { + public void trafficAllocationOnePercent() { //This key, with this seed it should fall in the 1% String fallsInOnePercent = "pato193"; - traffic_allocation(fallsInOnePercent, 1, 123, "on", "in segment all"); + trafficAllocation(fallsInOnePercent, 1, 123, "on", "in segment all"); //All these others should not be in split for (int offset = 0; offset <= 100; offset++) { - traffic_allocation("pato" + String.valueOf(offset), 1, 123, "off", "not in split"); + trafficAllocation("pato" + String.valueOf(offset), 1, 123, "off", "not in split"); } } @Test - public void in_split_if_100_percent_allocation() { - traffic_allocation("pato@split.io", 100, 123, "on", "in segment all"); + public void inSplitIf100PercentAllocation() { + trafficAllocation("pato@split.io", 100, 123, "on", "in segment all"); } @Test - public void whitelist_overrides_traffic_allocation() { - traffic_allocation("adil@split.io", 0, 123, "on", 
"whitelisted user"); + public void whitelistOverridesTrafficAllocation() { + trafficAllocation("adil@split.io", 0, 123, "on", "whitelisted user"); } - private void traffic_allocation(String key, int trafficAllocation, int trafficAllocationSeed, String expected_treatment_on_or_off, String label) { + private void trafficAllocation(String key, int trafficAllocation, int trafficAllocationSeed, String expected_treatment_on_or_off, String label) { String test = "test1"; - ParsedCondition whitelistCondition = new ParsedCondition(ConditionType.WHITELIST, CombiningMatcher.of(new WhitelistMatcher(Lists.newArrayList("adil@split.io"))), Lists.newArrayList(partition("on", 100), partition("off", 0)), "whitelisted user"); - ParsedCondition rollOutToEveryone = new ParsedCondition(ConditionType.ROLLOUT, CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100), partition("off", 0)), "in segment all"); + ParsedCondition whitelistCondition = new ParsedCondition(ConditionType.WHITELIST, CombiningMatcher.of(new WhitelistMatcher( + Lists.newArrayList("adil@split.io"))), Lists.newArrayList(partition("on", 100), partition( + "off", 0)), "whitelisted user"); + ParsedCondition rollOutToEveryone = new ParsedCondition(ConditionType.ROLLOUT, CombiningMatcher.of(new AllKeysMatcher()), + Lists.newArrayList(partition("on", 100), partition("off", 0)), "in segment all"); List conditions = Lists.newArrayList(whitelistCondition, rollOutToEveryone); - ParsedSplit parsedSplit = new ParsedSplit(test, 123, false, Treatments.OFF, conditions, null, 1, trafficAllocation, trafficAllocationSeed, 1, null); + ParsedSplit parsedSplit = new ParsedSplit(test, 123, false, Treatments.OFF, conditions, null, 1, + trafficAllocation, trafficAllocationSeed, 1, null, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = 
mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); ImpressionsManager impressionsManager = mock(ImpressionsManager.class); @@ -788,18 +854,20 @@ private void traffic_allocation(String key, int trafficAllocation, int trafficAl NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - assertThat(client.getTreatment(key, test), is(equalTo(expected_treatment_on_or_off))); + assertEquals(expected_treatment_on_or_off, client.getTreatment(key, test)); ArgumentCaptor impressionCaptor = ArgumentCaptor.forClass(List.class); verify(impressionsManager).track(impressionCaptor.capture()); assertNotNull(impressionCaptor.getValue()); assertEquals(1, impressionCaptor.getValue().size()); - Impression impression = (Impression) impressionCaptor.getValue().get(0); - assertThat(impression.appliedRule(), is(equalTo(label))); + DecoratedImpression impression = (DecoratedImpression) impressionCaptor.getValue().get(0); + assertEquals(label, impression.impression().appliedRule()); } /** @@ -817,15 +885,18 @@ public void notInTrafficAllocationDefaultConfig() { configurations.put(Treatments.ON, "{\"size\" : 30}"); configurations.put(Treatments.OFF, "{\"size\" : 30}"); // OFF is default treatment - ParsedCondition rollOutToEveryone = new ParsedCondition(ConditionType.ROLLOUT, CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100), partition("off", 0)), "in segment all"); + ParsedCondition rollOutToEveryone = new ParsedCondition(ConditionType.ROLLOUT, CombiningMatcher.of(new AllKeysMatcher()), + Lists.newArrayList(partition("on", 100), 
partition("off", 0)), "in segment all"); List conditions = Lists.newArrayList(rollOutToEveryone); - ParsedSplit parsedSplit = new ParsedSplit(test, 123, false, Treatments.OFF, conditions, null, 1, trafficAllocation, trafficAllocationSeed, 1, configurations); + ParsedSplit parsedSplit = new ParsedSplit(test, 123, false, Treatments.OFF, conditions, null, + 1, trafficAllocation, trafficAllocationSeed, 1, configurations, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); ImpressionsManager impressionsManager = mock(ImpressionsManager.class); @@ -838,27 +909,28 @@ public void notInTrafficAllocationDefaultConfig() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - assertThat(client.getTreatment("pato@split.io", test), is(equalTo(Treatments.OFF))); - + assertEquals(Treatments.OFF, client.getTreatment("pato@split.io", test)); SplitResult result = client.getTreatmentWithConfig("pato@split.io", test); - assertThat(result.treatment(), is(equalTo(Treatments.OFF))); - assertThat(result.config(), is(equalTo("{\"size\" : 30}"))); + assertEquals(Treatments.OFF, result.treatment()); + assertEquals("{\"size\" : 30}", result.config()); ArgumentCaptor impressionCaptor = ArgumentCaptor.forClass(List.class); verify(impressionsManager, times(2)).track(impressionCaptor.capture()); assertNotNull(impressionCaptor.getValue()); 
assertEquals(1, impressionCaptor.getValue().size()); - Impression impression = (Impression) impressionCaptor.getValue().get(0); - assertThat(impression.appliedRule(), is(equalTo("not in split"))); + DecoratedImpression impression = (DecoratedImpression) impressionCaptor.getValue().get(0); + assertEquals("not in split", impression.impression().appliedRule()); } @Test - public void matching_bucketing_keys_work() { + public void matchingBucketingKeysWork() { String test = "test1"; @@ -867,11 +939,12 @@ public void matching_bucketing_keys_work() { ParsedCondition aijaz_should_match = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new WhitelistMatcher(whitelist)), Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(aijaz_should_match); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, "user", 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, "user", 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SplitClientImpl client = new SplitClientImpl( @@ -881,20 +954,106 @@ public void matching_bucketing_keys_work() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); Key bad_key = new Key("adil", "aijaz"); Key good_key = new 
Key("aijaz", "adil"); - assertThat(client.getTreatment(bad_key, test, Collections.emptyMap()), is(equalTo("off"))); - assertThat(client.getTreatment(good_key, test, Collections.emptyMap()), is(equalTo("on"))); + assertEquals("off", client.getTreatment(bad_key, test, Collections.emptyMap())); + assertEquals("on", client.getTreatment(good_key, test, Collections.emptyMap())); verify(splitCacheConsumer, times(2)).get(test); } @Test - public void impression_metadata_is_propagated() { + public void matchingBucketingKeysByFlagSetWork() { + String test = "test1"; + + Set whitelist = new HashSet<>(); + whitelist.add("aijaz"); + ParsedCondition aijaz_should_match = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new WhitelistMatcher(whitelist)), Lists.newArrayList(partition("on", 100))); + + List conditions = Lists.newArrayList(aijaz_should_match); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, "user", 1, 1, new HashSet<>(Arrays.asList("set1")), true, new PrerequisitesMatcher(null)); + + SDKReadinessGates gates = mock(SDKReadinessGates.class); + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); + HashMap> flagsBySets = new HashMap<>(); + flagsBySets.put("set1", new HashSet<>(Arrays.asList(test))); + when(splitCacheConsumer.getNamesByFlagSets(Arrays.asList("set1"))).thenReturn(flagsBySets); + + Map fetchManyResult = new HashMap<>(); + fetchManyResult.put(test, parsedSplit); + when(splitCacheConsumer.fetchMany(Arrays.asList(test))).thenReturn(fetchManyResult); + + SplitClientImpl client = new SplitClientImpl( + mock(SplitFactory.class), + splitCacheConsumer, + new ImpressionsManager.NoOpImpressionsManager(), + NoopEventsStorageImp.create(), + config, + gates, + new 
EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) + ); + + Key bad_key = new Key("adil", "aijaz"); + Key good_key = new Key("aijaz", "adil"); + + assertEquals("off", client.getTreatmentsByFlagSet(bad_key, "set1", Collections.emptyMap()).get(test)); + assertEquals("on", client.getTreatmentsByFlagSet(good_key, "set1", Collections.emptyMap()).get(test)); + } + + @Test + public void matchingBucketingKeysByFlagSetsWork() { + String test = "test1"; + + Set whitelist = new HashSet<>(); + whitelist.add("aijaz"); + ParsedCondition aijaz_should_match = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new WhitelistMatcher(whitelist)), Lists.newArrayList(partition("on", 100))); + + List conditions = Lists.newArrayList(aijaz_should_match); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, "user", 1, 1, new HashSet<>(Arrays.asList("set1")), true, new PrerequisitesMatcher(null)); + + SDKReadinessGates gates = mock(SDKReadinessGates.class); + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); + HashMap> flagsBySets = new HashMap<>(); + flagsBySets.put("set1", new HashSet<>(Arrays.asList(test))); + when(splitCacheConsumer.getNamesByFlagSets(Arrays.asList("set1"))).thenReturn(flagsBySets); + + Map fetchManyResult = new HashMap<>(); + fetchManyResult.put(test, parsedSplit); + when(splitCacheConsumer.fetchMany(Arrays.asList(test))).thenReturn(fetchManyResult); + + SplitClientImpl client = new SplitClientImpl( + mock(SplitFactory.class), + splitCacheConsumer, + new ImpressionsManager.NoOpImpressionsManager(), + NoopEventsStorageImp.create(), + config, + gates, + 
new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) + ); + + Key bad_key = new Key("adil", "aijaz"); + Key good_key = new Key("aijaz", "adil"); + + assertEquals("off", client.getTreatmentsByFlagSets(bad_key, Arrays.asList("set1"), Collections.emptyMap()).get(test)); + assertEquals("on", client.getTreatmentsByFlagSets(good_key, Arrays.asList("set1"), Collections.emptyMap()).get(test)); + } + + @Test + public void impressionMetadataIsPropagated() { String test = "test1"; ParsedCondition age_equal_to_0_should_be_on = new ParsedCondition(ConditionType.ROLLOUT, @@ -904,11 +1063,12 @@ public void impression_metadata_is_propagated() { ); List conditions = Lists.newArrayList(age_equal_to_0_should_be_on); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); ImpressionsManager impressionsManager = mock(ImpressionsManager.class); @@ -919,12 +1079,14 @@ public void impression_metadata_is_propagated() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new 
FallbackTreatmentCalculatorImp(null) ); Map attributes = ImmutableMap.of("age", -20, "acv", "1000000"); - assertThat(client.getTreatment("pato@codigo.com", test, attributes), is(equalTo("on"))); + assertEquals("on", client.getTreatment("pato@codigo.com", test, attributes)); ArgumentCaptor impressionCaptor = ArgumentCaptor.forClass(List.class); @@ -933,10 +1095,10 @@ public void impression_metadata_is_propagated() { assertNotNull(impressionCaptor.getValue()); assertEquals(1, impressionCaptor.getValue().size()); - Impression impression = (Impression) impressionCaptor.getValue().get(0); + DecoratedImpression impression = (DecoratedImpression) impressionCaptor.getValue().get(0); - assertThat(impression.appliedRule(), is(equalTo("foolabel"))); - assertThat(impression.attributes(), is(equalTo(attributes))); + assertEquals("foolabel", impression.impression().appliedRule()); + assertEquals(attributes, impression.impression().attributes()); } private Partition partition(String treatment, int size) { @@ -947,10 +1109,11 @@ private Partition partition(String treatment, int size) { } @Test - public void block_until_ready_does_not_time_when_sdk_is_ready() throws TimeoutException, InterruptedException { + public void blockUntilReadyDoesNotTimeWhenSdkIsReady() throws TimeoutException, InterruptedException { SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SDKReadinessGates ready = mock(SDKReadinessGates.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(ready.waitUntilInternalReady(100)).thenReturn(true); SplitClientImpl client = new SplitClientImpl( @@ -960,17 +1123,20 @@ public void block_until_ready_does_not_time_when_sdk_is_ready() throws TimeoutEx NoopEventsStorageImp.create(), config, ready, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new 
EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); client.blockUntilReady(); } @Test(expected = TimeoutException.class) - public void block_until_ready_times_when_sdk_is_not_ready() throws TimeoutException, InterruptedException { + public void blockUntilReadyTimesWhenSdkIsNotReady() throws TimeoutException, InterruptedException { SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SDKReadinessGates ready = mock(SDKReadinessGates.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(ready.waitUntilInternalReady(100)).thenReturn(false); SplitClientImpl client = new SplitClientImpl( @@ -980,17 +1146,20 @@ public void block_until_ready_times_when_sdk_is_not_ready() throws TimeoutExcept NoopEventsStorageImp.create(), config, ready, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); client.blockUntilReady(); } @Test - public void track_with_valid_parameters() { + public void trackWithValidParameters() { SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(gates.isSDKReady()).thenReturn(false); SplitClientImpl client = new SplitClientImpl( mock(SplitFactory.class), @@ -999,26 +1168,26 @@ public void track_with_valid_parameters() { NoopEventsStorageImp.create(), 
config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - Assert.assertThat(client.track("validKey", "valid_traffic_type", "valid_event"), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(true))); + assertTrue(client.track("validKey", "valid_traffic_type", "valid_event")); String validEventSize = new String(new char[80]).replace('\0', 'a'); String validKeySize = new String(new char[250]).replace('\0', 'a'); - Assert.assertThat(client.track(validKeySize, "valid_traffic_type", validEventSize, 10), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(true))); + assertTrue(client.track(validKeySize, "valid_traffic_type", validEventSize, 10)); verify(TELEMETRY_STORAGE, times(2)).recordLatency(Mockito.anyObject(), Mockito.anyLong()); - } @Test - public void track_with_invalid_event_type_ids() { + public void trackWithInvalidEventTypeIds() { SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); - + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); + SplitClientImpl client = new SplitClientImpl( mock(SplitFactory.class), splitCacheConsumer, @@ -1026,30 +1195,25 @@ public void track_with_invalid_event_type_ids() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - - Assert.assertThat(client.track("validKey", 
"valid_traffic_type", ""), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(false))); - - Assert.assertThat(client.track("validKey", "valid_traffic_type", null), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(false))); - - Assert.assertThat(client.track("validKey", "valid_traffic_type", "invalid#char"), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(false))); + Assert.assertFalse(client.track("validKey", "valid_traffic_type", "")); + Assert.assertFalse(client.track("validKey", "valid_traffic_type", null)); + Assert.assertFalse(client.track("validKey", "valid_traffic_type", "invalid#char")); String invalidEventSize = new String(new char[81]).replace('\0', 'a'); - Assert.assertThat(client.track("validKey", "valid_traffic_type", invalidEventSize), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(false))); - + Assert.assertFalse(client.track("validKey", "valid_traffic_type", invalidEventSize)); } @Test - public void track_with_invalid_traffic_type_names() { + public void trackWithInvalidTrafficTypeNames() { SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); - + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); + SplitClientImpl client = new SplitClientImpl( mock(SplitFactory.class), splitCacheConsumer, @@ -1057,22 +1221,22 @@ public void track_with_invalid_traffic_type_names() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - Assert.assertThat(client.track("validKey", "", "valid"), - 
org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(false))); - - Assert.assertThat(client.track("validKey", null, "valid"), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(false))); + Assert.assertFalse(client.track("validKey", "", "valid")); + Assert.assertFalse(client.track("validKey", null, "valid")); } @Test - public void track_with_invalid_keys() { + public void trackWithInvalidKeys() { SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); - + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); + SplitClientImpl client = new SplitClientImpl( mock(SplitFactory.class), splitCacheConsumer, @@ -1080,31 +1244,30 @@ public void track_with_invalid_keys() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - Assert.assertThat(client.track("", "valid_traffic_type", "valid"), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(false))); - - Assert.assertThat(client.track(null, "valid_traffic_type", "valid"), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(false))); + Assert.assertFalse(client.track("", "valid_traffic_type", "valid")); + Assert.assertFalse(client.track(null, "valid_traffic_type", "valid")); String invalidKeySize = new String(new char[251]).replace('\0', 'a'); - Assert.assertThat(client.track(invalidKeySize, "valid_traffic_type", "valid"), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(false))); + Assert.assertFalse(client.track(invalidKeySize, "valid_traffic_type", "valid")); } @Test - public void 
getTreatment_with_invalid_keys() { + public void getTreatmentWithInvalidKeys() { String test = "split"; ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(rollOutToEveryone); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SplitClientImpl client = new SplitClientImpl( @@ -1114,60 +1277,48 @@ public void getTreatment_with_invalid_keys() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - - Assert.assertThat(client.getTreatment("valid", "split"), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.not(Treatments.CONTROL))); - - Assert.assertThat(client.getTreatment("", "split"), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(Treatments.CONTROL))); - - Assert.assertThat(client.getTreatment(null, "split"), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(Treatments.CONTROL))); + Assert.assertNotEquals(Treatments.CONTROL, client.getTreatment("valid", "split")); + 
assertEquals(Treatments.CONTROL, client.getTreatment("", "split")); + assertEquals(Treatments.CONTROL, client.getTreatment(null, "split")); String invalidKeySize = new String(new char[251]).replace('\0', 'a'); - Assert.assertThat(client.getTreatment(invalidKeySize, "split"), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(Treatments.CONTROL))); - - Assert.assertThat(client.getTreatment("valid", ""), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(Treatments.CONTROL))); + assertEquals(Treatments.CONTROL, client.getTreatment(invalidKeySize, "split")); - Assert.assertThat(client.getTreatment("valid", null), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(Treatments.CONTROL))); + assertEquals(Treatments.CONTROL, client.getTreatment("valid", "")); + assertEquals(Treatments.CONTROL, client.getTreatment("valid", null)); String matchingKey = new String(new char[250]).replace('\0', 'a'); String bucketingKey = new String(new char[250]).replace('\0', 'a'); Key key = new Key(matchingKey, bucketingKey); - Assert.assertThat(client.getTreatment(key, "split", null), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.not(Treatments.CONTROL))); + Assert.assertNotEquals(Treatments.CONTROL, client.getTreatment(key, "split", null)); key = new Key("valid", ""); - Assert.assertThat(client.getTreatment(key, "split", null), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(Treatments.CONTROL))); + assertEquals(Treatments.CONTROL, client.getTreatment(key, "split", null)); key = new Key("", "valid"); - Assert.assertThat(client.getTreatment(key, "split", null), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(Treatments.CONTROL))); + assertEquals(Treatments.CONTROL, client.getTreatment(key, "split", null)); matchingKey = new String(new char[251]).replace('\0', 'a'); bucketingKey = new String(new char[250]).replace('\0', 'a'); key = new Key(matchingKey, bucketingKey); - Assert.assertThat(client.getTreatment(key, "split", null), - 
org.hamcrest.Matchers.is(org.hamcrest.Matchers.is(Treatments.CONTROL))); + assertEquals(Treatments.CONTROL, client.getTreatment(key, "split", null)); matchingKey = new String(new char[250]).replace('\0', 'a'); bucketingKey = new String(new char[251]).replace('\0', 'a'); key = new Key(matchingKey, bucketingKey); - Assert.assertThat(client.getTreatment(key, "split", null), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.is(Treatments.CONTROL))); + assertEquals(Treatments.CONTROL, client.getTreatment(key, "split", null)); } @Test - public void track_with_properties() { + public void trackWithProperties() { SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); EventsStorageProducer eventClientMock = Mockito.mock(EventsStorageProducer.class); Mockito.when(eventClientMock.track((Event) Mockito.any(), Mockito.anyInt())).thenReturn(true); @@ -1178,7 +1329,9 @@ public void track_with_properties() { eventClientMock, config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); HashMap properties = new HashMap<>(); @@ -1186,57 +1339,53 @@ public void track_with_properties() { properties.put("ok_property", 123); properties.put("some_property", new Object()); - Assert.assertThat(client.track("key1", "user", "purchase", properties), - org.hamcrest.Matchers.is(true)); + assertTrue(client.track("key1", "user", "purchase", properties)); verify(eventClientMock).track(eventArgumentCaptor.capture(), Mockito.anyInt()); Event captured = eventArgumentCaptor.getValue(); - 
Assert.assertThat(captured.properties.size(), org.hamcrest.Matchers.is(2)); - Assert.assertThat((Integer) captured.properties.get("ok_property"), org.hamcrest.Matchers.is(123)); - Assert.assertThat(captured.properties.get("some_property"), org.hamcrest.Matchers.nullValue()); + assertEquals(2, captured.properties.size()); + assertEquals(123, captured.properties.get("ok_property")); + assertNull(captured.properties.get("some_property")); properties.clear(); Mockito.reset(eventClientMock); Mockito.when(eventClientMock.track((Event) Mockito.any(), Mockito.anyInt())).thenReturn(true); properties.put("ok_property", 123); properties.put("some_property", Arrays.asList(1, 2, 3)); - Assert.assertThat(client.track("key1", "user", "purchase", properties), - org.hamcrest.Matchers.is(true)); + assertTrue(client.track("key1", "user", "purchase", properties)); eventArgumentCaptor = ArgumentCaptor.forClass(Event.class); verify(eventClientMock).track(eventArgumentCaptor.capture(), Mockito.anyInt()); captured = eventArgumentCaptor.getValue(); - Assert.assertThat(captured.properties.size(), org.hamcrest.Matchers.is(2)); - Assert.assertThat((Integer) captured.properties.get("ok_property"), org.hamcrest.Matchers.is(123)); - Assert.assertThat(captured.properties.get("some_property"), org.hamcrest.Matchers.nullValue()); + assertEquals(2, captured.properties.size()); + assertEquals(123, captured.properties.get("ok_property")); + assertNull(captured.properties.get("some_property")); properties.clear(); Mockito.reset(eventClientMock); Mockito.when(eventClientMock.track((Event) Mockito.any(), Mockito.anyInt())).thenReturn(true); properties.put("ok_property", 123); properties.put("some_property", new HashMap()); - Assert.assertThat(client.track("key1", "user", "purchase", properties), - org.hamcrest.Matchers.is(true)); + assertTrue(client.track("key1", "user", "purchase", properties)); eventArgumentCaptor = ArgumentCaptor.forClass(Event.class); 
verify(eventClientMock).track(eventArgumentCaptor.capture(), Mockito.anyInt()); captured = eventArgumentCaptor.getValue(); - Assert.assertThat(captured.properties.size(), org.hamcrest.Matchers.is(2)); - Assert.assertThat((Integer) captured.properties.get("ok_property"), org.hamcrest.Matchers.is(123)); - Assert.assertThat(captured.properties.get("some_property"), org.hamcrest.Matchers.nullValue()); + assertEquals(2, captured.properties.size()); + assertEquals(123, captured.properties.get("ok_property")); + assertNull(captured.properties.get("some_property")); properties.clear(); Mockito.reset(eventClientMock); Mockito.when(eventClientMock.track((Event) Mockito.any(), Mockito.anyInt())).thenReturn(true); properties.put("ok_property", 123); - Assert.assertThat(client.track("key1", "user", "purchase", 123, properties), - org.hamcrest.Matchers.is(true)); + assertTrue(client.track("key1", "user", "purchase", 123, properties)); eventArgumentCaptor = ArgumentCaptor.forClass(Event.class); verify(eventClientMock).track(eventArgumentCaptor.capture(), Mockito.anyInt()); captured = eventArgumentCaptor.getValue(); - Assert.assertThat(captured.value, org.hamcrest.Matchers.is(123.0)); - Assert.assertThat(captured.trafficTypeName,org.hamcrest.Matchers.is("user")); - Assert.assertThat(captured.eventTypeId,org.hamcrest.Matchers.is("purchase")); - Assert.assertThat(captured.key,org.hamcrest.Matchers.is("key1")); - Assert.assertThat(captured.properties.size(), org.hamcrest.Matchers.is(1)); - Assert.assertThat((Integer) captured.properties.get("ok_property"), org.hamcrest.Matchers.is(123)); + assertEquals(123.0, captured.value, 0); + assertEquals("user", captured.trafficTypeName); + assertEquals("purchase", captured.eventTypeId); + assertEquals("key1", captured.key); + assertEquals(1, captured.properties.size()); + assertEquals(123, captured.properties.get("ok_property")); properties.clear(); Mockito.reset(eventClientMock); @@ -1247,18 +1396,17 @@ public void track_with_properties() { 
properties.put("prop4", "something"); properties.put("prop5", true); properties.put("prop6", null); - Assert.assertThat(client.track("key1", "user", "purchase", properties), - org.hamcrest.Matchers.is(true)); + assertTrue(client.track("key1", "user", "purchase", properties)); eventArgumentCaptor = ArgumentCaptor.forClass(Event.class); verify(eventClientMock).track(eventArgumentCaptor.capture(), Mockito.anyInt()); captured = eventArgumentCaptor.getValue(); - Assert.assertThat(captured.properties.size(), org.hamcrest.Matchers.is(6)); - Assert.assertThat((Integer) captured.properties.get("prop1"), org.hamcrest.Matchers.is(1)); - Assert.assertThat((Long) captured.properties.get("prop2"), org.hamcrest.Matchers.is(2L)); - Assert.assertThat((Double) captured.properties.get("prop3"), org.hamcrest.Matchers.is(7.56)); - Assert.assertThat((String) captured.properties.get("prop4"), org.hamcrest.Matchers.is("something")); - Assert.assertThat((Boolean) captured.properties.get("prop5"), org.hamcrest.Matchers.is(true)); - Assert.assertThat(captured.properties.get("prop6"), org.hamcrest.Matchers.nullValue()); + assertEquals(6, captured.properties.size()); + assertEquals(1, captured.properties.get("prop1")); + assertEquals(2L, captured.properties.get("prop2")); + assertEquals(7.56, captured.properties.get("prop3")); + assertEquals("something", captured.properties.get("prop4")); + assertTrue((Boolean) captured.properties.get("prop5")); + assertNull(captured.properties.get("prop6")); // 110 props of 300 bytes should be enough to make the event fail. 
properties.clear(); @@ -1266,20 +1414,21 @@ public void track_with_properties() { properties.put(new String(new char[300]).replace('\0', 'a') + i , new String(new char[300]).replace('\0', 'a') + i); } - Assert.assertThat(client.track("key1", "user", "purchase", properties), org.hamcrest.Matchers.is(false)); + Assert.assertFalse(client.track("key1", "user", "purchase", properties)); } @Test - public void client_cannot_perform_actions_when_destroyed() throws InterruptedException, URISyntaxException, TimeoutException, IOException { + public void clientCannotPerformActionsWhenDestroyed() throws InterruptedException, URISyntaxException, TimeoutException, IOException { String test = "split"; ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(rollOutToEveryone); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SplitFactory mockFactory = new SplitFactory() { @@ -1305,40 +1454,39 @@ public void client_cannot_perform_actions_when_destroyed() throws InterruptedExc NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), 
TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - Assert.assertThat(client.getTreatment("valid", "split"), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.not(Treatments.CONTROL))); - - Assert.assertThat(client.track("validKey", "valid_traffic_type", "valid_event"), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(true))); + assertEquals(Treatments.ON, client.getTreatment("valid", "split")); + assertTrue(client.track("validKey", "valid_traffic_type", "valid_event")); client.destroy(); - Assert.assertThat(client.getTreatment("valid", "split"), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(Treatments.CONTROL))); - - Assert.assertThat(client.track("validKey", "valid_traffic_type", "valid_event"), - org.hamcrest.Matchers.is(org.hamcrest.Matchers.equalTo(false))); + assertEquals(Treatments.CONTROL, client.getTreatment("valid", "split")); + Assert.assertFalse(client.track("validKey", "valid_traffic_type", "valid_event")); } @Test public void worksAndHasConfigTryKetTreatmentWithKey() { String test = "test1"; - ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100))); + ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), + Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(rollOutToEveryone); // Add config for only one treatment Map configurations = new HashMap<>(); configurations.put(Treatments.ON, "{\"size\" : 30}"); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1, configurations); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, + null, 1, 1, configurations, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = 
mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SplitClientImpl client = new SplitClientImpl( @@ -1348,7 +1496,9 @@ public void worksAndHasConfigTryKetTreatmentWithKey() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); int numKeys = 5; @@ -1356,25 +1506,127 @@ public void worksAndHasConfigTryKetTreatmentWithKey() { Map attributes = new HashMap<>(); String randomKey = RandomStringUtils.random(10); Key key = new Key(randomKey, "BucketingKey"); - assertThat(client.getTreatment(randomKey, test), is(equalTo("on"))); - assertThat(client.getTreatmentWithConfig(key, test, attributes).config(), is(equalTo(configurations.get("on")))); + assertEquals("on", client.getTreatment(randomKey, test)); + assertEquals("{\"size\" : 30}", client.getTreatmentWithConfig(key, test, attributes).config()); } // Times 2 because we are calling getTreatment twice. 
Once for getTreatment and one for getTreatmentWithConfig verify(splitCacheConsumer, times(numKeys * 2)).get(test); } + @Test + public void worksAndHasConfigByFlagSetTryKetTreatmentWithKey() { + String test = "test1"; + + ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), + Lists.newArrayList(partition("on", 100))); + List conditions = Lists.newArrayList(rollOutToEveryone); + + // Add config for only one treatment + Map configurations = new HashMap<>(); + configurations.put(Treatments.ON, "{\"size\" : 30}"); + + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, + null, 1, 1, configurations, new HashSet<>(Arrays.asList("set1")), true, new PrerequisitesMatcher(null)); + + SDKReadinessGates gates = mock(SDKReadinessGates.class); + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); + HashMap> flagsBySets = new HashMap<>(); + flagsBySets.put("set1", new HashSet<>(Arrays.asList(test))); + when(splitCacheConsumer.getNamesByFlagSets(Arrays.asList("set1"))).thenReturn(flagsBySets); + + Map fetchManyResult = new HashMap<>(); + fetchManyResult.put(test, parsedSplit); + when(splitCacheConsumer.fetchMany(Arrays.asList(test))).thenReturn(fetchManyResult); + + SplitClientImpl client = new SplitClientImpl( + mock(SplitFactory.class), + splitCacheConsumer, + new ImpressionsManager.NoOpImpressionsManager(), + NoopEventsStorageImp.create(), + config, + gates, + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) + ); + + int numKeys = 5; + for (int i = 0; i < numKeys; i++) { + Map attributes = new 
HashMap<>(); + String randomKey = RandomStringUtils.random(10); + Key key = new Key(randomKey, "BucketingKey"); + assertEquals("on", client.getTreatmentsByFlagSet(randomKey, "set1", new HashMap<>()).get(test)); + assertEquals("{\"size\" : 30}", client.getTreatmentsWithConfigByFlagSet(key, "set1", attributes).get(test).config()); + } + assertEquals("on", client.getTreatmentsByFlagSet("randomKey", "set1").get(test)); + } + + @Test + public void worksAndHasConfigByFlagSetsTryKetTreatmentWithKey() { + String test = "test1"; + + ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), + Lists.newArrayList(partition("on", 100))); + List conditions = Lists.newArrayList(rollOutToEveryone); + + // Add config for only one treatment + Map configurations = new HashMap<>(); + configurations.put(Treatments.ON, "{\"size\" : 30}"); + + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, + null, 1, 1, configurations, new HashSet<>(Arrays.asList("set1")), true, new PrerequisitesMatcher(null)); + + SDKReadinessGates gates = mock(SDKReadinessGates.class); + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); + HashMap> flagsBySets = new HashMap<>(); + flagsBySets.put("set1", new HashSet<>(Arrays.asList(test))); + when(splitCacheConsumer.getNamesByFlagSets(Arrays.asList("set1"))).thenReturn(flagsBySets); + + Map fetchManyResult = new HashMap<>(); + fetchManyResult.put(test, parsedSplit); + when(splitCacheConsumer.fetchMany(Arrays.asList(test))).thenReturn(fetchManyResult); + + SplitClientImpl client = new SplitClientImpl( + mock(SplitFactory.class), + splitCacheConsumer, + new ImpressionsManager.NoOpImpressionsManager(), + NoopEventsStorageImp.create(), + 
config, + gates, + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) + ); + + int numKeys = 5; + for (int i = 0; i < numKeys; i++) { + Map attributes = new HashMap<>(); + String randomKey = RandomStringUtils.random(10); + Key key = new Key(randomKey, "BucketingKey"); + assertEquals("on", client.getTreatmentsByFlagSets(randomKey, Arrays.asList("set1"), new HashMap<>()).get(test)); + assertEquals("{\"size\" : 30}", client.getTreatmentsWithConfigByFlagSets(key, Arrays.asList("set1"), attributes).get(test).config()); + } + } + @Test(expected = IllegalArgumentException.class) public void blockUntilReadyException() throws TimeoutException, InterruptedException { String test = "test1"; - ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100))); + ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), + Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(rollOutToEveryone); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, + null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); SplitClientConfig config = 
SplitClientConfig.builder().setBlockUntilReadyTimeout(-100).build(); @@ -1385,23 +1637,28 @@ public void blockUntilReadyException() throws TimeoutException, InterruptedExcep NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); client.blockUntilReady(); } @Test - public void null_key_results_in_control_getTreatments() { + public void nullKeyResultsInControlGetTreatments() { String test = "test1"; - ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100))); + ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), + Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(rollOutToEveryone); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, + null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); Map splits = new HashMap<>(); splits.put(test, parsedSplit); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.fetchMany(Collections.singletonList(test))).thenReturn(splits); SplitClientImpl client = new SplitClientImpl( @@ -1411,26 +1668,29 @@ public void null_key_results_in_control_getTreatments() { 
NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - - assertEquals(Treatments.CONTROL, client.getTreatments(null, Collections.singletonList("test1")).get("test1")); verifyZeroInteractions(splitCacheConsumer); } @Test - public void null_splits_results_in_empty_getTreatments() { + public void nullSplitsResultsInEmptyGetTreatments() { String test = "test1"; - ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100))); + ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), + Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(rollOutToEveryone); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, + null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); Map splits = new HashMap<>(); splits.put(test, parsedSplit); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.fetchMany(Collections.singletonList(test))).thenReturn(splits); SplitClientImpl client = new SplitClientImpl( @@ -1440,19 +1700,21 @@ public void null_splits_results_in_empty_getTreatments() { NoopEventsStorageImp.create(), config, gates, - 
new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - assertEquals(0, client.getTreatments("key", null).size()); verifyZeroInteractions(splitCacheConsumer); } @Test - public void exceptions_result_in_control_getTreatments() { + public void exceptionsResultInControlGetTreatments() { SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.fetchMany(anyList())).thenThrow(RuntimeException.class); SplitClientImpl client = new SplitClientImpl( @@ -1462,7 +1724,9 @@ public void exceptions_result_in_control_getTreatments() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); Map result = client.getTreatments("adil@relateiq.com", Arrays.asList("test1", "test2")); assertEquals(2, result.values().size()); @@ -1472,22 +1736,22 @@ public void exceptions_result_in_control_getTreatments() { verify(splitCacheConsumer).fetchMany(anyList()); } - - @Test - public void getTreatments_works() { + public void getTreatmentsWorks() { String test = "test1"; ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(rollOutToEveryone); - 
ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); Map splits = new HashMap<>(); splits.put(test, parsedSplit); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.fetchMany(anyList())).thenReturn(splits); when(gates.isSDKReady()).thenReturn(true); + FallbackTreatmentCalculatorImp fallbackTreatmentCalculatorImp = new FallbackTreatmentCalculatorImp(null); SplitClientImpl client = new SplitClientImpl( mock(SplitFactory.class), @@ -1496,7 +1760,9 @@ public void getTreatments_works() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, fallbackTreatmentCalculatorImp), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + fallbackTreatmentCalculatorImp ); Map result = client.getTreatments("randomKey", Arrays.asList(test, "test2")); assertEquals("on", result.get(test)); @@ -1506,16 +1772,17 @@ public void getTreatments_works() { } @Test - public void empty_splits_results_in_null_getTreatments() { + public void emptySplitsResultsInNullGetTreatments() { String test = "test1"; ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(rollOutToEveryone); - ParsedSplit parsedSplit = 
ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); Map splits = new HashMap<>(); splits.put(test, parsedSplit); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.fetchMany(Collections.singletonList(test))).thenReturn(splits); SplitClientImpl client = new SplitClientImpl( @@ -1525,10 +1792,10 @@ public void empty_splits_results_in_null_getTreatments() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - - Map result = client.getTreatments("key", new ArrayList<>()); assertNotNull(result); assertTrue(result.isEmpty()); @@ -1537,10 +1804,11 @@ public void empty_splits_results_in_null_getTreatments() { } @Test - public void exceptions_result_in_control_treatments() { + public void exceptionsResultInControlTreatments() { SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.get(anyString())).thenThrow(RuntimeException.class); SplitClientImpl client = new SplitClientImpl( @@ -1550,7 +1818,9 @@ 
public void exceptions_result_in_control_treatments() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); Map result = client.getTreatments("adil@relateiq.com", Arrays.asList("test1")); assertEquals(1, result.size()); @@ -1558,14 +1828,17 @@ public void exceptions_result_in_control_treatments() { } @Test - public void works_treatments() { + public void worksTreatments() { String test = "test1"; String test2 = "test2"; - ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100))); + ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), + Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(rollOutToEveryone); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); - ParsedSplit parsedSplit2 = ParsedSplit.createParsedSplitForTests(test2, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, + null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); + ParsedSplit parsedSplit2 = ParsedSplit.createParsedSplitForTests(test2, 123, false, Treatments.OFF, conditions, + null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); Map parsedSplits = new HashMap<>(); parsedSplits.put(test, parsedSplit); parsedSplits.put(test2, parsedSplit2); @@ -1573,6 +1846,7 @@ public void works_treatments() { SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = 
mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.fetchMany(anyList())).thenReturn(parsedSplits); when(gates.isSDKReady()).thenReturn(true); @@ -1583,36 +1857,38 @@ public void works_treatments() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) ); - Map result = client.getTreatments("anyKey", Arrays.asList(test, test2)); assertNotNull(result); assertEquals(2, result.size()); assertEquals("on", result.get(test)); assertEquals("on", result.get(test2)); - verify(splitCacheConsumer, times(1)).fetchMany(anyList()); verify(TELEMETRY_STORAGE, times(1)).recordLatency(Mockito.anyObject(), Mockito.anyLong()); } @Test - public void works_one_control_treatments() { + public void worksOneControlTreatments() { String test = "test1"; String test2 = "test2"; ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100))); List conditions = Lists.newArrayList(rollOutToEveryone); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1, new HashSet<>(), true, new PrerequisitesMatcher(null)); Map parsedSplits = new HashMap<>(); parsedSplits.put(test, parsedSplit); SDKReadinessGates gates = mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer 
segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.fetchMany(anyList())).thenReturn(parsedSplits); when(gates.isSDKReady()).thenReturn(true); + FallbackTreatmentCalculatorImp fallbackTreatmentCalculatorImp = new FallbackTreatmentCalculatorImp(null); SplitClientImpl client = new SplitClientImpl( mock(SplitFactory.class), @@ -1621,7 +1897,9 @@ public void works_one_control_treatments() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, fallbackTreatmentCalculatorImp), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + fallbackTreatmentCalculatorImp ); Map result = client.getTreatments("anyKey", Arrays.asList(test, test2)); @@ -1630,15 +1908,12 @@ public void works_one_control_treatments() { assertEquals("on", result.get(test)); assertEquals("control", result.get(test2)); - verify(splitCacheConsumer, times(1)).fetchMany(anyList()); verify(TELEMETRY_STORAGE, times(1)).recordLatency(Mockito.anyObject(), Mockito.anyLong()); } - - @Test - public void treatments_worksAndHasConfig() { + public void treatmentsWorksAndHasConfig() { String test = "test1"; String test2 = "test2"; @@ -1651,13 +1926,16 @@ public void treatments_worksAndHasConfig() { configurations.put(Treatments.CONTROL, "{\"size\" : 30}"); - ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1, configurations); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, + null, 1, 1, configurations, new HashSet<>(), true, new PrerequisitesMatcher(null)); Map parsedSplits = new HashMap<>(); parsedSplits.put(test, parsedSplit); SDKReadinessGates gates = 
mock(SDKReadinessGates.class); SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); when(splitCacheConsumer.fetchMany(anyList())).thenReturn(parsedSplits); + FallbackTreatmentCalculatorImp fallbackTreatmentCalculatorImp = new FallbackTreatmentCalculatorImp(null); SplitClientImpl client = new SplitClientImpl( mock(SplitFactory.class), @@ -1666,9 +1944,10 @@ public void treatments_worksAndHasConfig() { NoopEventsStorageImp.create(), config, gates, - new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer), TELEMETRY_STORAGE, TELEMETRY_STORAGE + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, fallbackTreatmentCalculatorImp), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + fallbackTreatmentCalculatorImp ); - Map attributes = new HashMap<>(); Map result = client.getTreatmentsWithConfig("randomKey", Arrays.asList(test, test2, "", null), attributes); assertEquals(2, result.size()); @@ -1676,8 +1955,686 @@ public void treatments_worksAndHasConfig() { assertNull(result.get(test2).config()); assertEquals("control", result.get(test2).treatment()); + verify(splitCacheConsumer, times(1)).fetchMany(anyList()); + } + + @Test + public void testTreatmentsByFlagSet() { + String test = "test1"; + + ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), + Lists.newArrayList(partition("on", 100))); + List conditions = Lists.newArrayList(rollOutToEveryone); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, + null, 1, 1, new HashSet<>(Arrays.asList("set1", "set2")), true, new PrerequisitesMatcher(null)); + + SDKReadinessGates gates = mock(SDKReadinessGates.class); + SplitCacheConsumer splitCacheConsumer = 
mock(SplitCacheConsumer.class); + SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); + Map fetchManyResult = new HashMap<>(); + fetchManyResult.put(test, parsedSplit); + when(splitCacheConsumer.fetchMany(new ArrayList<>(Arrays.asList(test)))).thenReturn(fetchManyResult); + List sets = new ArrayList<>(Arrays.asList("set1")); + Map> flagsBySets = new HashMap<>(); + flagsBySets.put("set1", new HashSet<>(Arrays.asList(test))); + when(splitCacheConsumer.getNamesByFlagSets(sets)).thenReturn(flagsBySets); + when(gates.isSDKReady()).thenReturn(true); + + SplitClientImpl client = new SplitClientImpl( + mock(SplitFactory.class), + splitCacheConsumer, + new ImpressionsManager.NoOpImpressionsManager(), + NoopEventsStorageImp.create(), + config, + gates, + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) + ); + + int numKeys = 5; + Map getTreatmentResult; + for (int i = 0; i < numKeys; i++) { + String randomKey = RandomStringUtils.random(10); + getTreatmentResult = client.getTreatmentsByFlagSet(randomKey, "set1", new HashMap<>()); + assertEquals("on", getTreatmentResult.get(test)); + } + verify(splitCacheConsumer, times(numKeys)).fetchMany(new ArrayList<>(Arrays.asList(test))); + verify(TELEMETRY_STORAGE, times(5)).recordLatency(Mockito.anyObject(), Mockito.anyLong()); + getTreatmentResult = client.getTreatmentsByFlagSet("randomKey", "set1"); + assertEquals("on", getTreatmentResult.get(test)); + } + + @Test + public void testTreatmentsByFlagSetInvalid() { + String test = "test1"; + + ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), + Lists.newArrayList(partition("on", 100))); + List conditions = 
Lists.newArrayList(rollOutToEveryone); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, + null, 1, 1, new HashSet<>(Arrays.asList("set1", "set2")), true, new PrerequisitesMatcher(null)); + + SDKReadinessGates gates = mock(SDKReadinessGates.class); + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); + List sets = new ArrayList<>(); + when(gates.isSDKReady()).thenReturn(true); + + SplitClientImpl client = new SplitClientImpl( + mock(SplitFactory.class), + splitCacheConsumer, + new ImpressionsManager.NoOpImpressionsManager(), + NoopEventsStorageImp.create(), + config, + gates, + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) + ); + assertTrue(client.getTreatmentsByFlagSet(RandomStringUtils.random(10), "", new HashMap<>()).isEmpty()); + } + + @Test + public void testTreatmentsByFlagSets() { + String test = "test1"; + String test2 = "test2"; + + ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), + Lists.newArrayList(partition("on", 100))); + List conditions = Lists.newArrayList(rollOutToEveryone); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, + null, 1, 1, new HashSet<>(Arrays.asList("set1", "set2")), true, new PrerequisitesMatcher(null)); + ParsedSplit parsedSplit2 = ParsedSplit.createParsedSplitForTests(test2, 123, false, Treatments.OFF, conditions, + null, 1, 1, new HashSet<>(Arrays.asList("set3", "set4")), true, new PrerequisitesMatcher(null)); + + SDKReadinessGates gates = mock(SDKReadinessGates.class); + 
SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); + + Map fetchManyResult = new HashMap<>(); + fetchManyResult.put(test, parsedSplit); + fetchManyResult.put(test2, parsedSplit2); + when(splitCacheConsumer.fetchMany(new ArrayList<>(Arrays.asList(test2, test)))).thenReturn(fetchManyResult); + + List sets = new ArrayList<>(Arrays.asList("set3", "set1")); + Map> flagsBySets = new HashMap<>(); + flagsBySets.put("set1", new HashSet<>(Arrays.asList(test))); + flagsBySets.put("set3", new HashSet<>(Arrays.asList(test2))); + + when(splitCacheConsumer.getNamesByFlagSets(sets)).thenReturn(flagsBySets); + when(gates.isSDKReady()).thenReturn(true); + + SplitClientImpl client = new SplitClientImpl( + mock(SplitFactory.class), + splitCacheConsumer, + new ImpressionsManager.NoOpImpressionsManager(), + NoopEventsStorageImp.create(), + config, + gates, + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + new FallbackTreatmentCalculatorImp(null) + ); + int numKeys = 5; + Map getTreatmentResult; + for (int i = 0; i < numKeys; i++) { + String randomKey = RandomStringUtils.random(10); + getTreatmentResult = client.getTreatmentsByFlagSets(randomKey, Arrays.asList("set1", "set3"), new HashMap<>()); + assertEquals("on", getTreatmentResult.get(test)); + assertEquals("on", getTreatmentResult.get(test2)); + } + verify(splitCacheConsumer, times(numKeys)).fetchMany(new ArrayList<>(Arrays.asList(test2, test))); + verify(TELEMETRY_STORAGE, times(5)).recordLatency(Mockito.anyObject(), Mockito.anyLong()); + getTreatmentResult = client.getTreatmentsByFlagSets("key", Arrays.asList("set1", "set3")); + assertEquals("on", getTreatmentResult.get(test)); + assertEquals("on", 
getTreatmentResult.get(test2)); + } + + @Test + public void treatmentsWorksAndHasConfigFlagSet() { + String test = "test1"; + String test2 = "test2"; + + ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100))); + List conditions = Lists.newArrayList(rollOutToEveryone); + + // Add config for only one treatment + Map configurations = new HashMap<>(); + configurations.put(Treatments.ON, "{\"size\" : 30}"); + configurations.put(Treatments.CONTROL, "{\"size\" : 30}"); + + + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, + null, 1, 1, configurations, new HashSet<>(Arrays.asList("set1")), true, new PrerequisitesMatcher(null)); + Map parsedSplits = new HashMap<>(); + parsedSplits.put(test, parsedSplit); + + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); + when(splitCacheConsumer.fetchMany(anyList())).thenReturn(parsedSplits); + + List sets = new ArrayList<>(Arrays.asList("set1")); + Map> flagsBySets = new HashMap<>(); + flagsBySets.put("set1", new HashSet<>(Arrays.asList(test, test2))); + when(splitCacheConsumer.getNamesByFlagSets(sets)).thenReturn(flagsBySets); + + SDKReadinessGates gates = mock(SDKReadinessGates.class); + FallbackTreatmentCalculatorImp fallbackTreatmentCalculatorImp = new FallbackTreatmentCalculatorImp(null); + + SplitClientImpl client = new SplitClientImpl( + mock(SplitFactory.class), + splitCacheConsumer, + new ImpressionsManager.NoOpImpressionsManager(), + NoopEventsStorageImp.create(), + config, + gates, + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, fallbackTreatmentCalculatorImp), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + 
flagSetsFilter, + fallbackTreatmentCalculatorImp + ); + Map attributes = new HashMap<>(); + Map result = client.getTreatmentsWithConfigByFlagSet("randomKey", "set1", attributes); + assertEquals(2, result.size()); + assertEquals(configurations.get("on"), result.get(test).config()); + assertNull(result.get(test2).config()); + assertEquals("control", result.get(test2).treatment()); + + verify(splitCacheConsumer, times(1)).fetchMany(anyList()); + + result = client.getTreatmentsWithConfigByFlagSet("randomKey", "set1"); + assertEquals(2, result.size()); + assertEquals(configurations.get("on"), result.get(test).config()); + assertNull(result.get(test2).config()); + assertEquals("control", result.get(test2).treatment()); + } + + @Test + public void treatmentsWorksAndHasConfigFlagSets() { + String test = "test1"; + String test2 = "test2"; + + ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(partition("on", 100))); + List conditions = Lists.newArrayList(rollOutToEveryone); + + // Add config for only one treatment + Map configurations = new HashMap<>(); + configurations.put(Treatments.ON, "{\"size\" : 30}"); + configurations.put(Treatments.CONTROL, "{\"size\" : 30}"); + + + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, + null, 1, 1, configurations, new HashSet<>(Arrays.asList("set1")), true, new PrerequisitesMatcher(null)); + Map parsedSplits = new HashMap<>(); + parsedSplits.put(test, parsedSplit); + + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); + when(splitCacheConsumer.fetchMany(anyList())).thenReturn(parsedSplits); + + List sets = new ArrayList<>(Arrays.asList("set1")); + Map> flagsBySets = new 
HashMap<>(); + flagsBySets.put("set1", new HashSet<>(Arrays.asList(test, test2))); + when(splitCacheConsumer.getNamesByFlagSets(sets)).thenReturn(flagsBySets); + + SDKReadinessGates gates = mock(SDKReadinessGates.class); + FallbackTreatmentCalculatorImp fallbackTreatmentCalculatorImp = new FallbackTreatmentCalculatorImp(null); + + SplitClientImpl client = new SplitClientImpl( + mock(SplitFactory.class), + splitCacheConsumer, + new ImpressionsManager.NoOpImpressionsManager(), + NoopEventsStorageImp.create(), + config, + gates, + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, fallbackTreatmentCalculatorImp), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + flagSetsFilter, + fallbackTreatmentCalculatorImp + ); + Map attributes = new HashMap<>(); + Map result = client.getTreatmentsWithConfigByFlagSets("randomKey", new ArrayList<>(Arrays.asList("set1")), attributes); + assertEquals(2, result.size()); + assertEquals(configurations.get("on"), result.get(test).config()); + assertNull(result.get(test2).config()); + assertEquals("control", result.get(test2).treatment()); verify(splitCacheConsumer, times(1)).fetchMany(anyList()); } -} + @Test + public void impressionPropertiesTest() { + String test = "test1"; + + ParsedCondition age_equal_to_0_should_be_on = new ParsedCondition(ConditionType.ROLLOUT, + CombiningMatcher.of("age", new EqualToMatcher(-20, DataType.NUMBER)), + Lists.newArrayList(partition("on", 100)), + "foolabel" + ); + + List conditions = Lists.newArrayList(age_equal_to_0_should_be_on); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests(test, 123, false, Treatments.OFF, conditions, null, 1, 1, new HashSet<>(Arrays.asList("set")), true, new PrerequisitesMatcher(null)); + Map parsedSplits = new HashMap<>(); + parsedSplits.put(test, parsedSplit); + + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + 
RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); + when(splitCacheConsumer.get(test)).thenReturn(parsedSplit); + when(splitCacheConsumer.fetchMany(Arrays.asList(test))).thenReturn(parsedSplits); + Map> splits = new HashMap<>(); + splits.put("set", new HashSet<>(Arrays.asList(test))); + when(splitCacheConsumer.getNamesByFlagSets(Arrays.asList("set"))).thenReturn(splits); + + SDKReadinessGates gates = mock(SDKReadinessGates.class); + ImpressionsManager impressionsManager = mock(ImpressionsManager.class); + SplitClientImpl client = new SplitClientImpl( + mock(SplitFactory.class), + splitCacheConsumer, + impressionsManager, + NoopEventsStorageImp.create(), + config, + gates, + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, null), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + new FlagSetsFilterImpl(new HashSet<>()), + new FallbackTreatmentCalculatorImp(null) + + ); + Map attributes = ImmutableMap.of("age", -20, "acv", "1000000"); + EvaluationOptions properties = new EvaluationOptions(new HashMap() + {{ + put("prop2", "val2"); + put("prop1", "val1"); + }}); + Map result = new HashMap<>(); + result.put(test, Treatments.ON); + List split_names = Arrays.asList(test); + + assertEquals("on", client.getTreatment("pato@codigo.com", test, attributes, properties)); + assertEquals("on", client.getTreatmentWithConfig("bilal1@codigo.com", test, attributes, properties).treatment()); + assertEquals("on", client.getTreatments("bilal2@codigo.com", Arrays.asList(test), attributes, properties).get(test)); + assertEquals("on", client.getTreatmentsWithConfig("bilal3@codigo.com", Arrays.asList(test), attributes, properties).get(test).treatment()); + assertEquals("on", client.getTreatmentsByFlagSet("bilal4@codigo.com", "set", attributes, properties).get(test)); + assertEquals("on", client.getTreatmentsByFlagSets("bilal5@codigo.com", Arrays.asList("set"), attributes, properties).get(test)); + 
assertEquals("on", client.getTreatmentsWithConfigByFlagSet("bilal6@codigo.com", "set", attributes, properties).get(test).treatment()); + assertEquals("on", client.getTreatmentsWithConfigByFlagSets("bilal7@codigo.com", Arrays.asList("set"), attributes, properties).get(test).treatment()); + assertEquals("on", client.getTreatment(new Key("bilal8@codigo.com", "bilal8@codigo.com"), test, attributes, properties)); + assertEquals("on", client.getTreatmentWithConfig(new Key("bilal9@codigo.com", "bilal9@codigo.com"), test, attributes, properties).treatment()); + assertEquals("on", client.getTreatments(new Key("bilal10@codigo.com", "bilal10@codigo.com"), Arrays.asList(test), attributes, properties).get(test)); + assertEquals("on", client.getTreatmentsWithConfig(new Key("bilal11@codigo.com", "bilal11@codigo.com"), Arrays.asList(test), attributes, properties).get(test).treatment()); + assertEquals("on", client.getTreatmentsByFlagSet(new Key("bilal12@codigo.com", "bilal12@codigo.com"), "set", attributes, properties).get(test)); + assertEquals("on", client.getTreatmentsByFlagSets(new Key("bilal13@codigo.com", "bilal13@codigo.com"), Arrays.asList("set"), attributes, properties).get(test)); + assertEquals("on", client.getTreatmentsWithConfigByFlagSet(new Key("bilal14@codigo.com", "bilal14@codigo.com"), "set", attributes, properties).get(test).treatment()); + assertEquals("on", client.getTreatmentsWithConfigByFlagSets(new Key("bilal15@codigo.com", "bilal15@codigo.com"), Arrays.asList("set"), attributes, properties).get(test).treatment()); + assertEquals("off", client.getTreatment("bilal16@codigo.com", test, properties)); + assertEquals("off", client.getTreatmentWithConfig("bilal17@codigo.com", test, properties).treatment()); + assertEquals("off", client.getTreatments("bilal18@codigo.com", Arrays.asList(test), properties).get(test)); + assertEquals("off", client.getTreatmentsWithConfig("bilal19@codigo.com", Arrays.asList(test), properties).get(test).treatment()); + 
assertEquals("off", client.getTreatmentsByFlagSet("bilal20@codigo.com", "set", properties).get(test)); + assertEquals("off", client.getTreatmentsByFlagSets("bilal21@codigo.com", Arrays.asList("set"), properties).get(test)); + assertEquals("off", client.getTreatmentsWithConfigByFlagSet("bilal22@codigo.com", "set", properties).get(test).treatment()); + assertEquals("off", client.getTreatmentsWithConfigByFlagSets("bilal23@codigo.com", Arrays.asList("set"), properties).get(test).treatment()); + + ArgumentCaptor impressionCaptor = ArgumentCaptor.forClass(List.class); + verify(impressionsManager, times(24)).track(impressionCaptor.capture()); + assertNotNull(impressionCaptor.getValue()); + + DecoratedImpression impression = (DecoratedImpression) impressionCaptor.getAllValues().get(0).get(0); + assertEquals("pato@codigo.com", impression.impression().key()); + assertEquals("{\"prop2\":\"val2\",\"prop1\":\"val1\"}", impression.impression().properties()); + + for (int i=1; i<=23; i++) { + impression = (DecoratedImpression) impressionCaptor.getAllValues().get(i).get(0); + assertEquals("bilal" + i + "@codigo.com", impression.impression().key()); + assertEquals("{\"prop2\":\"val2\",\"prop1\":\"val1\"}", impression.impression().properties()); + } + } + + @Test + public void fallbackTreatmentWithExceptionsResult() { + SDKReadinessGates gates = mock(SDKReadinessGates.class); + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); + when(splitCacheConsumer.get(anyString())).thenThrow(RuntimeException.class); + when(splitCacheConsumer.fetchMany(anyList())).thenThrow(RuntimeException.class); + HashMap> features = new HashMap<>(); + features.put("flag", new HashSet<>(Arrays.asList("test1"))); + when(splitCacheConsumer.getNamesByFlagSets(anyList())).thenReturn(features); + + String 
fallbcakConfigGlobal = "{\"prop1\", \"val1\"}"; + FallbackTreatmentsConfiguration fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration( + new FallbackTreatment("on", fallbcakConfigGlobal)); + FallbackTreatmentCalculator fallbackTreatmentCalculator = new FallbackTreatmentCalculatorImp(fallbackTreatmentsConfiguration); + + SplitClientImpl client = new SplitClientImpl( + mock(SplitFactory.class), + splitCacheConsumer, + new ImpressionsManager.NoOpImpressionsManager(), + NoopEventsStorageImp.create(), + config, + gates, + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, fallbackTreatmentCalculator), + TELEMETRY_STORAGE, TELEMETRY_STORAGE, + new FlagSetsFilterImpl(new HashSet<>()), + fallbackTreatmentCalculator + ); + assertEquals("on", client.getTreatment("adil@relateiq.com", "test1")); + assertEquals("on", client.getTreatmentWithConfig("adil@relateiq.com", "test1").treatment()); + assertEquals(fallbcakConfigGlobal, client.getTreatmentWithConfig("adil@relateiq.com", "test1").config()); + assertEquals("on", client.getTreatments("adil@relateiq.com", Arrays.asList("test1")).get("test1")); + assertEquals("on", client.getTreatmentsWithConfig("adil@relateiq.com", Arrays.asList("test1")).get("test1").treatment()); + assertEquals(fallbcakConfigGlobal, client.getTreatmentsWithConfig("adil@relateiq.com", Arrays.asList("test1")).get("test1").config()); + + assertEquals("on", client.getTreatmentsByFlagSet("adil@relateiq.com", "flag").get("test1")); + assertEquals("on", client.getTreatmentsByFlagSets("adil@relateiq.com", Arrays.asList("flag")).get("test1")); + assertEquals("on", client.getTreatmentsWithConfigByFlagSet("adil@relateiq.com", "flag").get("test1").treatment()); + assertEquals(fallbcakConfigGlobal, client.getTreatmentsWithConfigByFlagSet("adil@relateiq.com", "flag").get("test1").config()); + assertEquals("on", client.getTreatmentsWithConfigByFlagSets("adil@relateiq.com", 
Arrays.asList("flag")).get("test1").treatment()); + assertEquals(fallbcakConfigGlobal, client.getTreatmentsWithConfigByFlagSets("adil@relateiq.com", Arrays.asList("flag")).get("test1").config()); + + String fallbcakConfigByFlag = "{\"prop2\", \"val2\"}"; + fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration(new FallbackTreatment("on", fallbcakConfigGlobal), + new HashMap() {{ put("feature", new FallbackTreatment("off", fallbcakConfigByFlag)); }}); + + features = new HashMap<>(); + features.put("flag", new HashSet<>(Arrays.asList("test", "feature"))); + when(splitCacheConsumer.getNamesByFlagSets(anyList())).thenReturn(features); + + fallbackTreatmentCalculator = new FallbackTreatmentCalculatorImp(fallbackTreatmentsConfiguration); + + client = new SplitClientImpl( + mock(SplitFactory.class), + splitCacheConsumer, + new ImpressionsManager.NoOpImpressionsManager(), + NoopEventsStorageImp.create(), + config, + gates, + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, fallbackTreatmentCalculator), + TELEMETRY_STORAGE, TELEMETRY_STORAGE, + new FlagSetsFilterImpl(new HashSet<>()), + fallbackTreatmentCalculator + ); + assertEquals("on", client.getTreatment("adil@relateiq.com", "test")); + assertEquals("off", client.getTreatment("adil@relateiq.com", "feature")); + assertEquals("on", client.getTreatmentWithConfig("adil@relateiq.com", "test1").treatment()); + assertEquals(fallbcakConfigGlobal, client.getTreatmentWithConfig("adil@relateiq.com", "test1").config()); + assertEquals("off", client.getTreatmentWithConfig("adil@relateiq.com", "feature").treatment()); + assertEquals(fallbcakConfigByFlag, client.getTreatmentWithConfig("adil@relateiq.com", "feature").config()); + Map result = client.getTreatments("adil@relateiq.com", Arrays.asList("feature", "test")); + assertEquals("off", result.get("feature")); + assertEquals("on", result.get("test")); + Map results = client.getTreatmentsWithConfig("adil@relateiq.com", 
Arrays.asList("feature", "test")); + assertEquals("off", results.get("feature").treatment()); + assertEquals(fallbcakConfigByFlag, results.get("feature").config()); + assertEquals("on", results.get("test").treatment()); + assertEquals(fallbcakConfigGlobal, results.get("test").config()); + + assertEquals("on", client.getTreatmentsByFlagSet("adil@relateiq.com", "flag").get("test")); + assertEquals("off", client.getTreatmentsByFlagSet("adil@relateiq.com", "flag").get("feature")); + assertEquals("on", client.getTreatmentsByFlagSets("adil@relateiq.com", Arrays.asList("flag")).get("test")); + assertEquals("off", client.getTreatmentsByFlagSets("adil@relateiq.com", Arrays.asList("flag")).get("feature")); + assertEquals("on", client.getTreatmentsWithConfigByFlagSet("adil@relateiq.com", "flag").get("test").treatment()); + assertEquals(fallbcakConfigGlobal, client.getTreatmentsWithConfigByFlagSet("adil@relateiq.com", "flag").get("test").config()); + assertEquals("off", client.getTreatmentsWithConfigByFlagSet("adil@relateiq.com", "flag").get("feature").treatment()); + assertEquals(fallbcakConfigByFlag, client.getTreatmentsWithConfigByFlagSet("adil@relateiq.com", "flag").get("feature").config()); + assertEquals("on", client.getTreatmentsWithConfigByFlagSets("adil@relateiq.com", Arrays.asList("flag")).get("test").treatment()); + assertEquals(fallbcakConfigGlobal, client.getTreatmentsWithConfigByFlagSets("adil@relateiq.com", Arrays.asList("flag")).get("test").config()); + assertEquals("off", client.getTreatmentsWithConfigByFlagSets("adil@relateiq.com", Arrays.asList("flag")).get("feature").treatment()); + assertEquals(fallbcakConfigByFlag, client.getTreatmentsWithConfigByFlagSets("adil@relateiq.com", Arrays.asList("flag")).get("feature").config()); + + fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration( + new HashMap() {{ put("feature", new FallbackTreatment("off", fallbcakConfigByFlag)); }}); + + fallbackTreatmentCalculator = new 
FallbackTreatmentCalculatorImp(fallbackTreatmentsConfiguration); + + client = new SplitClientImpl( + mock(SplitFactory.class), + splitCacheConsumer, + new ImpressionsManager.NoOpImpressionsManager(), + NoopEventsStorageImp.create(), + config, + gates, + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, fallbackTreatmentCalculator), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + new FlagSetsFilterImpl(new HashSet<>()), + fallbackTreatmentCalculator + ); + assertEquals(Treatments.CONTROL, client.getTreatment("adil@relateiq.com", "test")); + assertEquals("off", client.getTreatment("adil@relateiq.com", "feature")); + assertEquals(Treatments.CONTROL, client.getTreatmentWithConfig("adil@relateiq.com", "test1").treatment()); + assertEquals(null, client.getTreatmentWithConfig("adil@relateiq.com", "test1").config()); + assertEquals("off", client.getTreatmentWithConfig("adil@relateiq.com", "feature").treatment()); + assertEquals(fallbcakConfigByFlag, client.getTreatmentWithConfig("adil@relateiq.com", "feature").config()); + result = client.getTreatments("adil@relateiq.com", Arrays.asList("feature", "test")); + assertEquals("off", result.get("feature")); + assertEquals(Treatments.CONTROL, result.get("test")); + results = client.getTreatmentsWithConfig("adil@relateiq.com", Arrays.asList("feature", "test")); + assertEquals("off", results.get("feature").treatment()); + assertEquals(fallbcakConfigByFlag, results.get("feature").config()); + assertEquals(Treatments.CONTROL, results.get("test").treatment()); + assertEquals(null, results.get("test").config()); + + assertEquals(Treatments.CONTROL, client.getTreatmentsByFlagSet("adil@relateiq.com", "flag").get("test")); + assertEquals("off", client.getTreatmentsByFlagSet("adil@relateiq.com", "flag").get("feature")); + assertEquals(Treatments.CONTROL, client.getTreatmentsByFlagSets("adil@relateiq.com", Arrays.asList("flag")).get("test")); + assertEquals("off", 
client.getTreatmentsByFlagSets("adil@relateiq.com", Arrays.asList("flag")).get("feature")); + assertEquals(Treatments.CONTROL, client.getTreatmentsWithConfigByFlagSet("adil@relateiq.com", "flag").get("test").treatment()); + assertEquals(null, client.getTreatmentsWithConfigByFlagSet("adil@relateiq.com", "flag").get("test").config()); + assertEquals("off", client.getTreatmentsWithConfigByFlagSet("adil@relateiq.com", "flag").get("feature").treatment()); + assertEquals(fallbcakConfigByFlag, client.getTreatmentsWithConfigByFlagSet("adil@relateiq.com", "flag").get("feature").config()); + assertEquals(Treatments.CONTROL, client.getTreatmentsWithConfigByFlagSets("adil@relateiq.com", Arrays.asList("flag")).get("test").treatment()); + assertEquals(null, client.getTreatmentsWithConfigByFlagSets("adil@relateiq.com", Arrays.asList("flag")).get("test").config()); + assertEquals("off", client.getTreatmentsWithConfigByFlagSets("adil@relateiq.com", Arrays.asList("flag")).get("feature").treatment()); + assertEquals(fallbcakConfigByFlag, client.getTreatmentsWithConfigByFlagSets("adil@relateiq.com", Arrays.asList("flag")).get("feature").config()); + } + + @Test + public void fallbackTreatmentWithSplitNotFoundResult() { + SDKReadinessGates gates = mock(SDKReadinessGates.class); + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + SegmentCacheConsumer segmentCacheConsumer = mock(SegmentCacheConsumer.class); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = mock(RuleBasedSegmentCacheConsumer.class); + ParsedCondition rollOutToEveryone = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new WhitelistMatcher(Lists.newArrayList("adil@codigo.com"))), Lists.newArrayList(partition("on", 100))); + List conditions = Lists.newArrayList(rollOutToEveryone); + ParsedSplit parsedSplit = ParsedSplit.createParsedSplitForTests("test", 123, false, Treatments.OFF, conditions, + null, 1, 1, new HashSet<>(), false, new PrerequisitesMatcher(null)); + + 
when(splitCacheConsumer.get("test1")).thenReturn(parsedSplit); + when(splitCacheConsumer.get("test2")).thenReturn(null); + when(splitCacheConsumer.get("test3")).thenReturn(null); + HashMap features = new HashMap<>(); + features.put("test1", parsedSplit); + features.put("test2", null); + features.put("test3", null); + when(splitCacheConsumer.fetchMany(anyList())).thenReturn(features); + HashMap> flagFeatures = new HashMap<>(); + flagFeatures.put("flag", new HashSet<>(Arrays.asList("test1", "test2", "test3"))); + when(splitCacheConsumer.getNamesByFlagSets(anyList())).thenReturn(flagFeatures); + + String fallbcakConfigGlobal = "{\"prop1\", \"val1\"}"; + FallbackTreatmentsConfiguration fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration( + new FallbackTreatment("on", fallbcakConfigGlobal)); + FallbackTreatmentCalculator fallbackTreatmentCalculator = new FallbackTreatmentCalculatorImp(fallbackTreatmentsConfiguration); + + SplitClientImpl client = new SplitClientImpl( + mock(SplitFactory.class), + splitCacheConsumer, + new ImpressionsManager.NoOpImpressionsManager(), + NoopEventsStorageImp.create(), + config, + gates, + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, fallbackTreatmentCalculator), + TELEMETRY_STORAGE, TELEMETRY_STORAGE, + new FlagSetsFilterImpl(new HashSet<>()), + fallbackTreatmentCalculator + ); + assertEquals("off", client.getTreatment("adil@relateiq.com", "test1")); + assertEquals("on", client.getTreatment("adil@relateiq.com", "test2")); + assertEquals("on", client.getTreatmentWithConfig("adil@relateiq.com", "test2").treatment()); + assertEquals(fallbcakConfigGlobal, client.getTreatmentWithConfig("adil@relateiq.com", "test2").config()); + + Map result = client.getTreatments("adil@relateiq.com", Arrays.asList("test1", "test2")); + assertEquals("off", result.get("test1")); + assertEquals("on", result.get("test2")); + Map resultWithConfig = client.getTreatmentsWithConfig("adil@relateiq.com", 
Arrays.asList("test1", "test2")); + assertEquals("off", resultWithConfig.get("test1").treatment()); + assertEquals(null, resultWithConfig.get("test1").config()); + assertEquals("on", resultWithConfig.get("test2").treatment()); + assertEquals(fallbcakConfigGlobal, resultWithConfig.get("test2").config()); + + result = client.getTreatmentsByFlagSet("adil@relateiq.com", "flag"); + assertEquals("off", result.get("test1")); + assertEquals("on", result.get("test2")); + result = client.getTreatmentsByFlagSets("adil@relateiq.com", Arrays.asList("flag")); + assertEquals("off", result.get("test1")); + assertEquals("on", result.get("test2")); + resultWithConfig = client.getTreatmentsWithConfigByFlagSet("adil@relateiq.com", "flag"); + assertEquals("off", resultWithConfig.get("test1").treatment()); + assertEquals(null, resultWithConfig.get("test1").config()); + assertEquals("on", resultWithConfig.get("test2").treatment()); + assertEquals(fallbcakConfigGlobal, resultWithConfig.get("test2").config()); + resultWithConfig = client.getTreatmentsWithConfigByFlagSets("adil@relateiq.com", Arrays.asList("flag")); + assertEquals("off", resultWithConfig.get("test1").treatment()); + assertEquals(null, resultWithConfig.get("test1").config()); + assertEquals("on", resultWithConfig.get("test2").treatment()); + assertEquals(fallbcakConfigGlobal, resultWithConfig.get("test2").config()); + + String fallbcakConfigByFlag = "{\"prop2\", \"val2\"}"; + fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration(new FallbackTreatment("on", fallbcakConfigGlobal), + new HashMap() {{ put("test2", new FallbackTreatment("off-fallback", fallbcakConfigByFlag)); }}); + + fallbackTreatmentCalculator = new FallbackTreatmentCalculatorImp(fallbackTreatmentsConfiguration); + + client = new SplitClientImpl( + mock(SplitFactory.class), + splitCacheConsumer, + new ImpressionsManager.NoOpImpressionsManager(), + NoopEventsStorageImp.create(), + config, + gates, + new EvaluatorImp(splitCacheConsumer, 
segmentCacheConsumer, ruleBasedSegmentCacheConsumer, fallbackTreatmentCalculator), + TELEMETRY_STORAGE, TELEMETRY_STORAGE, + new FlagSetsFilterImpl(new HashSet<>()), + fallbackTreatmentCalculator + ); + assertEquals("off", client.getTreatment("adil@relateiq.com", "test1")); + assertEquals("off-fallback", client.getTreatment("adil@relateiq.com", "test2")); + assertEquals("on", client.getTreatment("adil@relateiq.com", "test3")); + + assertEquals("off", client.getTreatmentWithConfig("adil@relateiq.com", "test1").treatment()); + assertEquals(null, client.getTreatmentWithConfig("adil@relateiq.com", "test1").config()); + assertEquals("off-fallback", client.getTreatmentWithConfig("adil@relateiq.com", "test2").treatment()); + assertEquals(fallbcakConfigByFlag, client.getTreatmentWithConfig("adil@relateiq.com", "test2").config()); + assertEquals("on", client.getTreatmentWithConfig("adil@relateiq.com", "test3").treatment()); + assertEquals(fallbcakConfigGlobal, client.getTreatmentWithConfig("adil@relateiq.com", "test3").config()); + + result = client.getTreatments("adil@relateiq.com", Arrays.asList("test1", "test2", "test3")); + assertEquals("off", result.get("test1")); + assertEquals("off-fallback", result.get("test2")); + assertEquals("on", result.get("test3")); + + Map results = client.getTreatmentsWithConfig("adil@relateiq.com", Arrays.asList("test1", "test2", "test3")); + assertEquals("off", results.get("test1").treatment()); + assertEquals(null, results.get("test1").config()); + assertEquals("off-fallback", results.get("test2").treatment()); + assertEquals(fallbcakConfigByFlag, results.get("test2").config()); + assertEquals("on", results.get("test3").treatment()); + assertEquals(fallbcakConfigGlobal, results.get("test3").config()); + + result = client.getTreatmentsByFlagSet("adil@relateiq.com", "flag"); + assertEquals("off", result.get("test1")); + assertEquals("off-fallback", result.get("test2")); + assertEquals("on", result.get("test3")); + + result = 
client.getTreatmentsByFlagSets("adil@relateiq.com", Arrays.asList("flag")); + assertEquals("off", result.get("test1")); + assertEquals("off-fallback", result.get("test2")); + assertEquals("on", result.get("test3")); + + results = client.getTreatmentsWithConfigByFlagSet("adil@relateiq.com", "flag"); + assertEquals("off", results.get("test1").treatment()); + assertEquals(null, results.get("test1").config()); + assertEquals("off-fallback", results.get("test2").treatment()); + assertEquals(fallbcakConfigByFlag, results.get("test2").config()); + assertEquals("on", results.get("test3").treatment()); + assertEquals(fallbcakConfigGlobal, results.get("test3").config()); + + results = client.getTreatmentsWithConfigByFlagSets("adil@relateiq.com", Arrays.asList("flag")); + assertEquals("off", results.get("test1").treatment()); + assertEquals(null, results.get("test1").config()); + assertEquals("off-fallback", results.get("test2").treatment()); + assertEquals(fallbcakConfigByFlag, results.get("test2").config()); + assertEquals("on", results.get("test3").treatment()); + assertEquals(fallbcakConfigGlobal, results.get("test3").config()); + + fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration( + new HashMap() {{ put("test2", new FallbackTreatment("off-fallback", fallbcakConfigByFlag)); }}); + + fallbackTreatmentCalculator = new FallbackTreatmentCalculatorImp(fallbackTreatmentsConfiguration); + + client = new SplitClientImpl( + mock(SplitFactory.class), + splitCacheConsumer, + new ImpressionsManager.NoOpImpressionsManager(), + NoopEventsStorageImp.create(), + config, + gates, + new EvaluatorImp(splitCacheConsumer, segmentCacheConsumer, ruleBasedSegmentCacheConsumer, fallbackTreatmentCalculator), TELEMETRY_STORAGE, TELEMETRY_STORAGE, + new FlagSetsFilterImpl(new HashSet<>()), + fallbackTreatmentCalculator + ); + assertEquals("off", client.getTreatment("adil@relateiq.com", "test1")); + assertEquals("off-fallback", client.getTreatment("adil@relateiq.com", "test2")); 
+ assertEquals(Treatments.CONTROL, client.getTreatment("adil@relateiq.com", "test3")); + + assertEquals("off", client.getTreatmentWithConfig("adil@relateiq.com", "test1").treatment()); + assertEquals(null, client.getTreatmentWithConfig("adil@relateiq.com", "test1").config()); + assertEquals("off-fallback", client.getTreatmentWithConfig("adil@relateiq.com", "test2").treatment()); + assertEquals(fallbcakConfigByFlag, client.getTreatmentWithConfig("adil@relateiq.com", "test2").config()); + assertEquals(Treatments.CONTROL, client.getTreatmentWithConfig("adil@relateiq.com", "test3").treatment()); + assertEquals(null, client.getTreatmentWithConfig("adil@relateiq.com", "test3").config()); + + result = client.getTreatments("adil@relateiq.com", Arrays.asList("test1", "test2", "test3")); + assertEquals("off", result.get("test1")); + assertEquals("off-fallback", result.get("test2")); + assertEquals(Treatments.CONTROL, result.get("test3")); + + results = client.getTreatmentsWithConfig("adil@relateiq.com", Arrays.asList("test1", "test2", "test3")); + assertEquals("off", results.get("test1").treatment()); + assertEquals(null, results.get("test1").config()); + assertEquals("off-fallback", results.get("test2").treatment()); + assertEquals(fallbcakConfigByFlag, results.get("test2").config()); + assertEquals(Treatments.CONTROL, results.get("test3").treatment()); + assertEquals(null, results.get("test3").config()); + + result = client.getTreatmentsByFlagSet("adil@relateiq.com", "flag"); + assertEquals("off", result.get("test1")); + assertEquals("off-fallback", result.get("test2")); + assertEquals(Treatments.CONTROL, result.get("test3")); + + result = client.getTreatmentsByFlagSets("adil@relateiq.com", Arrays.asList("flag")); + assertEquals("off", result.get("test1")); + assertEquals("off-fallback", result.get("test2")); + assertEquals(Treatments.CONTROL, result.get("test3")); + + results = client.getTreatmentsWithConfigByFlagSet("adil@relateiq.com", "flag"); + assertEquals("off", 
results.get("test1").treatment()); + assertEquals(null, results.get("test1").config()); + assertEquals("off-fallback", results.get("test2").treatment()); + assertEquals(fallbcakConfigByFlag, results.get("test2").config()); + assertEquals(Treatments.CONTROL, results.get("test3").treatment()); + assertEquals(null, results.get("test3").config()); + + results = client.getTreatmentsWithConfigByFlagSets("adil@relateiq.com", Arrays.asList("flag")); + assertEquals("off", results.get("test1").treatment()); + assertEquals(null, results.get("test1").config()); + assertEquals("off-fallback", results.get("test2").treatment()); + assertEquals(fallbcakConfigByFlag, results.get("test2").config()); + assertEquals(Treatments.CONTROL, results.get("test3").treatment()); + assertEquals(null, results.get("test3").config()); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/SplitClientIntegrationTest.java b/client/src/test/java/io/split/client/SplitClientIntegrationTest.java index 37c80c59d..69b1b3662 100644 --- a/client/src/test/java/io/split/client/SplitClientIntegrationTest.java +++ b/client/src/test/java/io/split/client/SplitClientIntegrationTest.java @@ -3,40 +3,63 @@ import io.split.SSEMockServer; import io.split.SplitMockServer; import io.split.client.api.SplitView; -import io.split.client.dtos.Event; +import io.split.client.dtos.EvaluationOptions; +import io.split.client.dtos.FallbackTreatment; +import io.split.client.dtos.FallbackTreatmentsConfiguration; import io.split.client.impressions.ImpressionsManager; import io.split.client.utils.CustomDispatcher; -import io.split.integrations.IntegrationsConfig; import io.split.storages.enums.OperationMode; import io.split.storages.enums.StorageMode; import io.split.storages.pluggable.CustomStorageWrapperImp; import io.split.storages.pluggable.domain.EventConsumer; import io.split.storages.pluggable.domain.ImpressionConsumer; -import io.split.telemetry.domain.enums.MethodEnum; -import 
io.split.telemetry.utils.AtomicLongArray; + +import okhttp3.mockwebserver.Dispatcher; import okhttp3.mockwebserver.MockResponse; +import okhttp3.mockwebserver.MockWebServer; +import okhttp3.mockwebserver.RecordedRequest; import org.awaitility.Awaitility; import org.glassfish.grizzly.utils.Pair; import org.glassfish.jersey.media.sse.OutboundEvent; import org.junit.Assert; -import org.junit.Ignore; import org.junit.Test; -import org.mockito.ArgumentCaptor; import javax.ws.rs.sse.OutboundSseEvent; import java.io.IOException; import java.net.URISyntaxException; -import java.util.*; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Queue; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; +import java.util.stream.Collectors; public class SplitClientIntegrationTest { - // TODO: review this test. 
@Test - @Ignore public void getTreatmentWithStreamingEnabled() throws Exception { - SplitMockServer splitServer = new SplitMockServer(CustomDispatcher.builder().build()); + MockResponse response = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850109, \"t\":1585948850109}, \"rbs\":{\"d\":[],\"s\":1585948850109,\"t\":1585948850109}}"); + MockResponse response2 = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850110, \"t\":1585948850110}, \"rbs\":{\"d\":[],\"s\":1585948850110,\"t\":1585948850110}}"); + MockResponse response3 = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850111, \"t\":1585948850111}, \"rbs\":{\"d\":[],\"s\":1585948850111,\"t\":1585948850111}}"); + Queue responses = new LinkedList<>(); + responses.add(response); + Queue responses2 = new LinkedList<>(); + responses2.add(response2); + Queue responses3 = new LinkedList<>(); + responses3.add(response3); + SplitMockServer splitServer = new SplitMockServer(CustomDispatcher.builder() + .path(CustomDispatcher.SINCE_1585948850109, responses) + .path(CustomDispatcher.SINCE_1585948850110, responses2) + .path(CustomDispatcher.SINCE_1585948850111, responses3) + .build()); SSEMockServer.SseEventQueue eventQueue = new SSEMockServer.SseEventQueue(); SSEMockServer sseServer = buildSSEMockServer(eventQueue); @@ -48,6 +71,8 @@ public void getTreatmentWithStreamingEnabled() throws Exception { .endpoint(splitServer.getUrl(), splitServer.getUrl()) .authServiceURL(String.format("%s/api/auth/enabled", splitServer.getUrl())) .streamingServiceURL("http://localhost:" + sseServer.getPort()) + .featuresRefreshRate(20) + .segmentsRefreshRate(30) .streamingEnabled(true) .build(); @@ -59,6 +84,21 @@ public void getTreatmentWithStreamingEnabled() throws Exception { Assert.assertEquals("on_whitelist", result); // SPLIT_UPDATED should fetch -> changeNumber > since + + OutboundSseEvent sseEventWithPublishers = new OutboundEvent + .Builder() + 
.name("message") + .data("{\"id\":\"22\",\"timestamp\":1588254668328,\"encoding\":\"json\",\"channel\":\"[?occupancy=metrics.publishers]control_pri\",\"data\":\"{\\\"metrics\\\":{\\\"publishers\\\":2}}\",\"name\":\"[meta]occupancy\"}") + .build(); + eventQueue.push(sseEventWithPublishers); + + OutboundSseEvent sseEventWithoutPublishers = new OutboundEvent + .Builder() + .name("message") + .data("{\"id\":\"22\",\"timestamp\":1588254668328,\"encoding\":\"json\",\"channel\":\"[?occupancy=metrics.publishers]control_pri\",\"data\":\"{\\\"metrics\\\":{\\\"publishers\\\":0}}\",\"name\":\"[meta]occupancy\"}") + .build(); + eventQueue.push(sseEventWithoutPublishers); + OutboundSseEvent sseEvent1 = new OutboundEvent .Builder() .name("message") @@ -78,33 +118,14 @@ public void getTreatmentWithStreamingEnabled() throws Exception { .build(); eventQueue.push(sseEvent4); + Awaitility.await() .atMost(50L, TimeUnit.SECONDS) .until(() -> "after_notification_received".equals(client.getTreatment("admin", "push_test")) && "on_rollout".equals(client.getTreatment("test_in_segment", "push_test"))); - OutboundSseEvent sseEvent2 = new OutboundEvent - .Builder() - .name("message") - .data("{\"id\":\"22\",\"clientId\":\"22\",\"timestamp\":1592591696052,\"encoding\":\"json\",\"channel\":\"xxxx_xxxx_segments\",\"data\":\"{\\\"type\\\":\\\"SEGMENT_UPDATE\\\",\\\"changeNumber\\\":1585948850111,\\\"segmentName\\\":\\\"segment3\\\"}\"}") - .build(); - eventQueue.push(sseEvent2); - - Awaitility.await() - .atMost(50L, TimeUnit.SECONDS) - .until(() -> "in_segment_match".equals(client.getTreatment("test_in_segment", "push_test"))); - - // SEGMENT_UPDATE should not fetch -> changeNumber < since - OutboundSseEvent sseEvent5 = new OutboundEvent - .Builder() - .name("message") - 
.data("{\"id\":\"22\",\"clientId\":\"22\",\"timestamp\":1592591696052,\"encoding\":\"json\",\"channel\":\"xxxx_xxxx_segments\",\"data\":\"{\\\"type\\\":\\\"SEGMENT_UPDATE\\\",\\\"changeNumber\\\":1585948850109,\\\"segmentName\\\":\\\"segment3\\\"}\"}") - .build(); - eventQueue.push(sseEvent5); - - Awaitility.await() - .atMost(50L, TimeUnit.SECONDS) - .until(() -> "in_segment_match".equals(client.getTreatment("test_in_segment", "push_test"))); + eventQueue.push(sseEventWithPublishers); + eventQueue.push(sseEventWithoutPublishers); // SPLIT_KILL should fetch. OutboundSseEvent sseEvent3 = new OutboundEvent @@ -125,7 +146,7 @@ public void getTreatmentWithStreamingEnabled() throws Exception { @Test public void getTreatmentWithStreamingEnabledAndAuthDisabled() throws Exception { - MockResponse response = new MockResponse().setBody("{\"splits\": [], \"since\":1585948850109, \"till\":1585948850109}"); + MockResponse response = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850109, \"t\":1585948850109}, \"rbs\":{\"s\":1585948850109,\"t\":1585948850109,\"d\":[]}}"); Queue responses = new LinkedList<>(); responses.add(response); SplitMockServer splitServer = new SplitMockServer(CustomDispatcher.builder() @@ -146,14 +167,19 @@ public void getTreatmentWithStreamingEnabledAndAuthDisabled() throws Exception { String result = client.getTreatment("admin", "push_test"); Assert.assertEquals("on_whitelist", result); - + Assert.assertEquals("on", client.getTreatment("bilal@@split.io", "rbs_flag", new HashMap() {{ + put("email", "bilal@@split.io"); + }})); + Assert.assertEquals("off", client.getTreatment("mauro@split.io", "rbs_flag", new HashMap() {{ + put("email", "mauro@split.io"); + }})); client.destroy(); splitServer.stop(); } @Test public void getTreatmentWithStreamingDisabled() throws Exception { - MockResponse response = new MockResponse().setBody("{\"splits\": [], \"since\":1585948850109, \"till\":1585948850109}"); + MockResponse response = new 
MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850109, \"t\":1585948850109}, \"rbs\":{\"d\":[],\"s\":1585948850109,\"t\":1585948850109}}"); Queue responses = new LinkedList<>(); responses.add(response); SplitMockServer splitServer = new SplitMockServer(CustomDispatcher.builder() @@ -186,7 +212,7 @@ public void getTreatmentWithStreamingDisabled() throws Exception { @Test public void managerSplitsWithStreamingEnabled() throws Exception { - MockResponse response = new MockResponse().setBody("{\"splits\": [], \"since\":1585948850109, \"till\":1585948850109}"); + MockResponse response = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850109, \"t\":1585948850109}, \"rbs\":{\"d\":[],\"s\":1585948850109,\"t\":1585948850109}}"); Queue responses = new LinkedList<>(); responses.add(response); SplitMockServer splitServer = new SplitMockServer(CustomDispatcher.builder() @@ -205,8 +231,8 @@ public void managerSplitsWithStreamingEnabled() throws Exception { manager.blockUntilReady(); List results = manager.splits(); - Assert.assertEquals(4, results.size()); - Assert.assertEquals(3, results.stream().filter(r -> !r.killed).toArray().length); + Assert.assertEquals(5, results.size()); + Assert.assertEquals(4, results.stream().filter(r -> !r.killed).toArray().length); // SPLIT_KILL should fetch. 
OutboundSseEvent sseEventSplitKill = new OutboundEvent @@ -218,7 +244,7 @@ public void managerSplitsWithStreamingEnabled() throws Exception { Awaitility.await() .atMost(2L, TimeUnit.MINUTES) - .until(() -> 2 == manager.splits().stream().filter(r -> !r.killed).toArray().length); + .until(() -> 3 == manager.splits().stream().filter(r -> !r.killed).toArray().length); splitServer.stop(); sseServer.stop(); @@ -226,9 +252,9 @@ public void managerSplitsWithStreamingEnabled() throws Exception { @Test public void splitClientOccupancyNotifications() throws Exception { - MockResponse response = new MockResponse().setBody("{\"splits\": [], \"since\":1585948850109, \"till\":1585948850109}"); - MockResponse response2 = new MockResponse().setBody("{\"splits\": [], \"since\":1585948850110, \"till\":1585948850110}"); - MockResponse response3 = new MockResponse().setBody("{\"splits\": [], \"since\":1585948850111, \"till\":1585948850111}"); + MockResponse response = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850109, \"t\":1585948850109}, \"rbs\":{\"d\":[],\"s\":1585948850109,\"t\":1585948850109}}"); + MockResponse response2 = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850110, \"t\":1585948850110}, \"rbs\":{\"d\":[],\"s\":1585948850110,\"t\":1585948850110}}"); + MockResponse response3 = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850111, \"t\":1585948850111}, \"rbs\":{\"d\":[],\"s\":1585948850111,\"t\":1585948850111}}"); Queue responses = new LinkedList<>(); responses.add(response); Queue responses2 = new LinkedList<>(); @@ -301,9 +327,9 @@ public void splitClientOccupancyNotifications() throws Exception { @Test public void splitClientControlNotifications() throws Exception { - MockResponse response = new MockResponse().setBody("{\"splits\": [], \"since\":1585948850109, \"till\":1585948850109}"); - MockResponse response2 = new MockResponse().setBody("{\"splits\": [], \"since\":1585948850110, \"till\":1585948850110}"); - MockResponse 
response3 = new MockResponse().setBody("{\"splits\": [], \"since\":1585948850111, \"till\":1585948850111}"); + MockResponse response = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850109, \"t\":1585948850109}, \"rbs\":{\"d\":[],\"s\":1585948850109,\"t\":1585948850109}}"); + MockResponse response2 = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850110, \"t\":1585948850110}, \"rbs\":{\"d\":[],\"s\":1585948850110,\"t\":1585948850110}}"); + MockResponse response3 = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850111, \"t\":1585948850111}, \"rbs\":{\"d\":[],\"s\":1585948850111,\"t\":1585948850111}}"); Queue responses = new LinkedList<>(); responses.add(response); Queue responses2 = new LinkedList<>(); @@ -396,7 +422,7 @@ public void splitClientControlNotifications() throws Exception { @Test public void splitClientMultiFactory() throws Exception { - MockResponse response = new MockResponse().setBody("{\"splits\": [], \"since\":1585948850109, \"till\":1585948850109}"); + MockResponse response = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850109, \"t\":1585948850109}, \"rbs\":{\"d\":[],\"s\":1585948850109,\"t\":1585948850109}}"); Queue responses = new LinkedList<>(); responses.add(response); responses.add(response); @@ -406,7 +432,17 @@ public void splitClientMultiFactory() throws Exception { responses.add(response); responses.add(response); responses.add(response); - SplitMockServer splitServer = new SplitMockServer(CustomDispatcher.builder() + + SplitMockServer splitServer1 = new SplitMockServer(CustomDispatcher.builder() + .path(CustomDispatcher.SINCE_1585948850109, responses) + .build()); + SplitMockServer splitServer2 = new SplitMockServer(CustomDispatcher.builder() + .path(CustomDispatcher.SINCE_1585948850109, responses) + .build()); + SplitMockServer splitServer3 = new SplitMockServer(CustomDispatcher.builder() + .path(CustomDispatcher.SINCE_1585948850109, responses) + .build()); + SplitMockServer 
splitServer4 = new SplitMockServer(CustomDispatcher.builder() .path(CustomDispatcher.SINCE_1585948850109, responses) .build()); @@ -422,28 +458,31 @@ public void splitClientMultiFactory() throws Exception { SSEMockServer.SseEventQueue eventQueue4 = new SSEMockServer.SseEventQueue(); SSEMockServer sseServer4 = buildSSEMockServer(eventQueue4); - splitServer.start(); + splitServer1.start(); + splitServer2.start(); + splitServer3.start(); + splitServer4.start(); sseServer1.start(); sseServer2.start(); sseServer3.start(); sseServer4.start(); - SplitClientConfig config1 = buildSplitClientConfig("enabled", splitServer.getUrl(), sseServer1.getPort(), true, 20); + SplitClientConfig config1 = buildSplitClientConfig("enabled", splitServer1.getUrl(), sseServer1.getPort(), true, 20); SplitFactory factory1 = SplitFactoryBuilder.build("fake-api-token-1", config1); SplitClient client1 = factory1.client(); client1.blockUntilReady(); - SplitClientConfig config2 = buildSplitClientConfig("enabled", splitServer.getUrl(), sseServer2.getPort(), true, 20); + SplitClientConfig config2 = buildSplitClientConfig("enabled", splitServer2.getUrl(), sseServer2.getPort(), true, 20); SplitFactory factory2 = SplitFactoryBuilder.build("fake-api-token-2", config2); SplitClient client2 = factory2.client(); client2.blockUntilReady(); - SplitClientConfig config3 = buildSplitClientConfig("enabled", splitServer.getUrl(), sseServer3.getPort(), true, 20); + SplitClientConfig config3 = buildSplitClientConfig("enabled", splitServer3.getUrl(), sseServer3.getPort(), true, 20); SplitFactory factory3 = SplitFactoryBuilder.build("fake-api-token-3", config3); SplitClient client3 = factory3.client(); client3.blockUntilReady(); - SplitClientConfig config4 = buildSplitClientConfig("disabled", splitServer.getUrl(), sseServer4.getPort(), true, 100); + SplitClientConfig config4 = buildSplitClientConfig("disabled", splitServer4.getUrl(), sseServer4.getPort(), true, 100); SplitFactory factory4 = 
SplitFactoryBuilder.build("fake-api-token-4", config4); SplitClient client4 = factory4.client(); client4.blockUntilReady(); @@ -478,11 +517,11 @@ public void splitClientMultiFactory() throws Exception { eventQueue3.push(sseEventInitial); eventQueue4.push(sseEventInitial); - Thread.sleep(1000); + Thread.sleep(10000); eventQueue1.push(sseEventSplitUpdate); Awaitility.await() - .atMost(50L, TimeUnit.SECONDS) + .atMost(100L, TimeUnit.SECONDS) .until(() -> "split_killed".equals(client1.getTreatment("admin", "push_test"))); @@ -491,7 +530,7 @@ public void splitClientMultiFactory() throws Exception { .until(() -> "on_whitelist".equals(client2.getTreatment("admin", "push_test"))); Awaitility.await() - .atMost(50L, TimeUnit.SECONDS) + .atMost(100L, TimeUnit.SECONDS) .until(() -> "on_whitelist".equals(client3.getTreatment("admin", "push_test"))); Awaitility.await() @@ -513,26 +552,34 @@ public void splitClientMultiFactory() throws Exception { .until(() -> "split_killed".equals(client3.getTreatment("admin", "push_test"))); Awaitility.await() - .atMost(50L, TimeUnit.SECONDS) + .atMost(100L, TimeUnit.SECONDS) .until(() -> "on_whitelist".equals(client4.getTreatment("admin", "push_test"))); - client1.destroy(); client2.destroy(); client3.destroy(); client4.destroy(); - splitServer.stop(); + splitServer1.stop(); + splitServer2.stop(); + splitServer3.stop(); + splitServer4.stop(); sseServer1.stop(); sseServer2.stop(); sseServer3.stop(); sseServer4.stop(); } - // TODO: review this test. 
@Test - @Ignore public void keepAlive() throws Exception { - SplitMockServer splitServer = new SplitMockServer(CustomDispatcher.builder().build()); + MockResponse response = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850109, \"t\":1585948850109}, \"rbs\":{\"d\":[],\"s\":1585948850109,\"t\":1585948850109}}"); + Queue responses = new LinkedList<>(); + responses.add(response); + + SplitMockServer splitServer = new SplitMockServer(CustomDispatcher.builder() + .path(CustomDispatcher.SINCE_1585948850109, responses) + .build()); + + //plitMockServer splitServer = new SplitMockServer(CustomDispatcher.builder().build()); SSEMockServer.SseEventQueue eventQueue = new SSEMockServer.SseEventQueue(); SSEMockServer sseServer = buildSSEMockServer(eventQueue); @@ -548,7 +595,7 @@ public void keepAlive() throws Exception { Assert.assertEquals("on_whitelist", result); // wait to check keep alive notification. - Thread.sleep(80000); + Thread.sleep(50000); // must reconnect and after the second syncAll the result must be different Awaitility.await() @@ -562,7 +609,7 @@ public void keepAlive() throws Exception { @Test public void testConnectionClosedByRemoteHostIsProperlyHandled() throws Exception { - MockResponse response = new MockResponse().setBody("{\"splits\": [], \"since\":1585948850109, \"till\":1585948850109}"); + MockResponse response = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850109, \"t\":1585948850109}, \"rbs\":{\"d\":[],\"s\":1585948850109,\"t\":1585948850109}}"); Queue responses = new LinkedList<>(); responses.add(response); SplitMockServer splitServer = new SplitMockServer(CustomDispatcher.builder() @@ -600,7 +647,7 @@ public void testConnectionClosedByRemoteHostIsProperlyHandled() throws Exception @Test public void testConnectionClosedIsProperlyHandled() throws Exception { - MockResponse response = new MockResponse().setBody("{\"splits\": [], \"since\":1585948850109, \"till\":1585948850109}"); + MockResponse response = new 
MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850109, \"t\":1585948850109}, \"rbs\":{\"d\":[],\"s\":1585948850109,\"t\":1585948850109}}"); Queue responses = new LinkedList<>(); responses.add(response); SplitMockServer splitServer = new SplitMockServer(CustomDispatcher.builder() @@ -650,7 +697,8 @@ public void testPluggableMode() throws IOException, URISyntaxException { SplitClient client = splitFactory.client(); try { client.blockUntilReady(); - SplitManager splitManager = splitFactory.manager();HashMap properties = new HashMap<>(); + SplitManager splitManager = splitFactory.manager(); + HashMap properties = new HashMap<>(); properties.put("number_property", 123); properties.put("object_property", new Object()); @@ -665,32 +713,761 @@ public void testPluggableMode() throws IOException, URISyntaxException { Assert.assertTrue(events.stream().anyMatch(e -> "keyValue".equals(e.getEventDto().key) && e.getEventDto().value == 12L)); Assert.assertTrue(events.stream().anyMatch(e -> "keyProperties".equals(e.getEventDto().key) && e.getEventDto().properties != null)); - Assert.assertEquals(2, splits.size()); - Assert.assertTrue(splits.stream().anyMatch(sw -> "first.name".equals(sw.name))); - Assert.assertTrue(splits.stream().anyMatch(sw -> "second.name".equals(sw.name))); - Assert.assertEquals("on", client.getTreatment("key", "first.name")); - Assert.assertEquals("off", client.getTreatmentWithConfig("FakeKey", "second.name").treatment()); + Assert.assertEquals(3, splits.size()); + Assert.assertTrue(splits.stream().anyMatch(sw -> "first-name".equals(sw.name))); + Assert.assertTrue(splits.stream().anyMatch(sw -> "second-name".equals(sw.name))); + Assert.assertEquals("on", client.getTreatment("key", "first-name")); + Assert.assertEquals("off", client.getTreatmentWithConfig("FakeKey", "second-name").treatment()); Assert.assertEquals("control", client.getTreatment("FakeKey", "noSplit")); + Assert.assertEquals("on", client.getTreatment("bilal@@split.io", "rbs_flag", new 
HashMap() {{ + put("email", "bilal@@split.io"); + }})); + Assert.assertEquals("off", client.getTreatment("mauro@split.io", "rbs_flag", new HashMap() {{ + put("email", "mauro@split.io"); + }})); List impressions = customStorageWrapper.getImps(); - Assert.assertEquals(2, impressions.size()); - Assert.assertTrue(impressions.stream().anyMatch(imp -> "first.name".equals(imp.getKeyImpression().feature) && "on".equals(imp.getKeyImpression().treatment))); - Assert.assertTrue(impressions.stream().anyMatch(imp -> "second.name".equals(imp.getKeyImpression().feature) && "off".equals(imp.getKeyImpression().treatment))); + Assert.assertEquals(4, impressions.size()); + Assert.assertTrue(impressions.stream().anyMatch(imp -> "first-name".equals(imp.getKeyImpression().feature) && "on".equals(imp.getKeyImpression().treatment))); + Assert.assertTrue(impressions.stream().anyMatch(imp -> "second-name".equals(imp.getKeyImpression().feature) && "off".equals(imp.getKeyImpression().treatment))); + + Map latencies = customStorageWrapper.getLatencies(); + + List keys = new ArrayList<>(latencies.keySet()); - Map latencies = customStorageWrapper.get_methodLatencies(); + String key1 = keys.stream().filter(key -> key.contains("track/")).collect(Collectors.toList()).get(0); + String key2 = keys.stream().filter(key -> key.contains("getTreatment/")).collect(Collectors.toList()).get(0); + String key3 = keys.stream().filter(key -> key.contains("getTreatmentWithConfig/")).collect(Collectors.toList()).get(0); - Assert.assertEquals(3, latencies.get(MethodEnum.TRACK.getMethod()).fetchAndClearAll().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(1, latencies.get(MethodEnum.TREATMENT.getMethod()).fetchAndClearAll().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(1, latencies.get(MethodEnum.TREATMENT_WITH_CONFIG.getMethod()).fetchAndClearAll().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(Optional.of(3L), Optional.ofNullable(latencies.get(key1))); + 
Assert.assertEquals(Optional.of(3L), Optional.of(latencies.get(key2))); + Assert.assertEquals(Optional.of(1L), Optional.of(latencies.get(key3))); Thread.sleep(500); - Assert.assertNotNull(customStorageWrapper.get_telemetryInit()); - Assert.assertEquals(StorageMode.PLUGGABLE.name(), customStorageWrapper.get_telemetryInit().get_storage()); + Assert.assertNotNull(customStorageWrapper.getConfig()); + String key = customStorageWrapper.getConfig().keySet().stream().collect(Collectors.toList()).get(0); + Assert.assertTrue(customStorageWrapper.getConfig().get(key).contains(StorageMode.PLUGGABLE.name())); } catch (TimeoutException | InterruptedException e) { } } + @Test + public void getTreatmentFlagSetWithPolling() throws Exception { + MockResponse response = new MockResponse().setBody("{\"ff\":{\"d\":[{\"trafficTypeName\":\"client\",\"name\":\"workm\",\"trafficAllocation\":100,\"trafficAllocationSeed\":147392224,\"seed\":524417105,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"on\",\"changeNumber\":1602796638344,\"algo\":2,\"configurations\":{},\"sets\":[\"set1\",\"set2\"],\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"client\",\"attribute\":null},\"matcherType\":\"IN_SEGMENT\",\"negate\":false,\"userDefinedSegmentMatcherData\":{\"segmentName\":\"new_segment\"},\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":0},{\"treatment\":\"free\",\"size\":100},{\"treatment\":\"conta\",\"size\":0}],\"label\":\"in segment 
new_segment\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"client\",\"attribute\":null},\"matcherType\":\"ALL_KEYS\",\"negate\":false,\"userDefinedSegmentMatcherData\":null,\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":100},{\"treatment\":\"off\",\"size\":0},{\"treatment\":\"free\",\"size\":0},{\"treatment\":\"conta\",\"size\":0}],\"label\":\"default rule\"}]},{\"trafficTypeName\":\"client\",\"name\":\"workm_set_3\",\"trafficAllocation\":100,\"trafficAllocationSeed\":147392224,\"seed\":524417105,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"on\",\"changeNumber\":1602796638344,\"algo\":2,\"configurations\":{},\"sets\":[\"set3\"],\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"client\",\"attribute\":null},\"matcherType\":\"IN_SEGMENT\",\"negate\":false,\"userDefinedSegmentMatcherData\":{\"segmentName\":\"new_segment\"},\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":0},{\"treatment\":\"free\",\"size\":100},{\"treatment\":\"conta\",\"size\":0}],\"label\":\"in segment 
new_segment\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"client\",\"attribute\":null},\"matcherType\":\"ALL_KEYS\",\"negate\":false,\"userDefinedSegmentMatcherData\":null,\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":100},{\"treatment\":\"off\",\"size\":0},{\"treatment\":\"free\",\"size\":0},{\"treatment\":\"conta\",\"size\":0}],\"label\":\"default rule\"}]}],\"s\":-1,\"t\":1602796638344},\"rbs\":{\"d\":[],\"t\":-1,\"s\":-1}}"); + MockResponse responseFlag = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1602796638344, \"t\":1602796638344},\"rbs\":{\"d\":[],\"t\":-1,\"s\":-1}}"); + MockResponse segmentResponse = new MockResponse().setBody("{\"name\":\"new_segment\",\"added\":[\"user-1\"],\"removed\":[\"user-2\",\"user-3\"],\"since\":-1,\"till\":-1}"); + Queue responses = new LinkedList<>(); + responses.add(response); + Queue responsesFlags = new LinkedList<>(); + responsesFlags.add(responseFlag); + Queue segmentResponses = new LinkedList<>(); + segmentResponses.add(segmentResponse); + SplitMockServer splitServer = new SplitMockServer(CustomDispatcher.builder() + .path(CustomDispatcher.INITIAL_FLAGS_BY_SETS, responses) + .path(CustomDispatcher.SINCE_1602796638344, responsesFlags) + .path(CustomDispatcher.SEGMENT_BY_FLAG_SET, segmentResponses) + .build()); + splitServer.start(); + + SplitClientConfig config = SplitClientConfig.builder() + .setBlockUntilReadyTimeout(10000) + .endpoint(splitServer.getUrl(), splitServer.getUrl()) + .authServiceURL(String.format("%s/api/auth/enabled", splitServer.getUrl())) + .streamingEnabled(false) + .flagSetsFilter(Arrays.asList("set2", "set1")) + .featuresRefreshRate(5) + .build(); + + SplitFactory factory = SplitFactoryBuilder.build("fake-api-token", config); + 
SplitClient client = factory.client(); + client.blockUntilReady(); + + String result = client.getTreatment("admin", "workm"); + Assert.assertEquals("on", result); + Assert.assertEquals("on", client.getTreatmentsByFlagSet("admin", "set1", new HashMap<>()).get("workm")); + + client.destroy(); + splitServer.stop(); + } + + @Test + public void ImpressionToggleOptimizedModeTest() throws Exception { + String splits = new String(Files.readAllBytes(Paths.get("src/test/resources/splits_imp_toggle.json")), StandardCharsets.UTF_8); + List allRequests = new ArrayList<>(); + Dispatcher dispatcher = new Dispatcher() { + @Override + public MockResponse dispatch(RecordedRequest request) { + allRequests.add(request); + switch (request.getPath()) { + case "/api/splitChanges?s=1.3&since=-1&rbSince=-1": + return new MockResponse().setResponseCode(200).setBody(splits); + case "/api/splitChanges?s=1.3&since=1602796638344&rbSince=-1": + return new MockResponse().setResponseCode(200).setBody("{\"ff\":{\"d\":[], \"s\":1602796638344, \"t\":1602796638344}, \"rbs\":{\"d\":[],\"s\":-1,\"t\":-1}}"); + case "/api/testImpressions/bulk": + return new MockResponse().setResponseCode(200); + case "/api/testImpressions/count": + return new MockResponse().setResponseCode(200); + case "/v1/keys/ss": + return new MockResponse().setResponseCode(200); + case "/v1/metrics/usage": + return new MockResponse().setResponseCode(200); + case "/v1/metrics/config": + return new MockResponse().setResponseCode(200); + } + return new MockResponse().setResponseCode(404); + } + }; + + MockWebServer server = new MockWebServer(); + server.setDispatcher(dispatcher); + + server.start(); + String serverURL = String.format("http://%s:%s", server.getHostName(), server.getPort()); + SplitClientConfig config = SplitClientConfig.builder() + .setBlockUntilReadyTimeout(10000) + .endpoint(serverURL, serverURL) + .authServiceURL(String.format("%s/api/auth/enabled", serverURL)) + .telemetryURL(serverURL + "/v1") + 
.streamingEnabled(false) + .featuresRefreshRate(5) + .impressionsMode(ImpressionsManager.Mode.OPTIMIZED) + .build(); + + SplitFactory factory = SplitFactoryBuilder.build("fake-api-token", config); + SplitClient client = factory.client(); + client.blockUntilReady(); + + Assert.assertEquals("off", client.getTreatment("user1", "without_impression_toggle", new HashMap<>())); + Assert.assertEquals("off", client.getTreatment("user2", "impression_toggle_on", new HashMap<>())); + Assert.assertEquals("off", client.getTreatment("user3", "impression_toggle_off", new HashMap<>())); + client.destroy(); + boolean check1 = false, check2 = false; + for (int i=0; i < allRequests.size(); i++ ) { + if (allRequests.get(i).getPath().equals("/api/testImpressions/bulk") ) { + check1 = true; + String body = allRequests.get(i).getBody().readUtf8(); + Assert.assertTrue(body.contains("without_impression_toggle")); + Assert.assertTrue(body.contains("impression_toggle_on")); + Assert.assertFalse(body.contains("impression_toggle_off")); + } + if (allRequests.get(i).getPath().equals("/v1/keys/ss")) { + check2 = true; + String body = allRequests.get(i).getBody().readUtf8(); + Assert.assertFalse(body.contains("without_impression_toggle")); + Assert.assertFalse(body.contains("impression_toggle_on")); + Assert.assertTrue(body.contains("impression_toggle_off")); + } + } + server.shutdown(); + Assert.assertTrue(check1); + Assert.assertTrue(check2); + } + + @Test + public void ImpressionToggleDebugModeTest() throws Exception { + String splits = new String(Files.readAllBytes(Paths.get("src/test/resources/splits_imp_toggle.json")), StandardCharsets.UTF_8); + List allRequests = new ArrayList<>(); + Dispatcher dispatcher = new Dispatcher() { + @Override + public MockResponse dispatch(RecordedRequest request) { + allRequests.add(request); + switch (request.getPath()) { + case "/api/splitChanges?s=1.3&since=-1&rbSince=-1": + return new MockResponse().setResponseCode(200).setBody(splits); + case 
"/api/splitChanges?s=1.3&since=1602796638344&rbSince=-1": + return new MockResponse().setResponseCode(200).setBody("{\"ff\":{\"d\":[], \"s\":1602796638344, \"t\":1602796638344}, \"rbs\":{\"d\":[],\"s\":-1,\"t\":-1}}"); + case "/api/testImpressions/bulk": + return new MockResponse().setResponseCode(200); + case "/api/testImpressions/count": + return new MockResponse().setResponseCode(200); + case "/v1/keys/ss": + return new MockResponse().setResponseCode(200); + case "/v1/metrics/usage": + return new MockResponse().setResponseCode(200); + case "/v1/metrics/config": + return new MockResponse().setResponseCode(200); + } + return new MockResponse().setResponseCode(404); + } + }; + + MockWebServer server = new MockWebServer(); + server.setDispatcher(dispatcher); + + server.start(); + String serverURL = String.format("http://%s:%s", server.getHostName(), server.getPort()); + SplitClientConfig config = SplitClientConfig.builder() + .setBlockUntilReadyTimeout(10000) + .endpoint(serverURL, serverURL) + .telemetryURL(serverURL + "/v1") + .authServiceURL(String.format("%s/api/auth/enabled", serverURL)) + .streamingEnabled(false) + .featuresRefreshRate(5) + .impressionsMode(ImpressionsManager.Mode.DEBUG) + .build(); + + SplitFactory factory = SplitFactoryBuilder.build("fake-api-token", config); + SplitClient client = factory.client(); + client.blockUntilReady(); + + Assert.assertEquals("off", client.getTreatment("user1", "without_impression_toggle", new HashMap<>())); + Assert.assertEquals("off", client.getTreatment("user2", "impression_toggle_on", new HashMap<>())); + Assert.assertEquals("off", client.getTreatment("user3", "impression_toggle_off", new HashMap<>())); + client.destroy(); + boolean check1 = false, check2 = false, check3 = false; + for (int i=0; i < allRequests.size(); i++ ) { + if (allRequests.get(i).getPath().equals("/api/testImpressions/bulk") ) { + check1 = true; + String body = allRequests.get(i).getBody().readUtf8(); + 
Assert.assertTrue(body.contains("without_impression_toggle")); + Assert.assertTrue(body.contains("impression_toggle_on")); + Assert.assertFalse(body.contains("impression_toggle_off")); + } + if (allRequests.get(i).getPath().equals("/v1/keys/ss")) { + check2 = true; + String body = allRequests.get(i).getBody().readUtf8(); + Assert.assertFalse(body.contains("without_impression_toggle")); + Assert.assertFalse(body.contains("impression_toggle_on")); + Assert.assertTrue(body.contains("impression_toggle_off")); + } + if (allRequests.get(i).getPath().equals("/api/testImpressions/count")) { + check3 = true; + String body = allRequests.get(i).getBody().readUtf8(); + Assert.assertFalse(body.contains("without_impression_toggle")); + Assert.assertFalse(body.contains("impression_toggle_on")); + Assert.assertTrue(body.contains("impression_toggle_off")); + } + } + server.shutdown(); + Assert.assertTrue(check1); + Assert.assertTrue(check2); + Assert.assertTrue(check3); + } + + @Test + public void ImpressionToggleNoneModeTest() throws Exception { + String splits = new String(Files.readAllBytes(Paths.get("src/test/resources/splits_imp_toggle.json")), StandardCharsets.UTF_8); + List allRequests = new ArrayList<>(); + Dispatcher dispatcher = new Dispatcher() { + @Override + public MockResponse dispatch(RecordedRequest request) { + allRequests.add(request); + switch (request.getPath()) { + case "/api/splitChanges?s=1.3&since=-1&rbSince=-1": + return new MockResponse().setResponseCode(200).setBody(splits); + case "/api/splitChanges?s=1.3&since=1602796638344&rbSince=-1": + return new MockResponse().setResponseCode(200).setBody("{\"ff\":{\"d\":[], \"s\":1602796638344, \"t\":1602796638344}, \"rbs\":{\"d\":[],\"s\":-1,\"t\":-1}}"); + case "/api/testImpressions/bulk": + return new MockResponse().setResponseCode(200); + case "/api/testImpressions/count": + return new MockResponse().setResponseCode(200); + case "/v1/keys/ss": + return new MockResponse().setResponseCode(200); + case 
"/v1/metrics/usage": + return new MockResponse().setResponseCode(200); + case "/v1/metrics/config": + return new MockResponse().setResponseCode(200); + } + return new MockResponse().setResponseCode(404); + } + }; + + MockWebServer server = new MockWebServer(); + server.setDispatcher(dispatcher); + + server.start(); + String serverURL = String.format("http://%s:%s", server.getHostName(), server.getPort()); + SplitClientConfig config = SplitClientConfig.builder() + .setBlockUntilReadyTimeout(10000) + .endpoint(serverURL, serverURL) + .telemetryURL(serverURL + "/v1") + .authServiceURL(String.format("%s/api/auth/enabled", serverURL)) + .streamingEnabled(false) + .featuresRefreshRate(5) + .impressionsMode(ImpressionsManager.Mode.NONE) + .build(); + + SplitFactory factory = SplitFactoryBuilder.build("fake-api-token", config); + SplitClient client = factory.client(); + client.blockUntilReady(); + + Assert.assertEquals("off", client.getTreatment("user1", "without_impression_toggle", new HashMap<>())); + Assert.assertEquals("off", client.getTreatment("user2", "impression_toggle_on", new HashMap<>())); + Assert.assertEquals("off", client.getTreatment("user3", "impression_toggle_off", new HashMap<>())); + client.destroy(); + boolean check1 = false, check2 = false, check3 = false; + for (int i=0; i < allRequests.size(); i++ ) { + if (allRequests.get(i).getPath().equals("/api/testImpressions/bulk") ) { + check1 = true; + } + if (allRequests.get(i).getPath().equals("/v1/keys/ss")) { + check2 = true; + String body = allRequests.get(i).getBody().readUtf8(); + Assert.assertTrue(body.contains("without_impression_toggle")); + Assert.assertTrue(body.contains("impression_toggle_on")); + Assert.assertTrue(body.contains("impression_toggle_off")); + } + if (allRequests.get(i).getPath().equals("/api/testImpressions/count")) { + check3 = true; + String body = allRequests.get(i).getBody().readUtf8(); + Assert.assertTrue(body.contains("without_impression_toggle")); + 
Assert.assertTrue(body.contains("impression_toggle_on")); + Assert.assertTrue(body.contains("impression_toggle_off")); + } + } + server.shutdown(); + Assert.assertFalse(check1); + Assert.assertTrue(check2); + Assert.assertTrue(check3); + } + + @Test + public void ImpressionPropertiesTest() throws Exception { + String splits = new String(Files.readAllBytes(Paths.get("src/test/resources/splits_imp_toggle.json")), StandardCharsets.UTF_8); + List allRequests = new ArrayList<>(); + Dispatcher dispatcher = new Dispatcher() { + @Override + public MockResponse dispatch(RecordedRequest request) { + allRequests.add(request); + switch (request.getPath()) { + case "/api/splitChanges?s=1.3&since=-1&rbSince=-1": + return new MockResponse().setResponseCode(200).setBody(splits); + case "/api/splitChanges?s=1.3&since=1602796638344&rbSince=-1": + return new MockResponse().setResponseCode(200).setBody("{\"ff\":{\"d\":[], \"s\":1602796638344, \"t\":1602796638344}, \"rbs\":{\"d\":[],\"s\":-1,\"t\":-1}}"); + case "/api/testImpressions/bulk": + return new MockResponse().setResponseCode(200); + case "/api/testImpressions/count": + return new MockResponse().setResponseCode(200); + case "/v1/keys/ss": + return new MockResponse().setResponseCode(200); + case "/v1/metrics/usage": + return new MockResponse().setResponseCode(200); + case "/v1/metrics/config": + return new MockResponse().setResponseCode(200); + } + return new MockResponse().setResponseCode(404); + } + }; + + MockWebServer server = new MockWebServer(); + server.setDispatcher(dispatcher); + + server.start(); + String serverURL = String.format("http://%s:%s", server.getHostName(), server.getPort()); + SplitClientConfig config = SplitClientConfig.builder() + .setBlockUntilReadyTimeout(10000) + .endpoint(serverURL, serverURL) + .telemetryURL(serverURL + "/v1") + .authServiceURL(String.format("%s/api/auth/enabled", serverURL)) + .streamingEnabled(false) + .featuresRefreshRate(5) + .impressionsMode(ImpressionsManager.Mode.DEBUG) + 
.build(); + + SplitFactory factory = SplitFactoryBuilder.build("fake-api-token", config); + SplitClient client = factory.client(); + client.blockUntilReady(); + + Assert.assertEquals("off", client.getTreatment("user1", "without_impression_toggle", new HashMap<>(), new EvaluationOptions(new HashMap() {{ put("prop1", "val1"); }}))); + Assert.assertEquals("off", client.getTreatment("user2", "impression_toggle_on", new HashMap<>(), new EvaluationOptions(new HashMap() + {{ + put("prop1", "val1"); + put("prop2", "val2"); + }}))); + Assert.assertEquals("off", client.getTreatment("user3", "impression_toggle_on", new EvaluationOptions(null))); + client.destroy(); + boolean check1 = false, check2 = false, check3 = false; + for (int i=0; i < allRequests.size(); i++ ) { + if (allRequests.get(i).getPath().equals("/api/testImpressions/bulk") ) { + String body = allRequests.get(i).getBody().readUtf8(); + if (body.contains("user1")) { + check1 = true; + Assert.assertTrue(body.contains("without_impression_toggle")); + Assert.assertTrue(body.contains("\"properties\":\"{\\\"prop1\\\":\\\"val1\\\"}\"")); + } + if (body.contains("user2")) { + check2 = true; + Assert.assertTrue(body.contains("impression_toggle_on")); + Assert.assertTrue(body.contains("\"properties\":\"{\\\"prop2\\\":\\\"val2\\\",\\\"prop1\\\":\\\"val1\\\"}\"")); + } + if (body.contains("user3")) { + check3 = true; + Assert.assertTrue(body.contains("impression_toggle_on")); + Assert.assertTrue(body.contains("\"properties\":null")); + } + } + } + server.shutdown(); + Assert.assertTrue(check1); + Assert.assertTrue(check2); + } + + @Test + public void getTreatmentWithPrerequisites() throws Exception { + String splits = new String(Files.readAllBytes(Paths.get("src/test/resources/splits_prereq.json")), StandardCharsets.UTF_8); + List allRequests = new ArrayList<>(); + Dispatcher dispatcher = new Dispatcher() { + @Override + public MockResponse dispatch(RecordedRequest request) { + allRequests.add(request); + switch 
(request.getPath()) { + case "/api/splitChanges?s=1.3&since=-1&rbSince=-1": + return new MockResponse().setResponseCode(200).setBody(splits); + case "/api/splitChanges?s=1.3&since=1585948850109&rbSince=1585948850109": + return new MockResponse().setResponseCode(200).setBody("{\"ff\":{\"d\": [], \"s\":1585948850109, \"t\":1585948850109},\"rbs\":{\"d\":[],\"t\":1585948850109,\"s\":1585948850109}}"); + case "/api/segmentChanges/segment-test?since=-1": + return new MockResponse().setResponseCode(200).setBody("{\"name\":\"segment-test\",\"added\":[\"user-1\"],\"removed\":[],\"since\":-1,\"till\":-1}"); + case "/api/testImpressions/bulk": + return new MockResponse().setResponseCode(200); + case "/api/testImpressions/count": + return new MockResponse().setResponseCode(200); + case "/v1/keys/ss": + return new MockResponse().setResponseCode(200); + case "/v1/metrics/usage": + return new MockResponse().setResponseCode(200); + case "/v1/metrics/config": + return new MockResponse().setResponseCode(200); + } + return new MockResponse().setResponseCode(404); + } + }; + + MockWebServer splitServer = new MockWebServer(); + splitServer.setDispatcher(dispatcher); + splitServer.start(); + String serverURL = String.format("http://%s:%s", splitServer.getHostName(), splitServer.getPort()); + + SplitClientConfig config = SplitClientConfig.builder() + .setBlockUntilReadyTimeout(10000) + .endpoint(serverURL, serverURL) + .telemetryURL(serverURL + "/v1") + .authServiceURL(String.format("%s/api/auth/enabled", serverURL)) + .streamingEnabled(false) + .featuresRefreshRate(5) + .impressionsMode(ImpressionsManager.Mode.DEBUG) + .build(); + + SplitFactory factory = SplitFactoryBuilder.build("fake-api-token", config); + SplitClient client = factory.client(); + client.blockUntilReady(); + + Assert.assertEquals("on", client.getTreatment("bilal@split.io", "test_prereq", new HashMap() {{ + put("email", "bilal@@split.io"); + }})); + Assert.assertEquals("def_treatment", 
client.getTreatment("bilal@split.io", "test_prereq")); + Assert.assertEquals("def_treatment", client.getTreatment("mauro@split.io", "test_prereq", new HashMap() {{ + put("email", "mauro@@split.io"); + }})); + Assert.assertEquals("on", client.getTreatment("pato@split.io", "test_prereq", new HashMap() {{ + put("email", "pato@@split.io"); + }})); + + Assert.assertEquals("on_whitelist", client.getTreatment("bilal@split.io", "prereq_chain", new HashMap() {{ + put("email", "bilal@@split.io"); + }})); + Assert.assertEquals("on", client.getTreatment("pato@split.io", "prereq_chain", new HashMap() {{ + put("email", "pato@@split.io"); + }})); + Assert.assertEquals("on_default", client.getTreatment("mauro@split.io", "prereq_chain", new HashMap() {{ + put("email", "mauro@@split.io"); + }})); + + client.destroy(); + splitServer.shutdown(); + } + + @Test + public void FallbackTreatmentGlobalAndByFlagTest() throws Exception { + String splits = new String(Files.readAllBytes(Paths.get("src/test/resources/splits_imp_toggle.json")), StandardCharsets.UTF_8); + List allRequests = new ArrayList<>(); + Dispatcher dispatcher = new Dispatcher() { + @Override + public MockResponse dispatch(RecordedRequest request) { + allRequests.add(request); + switch (request.getPath()) { + case "/api/splitChanges?s=1.3&since=-1&rbSince=-1": + return new MockResponse().setResponseCode(200).setBody(splits); + case "/api/splitChanges?s=1.3&since=1602796638344&rbSince=-1": + return new MockResponse().setResponseCode(200).setBody("{\"ff\":{\"d\":[], \"s\":1602796638344, \"t\":1602796638344}, \"rbs\":{\"d\":[],\"s\":-1,\"t\":-1}}"); + case "/api/testImpressions/bulk": + return new MockResponse().setResponseCode(200); + case "/api/testImpressions/count": + return new MockResponse().setResponseCode(200); + case "/v1/keys/ss": + return new MockResponse().setResponseCode(200); + case "/v1/metrics/usage": + return new MockResponse().setResponseCode(200); + case "/v1/metrics/config": + return new 
MockResponse().setResponseCode(200); + } + return new MockResponse().setResponseCode(404); + } + }; + + MockWebServer server = new MockWebServer(); + server.setDispatcher(dispatcher); + + server.start(); + String serverURL = String.format("http://%s:%s", server.getHostName(), server.getPort()); + FallbackTreatmentsConfiguration fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration(new FallbackTreatment("on-fallback", "{\"prop1\", \"val1\"}"), + new HashMap() {{ put("feature", new FallbackTreatment("off-fallback", "{\"prop2\", \"val2\"}")); }}); + + SplitClientConfig config = SplitClientConfig.builder() + .setBlockUntilReadyTimeout(10000) + .endpoint(serverURL, serverURL) + .telemetryURL(serverURL + "/v1") + .authServiceURL(String.format("%s/api/auth/enabled", serverURL)) + .streamingEnabled(false) + .featuresRefreshRate(5) + .impressionsMode(ImpressionsManager.Mode.DEBUG) + .fallbackTreatments(fallbackTreatmentsConfiguration) + .build(); + + SplitFactory factory = SplitFactoryBuilder.build("fake-api-token", config); + SplitClient client = factory.client(); + client.blockUntilReady(); + + Assert.assertEquals("off", client.getTreatment("user1", "without_impression_toggle")); + Assert.assertEquals("off-fallback", client.getTreatmentWithConfig("user2", "feature").treatment()); + Assert.assertEquals("{\"prop2\", \"val2\"}", client.getTreatmentWithConfig("user2", "feature").config()); + Assert.assertEquals("on-fallback", client.getTreatmentWithConfig("user2", "feature2").treatment()); + Assert.assertEquals("{\"prop1\", \"val1\"}", client.getTreatmentWithConfig("user2", "feature2").config()); + + client.destroy(); + boolean check1 = false; + for (int i=0; i < allRequests.size(); i++ ) { + if (allRequests.get(i).getPath().equals("/api/testImpressions/bulk") ) { + String body = allRequests.get(i).getBody().readUtf8(); + if (body.contains("user1")) { + check1 = true; + Assert.assertTrue(body.contains("without_impression_toggle")); + } + } + } + 
server.shutdown(); + Assert.assertTrue(check1); + } + + @Test + public void FallbackTreatmentGlobalTest() throws Exception { + String splits = new String(Files.readAllBytes(Paths.get("src/test/resources/splits_imp_toggle.json")), StandardCharsets.UTF_8); + List allRequests = new ArrayList<>(); + Dispatcher dispatcher = new Dispatcher() { + @Override + public MockResponse dispatch(RecordedRequest request) { + allRequests.add(request); + switch (request.getPath()) { + case "/api/splitChanges?s=1.3&since=-1&rbSince=-1": + return new MockResponse().setResponseCode(200).setBody(splits); + case "/api/splitChanges?s=1.3&since=1602796638344&rbSince=-1": + return new MockResponse().setResponseCode(200).setBody("{\"ff\":{\"d\":[], \"s\":1602796638344, \"t\":1602796638344}, \"rbs\":{\"d\":[],\"s\":-1,\"t\":-1}}"); + case "/api/testImpressions/bulk": + return new MockResponse().setResponseCode(200); + case "/api/testImpressions/count": + return new MockResponse().setResponseCode(200); + case "/v1/keys/ss": + return new MockResponse().setResponseCode(200); + case "/v1/metrics/usage": + return new MockResponse().setResponseCode(200); + case "/v1/metrics/config": + return new MockResponse().setResponseCode(200); + } + return new MockResponse().setResponseCode(404); + } + }; + + MockWebServer server = new MockWebServer(); + server.setDispatcher(dispatcher); + + server.start(); + String serverURL = String.format("http://%s:%s", server.getHostName(), server.getPort()); + FallbackTreatmentsConfiguration fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration(new FallbackTreatment("on-fallback", "{\"prop1\", \"val1\"}")); + + SplitClientConfig config = SplitClientConfig.builder() + .setBlockUntilReadyTimeout(10000) + .endpoint(serverURL, serverURL) + .telemetryURL(serverURL + "/v1") + .authServiceURL(String.format("%s/api/auth/enabled", serverURL)) + .streamingEnabled(false) + .featuresRefreshRate(5) + .impressionsMode(ImpressionsManager.Mode.DEBUG) + 
.fallbackTreatments(fallbackTreatmentsConfiguration) + .build(); + + SplitFactory factory = SplitFactoryBuilder.build("fake-api-token", config); + SplitClient client = factory.client(); + client.blockUntilReady(); + + Assert.assertEquals("off", client.getTreatment("user1", "without_impression_toggle")); + Assert.assertEquals("on-fallback", client.getTreatmentWithConfig("user2", "feature").treatment()); + Assert.assertEquals("{\"prop1\", \"val1\"}", client.getTreatmentWithConfig("user2", "feature").config()); + Assert.assertEquals("on-fallback", client.getTreatmentWithConfig("user2", "feature2").treatment()); + Assert.assertEquals("{\"prop1\", \"val1\"}", client.getTreatmentWithConfig("user2", "feature2").config()); + + client.destroy(); + boolean check1 = false; + for (int i=0; i < allRequests.size(); i++ ) { + if (allRequests.get(i).getPath().equals("/api/testImpressions/bulk") ) { + String body = allRequests.get(i).getBody().readUtf8(); + if (body.contains("user1")) { + check1 = true; + Assert.assertTrue(body.contains("without_impression_toggle")); + } + } + } + server.shutdown(); + Assert.assertTrue(check1); + } + + @Test + public void FallbackTreatmentByFlagTest() throws Exception { + String splits = new String(Files.readAllBytes(Paths.get("src/test/resources/splits_imp_toggle.json")), StandardCharsets.UTF_8); + List allRequests = new ArrayList<>(); + Dispatcher dispatcher = new Dispatcher() { + @Override + public MockResponse dispatch(RecordedRequest request) { + allRequests.add(request); + switch (request.getPath()) { + case "/api/splitChanges?s=1.3&since=-1&rbSince=-1": + return new MockResponse().setResponseCode(200).setBody(splits); + case "/api/splitChanges?s=1.3&since=1602796638344&rbSince=-1": + return new MockResponse().setResponseCode(200).setBody("{\"ff\":{\"d\":[], \"s\":1602796638344, \"t\":1602796638344}, \"rbs\":{\"d\":[],\"s\":-1,\"t\":-1}}"); + case "/api/testImpressions/bulk": + return new MockResponse().setResponseCode(200); + case 
"/api/testImpressions/count": + return new MockResponse().setResponseCode(200); + case "/v1/keys/ss": + return new MockResponse().setResponseCode(200); + case "/v1/metrics/usage": + return new MockResponse().setResponseCode(200); + case "/v1/metrics/config": + return new MockResponse().setResponseCode(200); + } + return new MockResponse().setResponseCode(404); + } + }; + + MockWebServer server = new MockWebServer(); + server.setDispatcher(dispatcher); + + server.start(); + String serverURL = String.format("http://%s:%s", server.getHostName(), server.getPort()); + FallbackTreatmentsConfiguration fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration( + new HashMap() {{ put("feature", new FallbackTreatment("off-fallback", "{\"prop2\", \"val2\"}")); }}); + + SplitClientConfig config = SplitClientConfig.builder() + .setBlockUntilReadyTimeout(10000) + .endpoint(serverURL, serverURL) + .telemetryURL(serverURL + "/v1") + .authServiceURL(String.format("%s/api/auth/enabled", serverURL)) + .streamingEnabled(false) + .featuresRefreshRate(5) + .impressionsMode(ImpressionsManager.Mode.DEBUG) + .fallbackTreatments(fallbackTreatmentsConfiguration) + .build(); + + SplitFactory factory = SplitFactoryBuilder.build("fake-api-token", config); + SplitClient client = factory.client(); + client.blockUntilReady(); + + Assert.assertEquals("off", client.getTreatment("user1", "without_impression_toggle")); + Assert.assertEquals("off-fallback", client.getTreatmentWithConfig("user2", "feature").treatment()); + Assert.assertEquals("{\"prop2\", \"val2\"}", client.getTreatmentWithConfig("user2", "feature").config()); + Assert.assertEquals("control", client.getTreatmentWithConfig("user2", "feature2").treatment()); + Assert.assertEquals(null, client.getTreatmentWithConfig("user2", "feature2").config()); + + client.destroy(); + boolean check1 = false; + for (int i=0; i < allRequests.size(); i++ ) { + if (allRequests.get(i).getPath().equals("/api/testImpressions/bulk") ) { + String 
body = allRequests.get(i).getBody().readUtf8(); + if (body.contains("user1")) { + check1 = true; + Assert.assertTrue(body.contains("without_impression_toggle")); + } + } + } + server.shutdown(); + Assert.assertTrue(check1); + } + + @Test + public void FallbackTreatmentNotReadyTest() throws Exception { + String splits = new String(Files.readAllBytes(Paths.get("src/test/resources/splits_imp_toggle.json")), StandardCharsets.UTF_8); + List allRequests = new ArrayList<>(); + Dispatcher dispatcher = new Dispatcher() { + @Override + public MockResponse dispatch(RecordedRequest request) throws InterruptedException { + allRequests.add(request); + switch (request.getPath()) { + case "/api/splitChanges?s=1.3&since=-1&rbSince=-1": + Thread.sleep(1000); + return new MockResponse().setResponseCode(200).setBody(splits); + case "/api/splitChanges?s=1.3&since=1602796638344&rbSince=-1": + return new MockResponse().setResponseCode(200).setBody("{\"ff\":{\"d\":[], \"s\":1602796638344, \"t\":1602796638344}, \"rbs\":{\"d\":[],\"s\":-1,\"t\":-1}}"); + case "/api/testImpressions/bulk": + return new MockResponse().setResponseCode(200); + case "/api/testImpressions/count": + return new MockResponse().setResponseCode(200); + case "/v1/keys/ss": + return new MockResponse().setResponseCode(200); + case "/v1/metrics/usage": + return new MockResponse().setResponseCode(200); + case "/v1/metrics/config": + return new MockResponse().setResponseCode(200); + } + return new MockResponse().setResponseCode(404); + } + }; + + MockWebServer server = new MockWebServer(); + server.setDispatcher(dispatcher); + + server.start(); + String serverURL = String.format("http://%s:%s", server.getHostName(), server.getPort()); + FallbackTreatmentsConfiguration fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration("on-fallback"); + + SplitClientConfig config = SplitClientConfig.builder() + .setBlockUntilReadyTimeout(10000) + .endpoint(serverURL, serverURL) + .telemetryURL(serverURL + "/v1") + 
.authServiceURL(String.format("%s/api/auth/enabled", serverURL)) + .streamingEnabled(false) + .featuresRefreshRate(5) + .impressionsMode(ImpressionsManager.Mode.DEBUG) + .fallbackTreatments(fallbackTreatmentsConfiguration) + .build(); + + SplitFactory factory = SplitFactoryBuilder.build("fake-api-token", config); + SplitClient client = factory.client(); + + Assert.assertEquals("on-fallback", client.getTreatment("user1", "without_impression_toggle")); + Assert.assertEquals("on-fallback", client.getTreatment("user2", "feature")); + client.blockUntilReady(); + + client.destroy(); + boolean check1 = false, check2 = false; + for (int i=0; i < allRequests.size(); i++ ) { + if (allRequests.get(i).getPath().equals("/api/testImpressions/bulk") ) { + String body = allRequests.get(i).getBody().readUtf8(); + if (body.contains("user2")) { + check1 = true; + Assert.assertTrue(body.contains("feature")); + Assert.assertTrue(body.contains("fallback - not ready")); + } + if (body.contains("user1")) { + check2 = true; + Assert.assertTrue(body.contains("without_impression_toggle")); + Assert.assertTrue(body.contains("fallback - not ready")); + } + } + } + server.shutdown(); + Assert.assertTrue(check1); + Assert.assertTrue(check2); + } + private SSEMockServer buildSSEMockServer(SSEMockServer.SseEventQueue eventQueue) { return new SSEMockServer(eventQueue, (token, version, channel) -> { if (!"1.1".equals(version)) { @@ -710,5 +1487,4 @@ private SplitClientConfig buildSplitClientConfig(String authUrl, String splitSer .streamingEnabled(streamingEnabled) .build(); } - -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/SplitFactoryImplTest.java b/client/src/test/java/io/split/client/SplitFactoryImplTest.java index 2ac8cc380..82f3ea8c3 100644 --- a/client/src/test/java/io/split/client/SplitFactoryImplTest.java +++ b/client/src/test/java/io/split/client/SplitFactoryImplTest.java @@ -1,19 +1,47 @@ package io.split.client; +import 
io.split.client.dtos.FallbackTreatment; +import io.split.client.dtos.FallbackTreatmentCalculatorImp; +import io.split.client.dtos.FallbackTreatmentsConfiguration; +import io.split.client.dtos.ProxyConfiguration; import io.split.client.impressions.ImpressionsManager; +import io.split.client.utils.FileTypeEnum; +import io.split.engine.evaluator.EvaluatorImp; import io.split.integrations.IntegrationsConfig; +import io.split.service.SplitHttpClientImpl; import io.split.storages.enums.OperationMode; -import io.split.storages.pluggable.domain.SafeUserStorageWrapper; +import io.split.storages.pluggable.domain.UserStorageWrapper; import io.split.telemetry.storage.TelemetryStorage; import io.split.telemetry.synchronizer.TelemetrySynchronizer; import junit.framework.TestCase; +import org.apache.hc.client5.http.auth.AuthScope; +import org.apache.hc.client5.http.auth.UsernamePasswordCredentials; +import org.apache.hc.client5.http.impl.auth.BasicCredentialsProvider; +import org.apache.hc.client5.http.impl.io.DefaultHttpClientConnectionOperator; +import org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManager; +import org.apache.hc.client5.http.impl.routing.DefaultProxyRoutePlanner; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.config.Registry; +import org.awaitility.Awaitility; +import org.junit.Assert; import org.junit.Test; import org.mockito.Mockito; +import static org.mockito.Mockito.when; import pluggable.CustomStorageWrapper; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; import java.lang.reflect.Field; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.net.URISyntaxException; +import java.net.URL; +import java.util.HashMap; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeUnit; + public class SplitFactoryImplTest extends TestCase { public static final String API_KEY 
="29013ionasdasd09u"; @@ -32,11 +60,26 @@ public void testFactoryInstantiation() throws Exception { .authServiceURL(AUTH_SERVICE) .setBlockUntilReadyTimeout(10000) .telemetryURL(SplitClientConfig.TELEMETRY_ENDPOINT) + .fallbackTreatments(new FallbackTreatmentsConfiguration(new FallbackTreatment("on"))) .build(); SplitFactoryImpl splitFactory = new SplitFactoryImpl(API_KEY, splitClientConfig); assertNotNull(splitFactory.client()); assertNotNull(splitFactory.manager()); + + Field fallbackField = SplitClientImpl.class.getDeclaredField("_fallbackTreatmentCalculator"); + fallbackField.setAccessible(true); + FallbackTreatmentCalculatorImp fallbackCalc = (FallbackTreatmentCalculatorImp) fallbackField.get(splitFactory.client()); + assertNotNull(fallbackCalc); + + Field evalField = SplitClientImpl.class.getDeclaredField("_evaluator"); + evalField.setAccessible(true); + EvaluatorImp evaluatorImp = (EvaluatorImp) evalField.get(splitFactory.client()); + assertNotNull(fallbackCalc); + fallbackField = EvaluatorImp.class.getDeclaredField("_fallbackTreatmentCalculator"); + fallbackField.setAccessible(true); + fallbackCalc = (FallbackTreatmentCalculatorImp) fallbackField.get(evaluatorImp); + assertNotNull(fallbackCalc); } @Test @@ -75,24 +118,205 @@ public void testFactoryInstantiationIntegrationsConfig() throws Exception { } @Test - public void testFactoryInstantiationWithProxy() throws Exception { + public void testFactoryInstantiationWithLegacyProxy() throws Exception { SplitClientConfig splitClientConfig = SplitClientConfig.builder() .enableDebug() .impressionsMode(ImpressionsManager.Mode.DEBUG) .impressionsRefreshRate(1) - .endpoint(ENDPOINT,EVENTS_ENDPOINT) + .endpoint(ENDPOINT, EVENTS_ENDPOINT) .telemetryURL(SplitClientConfig.TELEMETRY_ENDPOINT) .authServiceURL(AUTH_SERVICE) .setBlockUntilReadyTimeout(1000) - .proxyPort(6060) - .proxyUsername("test") - .proxyPassword("password") - .proxyHost(ENDPOINT) + .proxyPort(8888) + .proxyHost("proxy-host") + .proxyUsername("user") + 
.proxyPassword("pass") .build(); SplitFactoryImpl splitFactory = new SplitFactoryImpl(API_KEY, splitClientConfig); + assertNotNull(splitFactory.client()); + splitFactory.destroy(); + } + + @Test + public void testFactoryInstantiationWithProxyCredentials() throws Exception { + class MyBearerCredentialsProvider implements io.split.client.dtos.BasicCredentialsProvider { + @Override + public String getUsername() { + return "test"; + } + @Override + public String getPassword() { + return "password"; + } + }; + + SplitClientConfig splitClientConfig = SplitClientConfig.builder() + .enableDebug() + .impressionsMode(ImpressionsManager.Mode.DEBUG) + .impressionsRefreshRate(1) + .endpoint(ENDPOINT, EVENTS_ENDPOINT) + .telemetryURL(SplitClientConfig.TELEMETRY_ENDPOINT) + .authServiceURL(AUTH_SERVICE) + .setBlockUntilReadyTimeout(1000) + .proxyConfiguration(ProxyConfiguration.builder() + .url(new URL("http://proxy-name:6060")) + .credentialsProvider(new MyBearerCredentialsProvider()) + .build()) + .build(); + SplitFactoryImpl splitFactory = new SplitFactoryImpl(API_KEY, splitClientConfig); assertNotNull(splitFactory.client()); assertNotNull(splitFactory.manager()); + + Field splitHttpClientField = SplitFactoryImpl.class.getDeclaredField("_splitHttpClient"); + splitHttpClientField.setAccessible(true); + SplitHttpClientImpl client = (SplitHttpClientImpl) splitHttpClientField.get(splitFactory); + + Field httpClientField = SplitHttpClientImpl.class.getDeclaredField("_client"); + httpClientField.setAccessible(true); + Class InternalHttp = Class.forName("org.apache.hc.client5.http.impl.classic.InternalHttpClient"); + + Field routePlannerField = InternalHttp.getDeclaredField("routePlanner"); + routePlannerField.setAccessible(true); + DefaultProxyRoutePlanner routePlanner = (DefaultProxyRoutePlanner) routePlannerField.get(InternalHttp.cast(httpClientField.get(client))); + + Field proxyField = 
DefaultProxyRoutePlanner.class.getDeclaredField("proxy"); + proxyField.setAccessible(true); + HttpHost proxy = (HttpHost) proxyField.get(routePlanner); + + Assert.assertEquals("http", proxy.getSchemeName()); + Assert.assertEquals("proxy-name", proxy.getHostName()); + Assert.assertEquals(6060, proxy.getPort()); + + Field credentialsProviderField = InternalHttp.getDeclaredField("credentialsProvider"); + credentialsProviderField.setAccessible(true); + BasicCredentialsProvider credentialsProvider = (BasicCredentialsProvider) credentialsProviderField.get(InternalHttp.cast(httpClientField.get(client))); + + Field credMapField = BasicCredentialsProvider.class.getDeclaredField("credMap"); + credMapField.setAccessible(true); + ConcurrentHashMap credMap = (ConcurrentHashMap) credMapField.get(credentialsProvider); + + Assert.assertEquals("test", credMap.entrySet().stream().iterator().next().getValue().getUserName()); + assertNotNull(credMap.entrySet().stream().iterator().next().getValue().getUserPassword()); + + splitFactory.destroy(); + } + + @Test + public void testFactoryInstantiationWithProxyToken() throws Exception { + class MyBearerCredentialsProvider implements io.split.client.dtos.BearerCredentialsProvider { + @Override + public String getToken() { + return "123456789"; + } + }; + + SplitClientConfig splitClientConfig = SplitClientConfig.builder() + .enableDebug() + .impressionsMode(ImpressionsManager.Mode.DEBUG) + .impressionsRefreshRate(1) + .endpoint(ENDPOINT, EVENTS_ENDPOINT) + .telemetryURL(SplitClientConfig.TELEMETRY_ENDPOINT) + .authServiceURL(AUTH_SERVICE) + .setBlockUntilReadyTimeout(1000) + .proxyConfiguration(ProxyConfiguration.builder() + .url(new URL("http://proxy-name:6060")) + .credentialsProvider(new MyBearerCredentialsProvider()) + .build()) + .build(); + SplitFactoryImpl splitFactory2 = new SplitFactoryImpl(API_KEY, splitClientConfig); + assertNotNull(splitFactory2.client()); + 
assertNotNull(splitFactory2.manager()); + + Field splitHttpClientField2 = SplitFactoryImpl.class.getDeclaredField("_splitHttpClient"); + splitHttpClientField2.setAccessible(true); + SplitHttpClientImpl client2 = (SplitHttpClientImpl) splitHttpClientField2.get(splitFactory2); + + Field httpClientField2 = SplitHttpClientImpl.class.getDeclaredField("_client"); + httpClientField2.setAccessible(true); + Class InternalHttp2 = Class.forName("org.apache.hc.client5.http.impl.classic.InternalHttpClient"); + + Field credentialsProviderField2 = InternalHttp2.getDeclaredField("credentialsProvider"); + credentialsProviderField2.setAccessible(true); + HttpClientDynamicCredentials credentialsProvider2 = (HttpClientDynamicCredentials) credentialsProviderField2.get(InternalHttp2.cast(httpClientField2.get(client2))); + + Field proxyRuntimeField = HttpClientDynamicCredentials.class.getDeclaredField("_bearerCredentialsProvider"); + proxyRuntimeField.setAccessible(true); + MyBearerCredentialsProvider proxyRuntime = (MyBearerCredentialsProvider) proxyRuntimeField.get(credentialsProvider2); + + assertNotNull("123456789", proxyRuntime.getToken()); + + splitFactory2.destroy(); + } + + @Test + public void testFactoryInstantiationWithProxyMtls() throws Exception { + SplitClientConfig splitClientConfig = SplitClientConfig.builder() + .enableDebug() + .impressionsMode(ImpressionsManager.Mode.DEBUG) + .impressionsRefreshRate(1) + .endpoint(ENDPOINT,EVENTS_ENDPOINT) + .telemetryURL(SplitClientConfig.TELEMETRY_ENDPOINT) + .authServiceURL(AUTH_SERVICE) + .setBlockUntilReadyTimeout(1000) + .proxyConfiguration(ProxyConfiguration.builder() + .url(new URL("http://proxy-name:6060")) + .mtls(new FileInputStream("src/test/resources/keyStore.p12"), "split") + .build()) + .build(); + SplitFactoryImpl splitFactory3 = new SplitFactoryImpl(API_KEY, splitClientConfig); + assertNotNull(splitFactory3.client()); + assertNotNull(splitFactory3.manager()); + + Field 
splitHttpClientField3 = SplitFactoryImpl.class.getDeclaredField("_splitHttpClient"); + splitHttpClientField3.setAccessible(true); + SplitHttpClientImpl client3 = (SplitHttpClientImpl) splitHttpClientField3.get(splitFactory3); + + Field httpClientField3 = SplitHttpClientImpl.class.getDeclaredField("_client"); + httpClientField3.setAccessible(true); + Class InternalHttp3 = Class.forName("org.apache.hc.client5.http.impl.classic.InternalHttpClient"); + + Field connManagerField = InternalHttp3.getDeclaredField("connManager"); + connManagerField.setAccessible(true); + PoolingHttpClientConnectionManager connManager = (PoolingHttpClientConnectionManager) connManagerField.get(InternalHttp3.cast(httpClientField3.get(client3))); + + Field connectionOperatorField = PoolingHttpClientConnectionManager.class.getDeclaredField("connectionOperator"); + connectionOperatorField.setAccessible(true); + DefaultHttpClientConnectionOperator connectionOperator = (DefaultHttpClientConnectionOperator) connectionOperatorField.get(connManager); + + Field tlsSocketStrategyLookupField = DefaultHttpClientConnectionOperator.class.getDeclaredField("tlsSocketStrategyLookup"); + tlsSocketStrategyLookupField.setAccessible(true); + Registry tlsSocketStrategyLookup = (Registry) tlsSocketStrategyLookupField.get(connectionOperator); + + Field mapField = Registry.class.getDeclaredField("map"); + mapField.setAccessible(true); + Class map = mapField.get(tlsSocketStrategyLookup).getClass(); + + Class value = ((ConcurrentHashMap) map.cast(mapField.get(tlsSocketStrategyLookup))).get("https").getClass(); + + Field arg1Field = value.getDeclaredField("arg$1"); + arg1Field.setAccessible(true); + Class sslConnectionSocketFactory = arg1Field.get(((ConcurrentHashMap) map.cast(mapField.get(tlsSocketStrategyLookup))).get("https")).getClass(); + + Field socketFactoryField = sslConnectionSocketFactory.getDeclaredField("socketFactory"); + socketFactoryField.setAccessible(true); + Class socketFactory = 
socketFactoryField.get(arg1Field.get(((ConcurrentHashMap) map.cast(mapField.get(tlsSocketStrategyLookup))).get("https"))).getClass(); + + Field contextField = socketFactory.getDeclaredField("context"); + contextField.setAccessible(true); + Class context = Class.forName("sun.security.ssl.SSLContextImpl"); + + Field keyManagerField = context.getDeclaredField("keyManager"); + keyManagerField.setAccessible(true); + Class keyManager = keyManagerField.get(contextField.get(socketFactoryField.get(arg1Field.get(((ConcurrentHashMap) map.cast(mapField.get(tlsSocketStrategyLookup))).get("https"))))).getClass(); + + Field credentialsMapField = keyManager.getDeclaredField("credentialsMap"); + credentialsMapField.setAccessible(true); + HashMap credentialsMap = (HashMap) credentialsMapField.get(keyManagerField.get(contextField.get(socketFactoryField.get(arg1Field.get(((ConcurrentHashMap) map.cast(mapField.get(tlsSocketStrategyLookup))).get("https")))))); + + assertNotNull(credentialsMap.get("1")); + + splitFactory3.destroy(); } @Test @@ -126,9 +350,9 @@ public void testFactoryDestroy() throws Exception { @Test public void testFactoryConsumerInstantiation() throws Exception { CustomStorageWrapper customStorageWrapper = Mockito.mock(CustomStorageWrapper.class); - SafeUserStorageWrapper safeUserStorageWrapper = Mockito.mock(SafeUserStorageWrapper.class); + UserStorageWrapper userStorageWrapper = Mockito.mock(UserStorageWrapper.class); TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetrySynchronizer.class); - Mockito.when(safeUserStorageWrapper.connect()).thenReturn(true); + when(userStorageWrapper.connect()).thenReturn(true); SplitClientConfig splitClientConfig = SplitClientConfig.builder() .enableDebug() @@ -142,12 +366,12 @@ public void testFactoryConsumerInstantiation() throws Exception { .customStorageWrapper(customStorageWrapper) .build(); SplitFactoryImpl splitFactory = new SplitFactoryImpl(API_KEY, splitClientConfig, customStorageWrapper); - Field 
splitFactoryImpl = SplitFactoryImpl.class.getDeclaredField("_safeUserStorageWrapper"); + Field splitFactoryImpl = SplitFactoryImpl.class.getDeclaredField("_userStorageWrapper"); splitFactoryImpl.setAccessible(true); Field modifiersField = Field.class.getDeclaredField("modifiers"); modifiersField.setAccessible(true); modifiersField.setInt(splitFactoryImpl, splitFactoryImpl.getModifiers() & ~Modifier.FINAL); - splitFactoryImpl.set(splitFactory, safeUserStorageWrapper); + splitFactoryImpl.set(splitFactory, userStorageWrapper); Field telemetryStorageProducer = SplitFactoryImpl.class.getDeclaredField("_telemetrySynchronizer"); telemetryStorageProducer.setAccessible(true); @@ -158,17 +382,29 @@ public void testFactoryConsumerInstantiation() throws Exception { assertNotNull(splitFactory.client()); assertNotNull(splitFactory.manager()); Thread.sleep(1500); - Mockito.verify(safeUserStorageWrapper, Mockito.times(1)).connect(); + Mockito.verify(userStorageWrapper, Mockito.times(1)).connect(); Mockito.verify(telemetrySynchronizer, Mockito.times(1)).synchronizeConfig(Mockito.anyObject(), Mockito.anyLong(), Mockito.anyObject(), Mockito.anyObject()); - } + Field fallbackField = SplitClientImpl.class.getDeclaredField("_fallbackTreatmentCalculator"); + fallbackField.setAccessible(true); + FallbackTreatmentCalculatorImp fallbackCalc = (FallbackTreatmentCalculatorImp) fallbackField.get(splitFactory.client()); + assertNotNull(fallbackCalc); + Field evalField = SplitClientImpl.class.getDeclaredField("_evaluator"); + evalField.setAccessible(true); + EvaluatorImp evaluatorImp = (EvaluatorImp) evalField.get(splitFactory.client()); + assertNotNull(fallbackCalc); + fallbackField = EvaluatorImp.class.getDeclaredField("_fallbackTreatmentCalculator"); + fallbackField.setAccessible(true); + fallbackCalc = (FallbackTreatmentCalculatorImp) fallbackField.get(evaluatorImp); + assertNotNull(fallbackCalc); + } @Test public void testFactoryConsumerInstantiationRetryReadiness() throws Exception { 
CustomStorageWrapper customStorageWrapper = Mockito.mock(CustomStorageWrapper.class); - SafeUserStorageWrapper safeUserStorageWrapper = Mockito.mock(SafeUserStorageWrapper.class); - Mockito.when(safeUserStorageWrapper.connect()).thenReturn(false).thenReturn(true); + UserStorageWrapper userStorageWrapper = Mockito.mock(UserStorageWrapper.class); + when(userStorageWrapper.connect()).thenReturn(false).thenReturn(true); SplitClientConfig splitClientConfig = SplitClientConfig.builder() .enableDebug() .impressionsMode(ImpressionsManager.Mode.DEBUG) @@ -181,23 +417,26 @@ public void testFactoryConsumerInstantiationRetryReadiness() throws Exception { .customStorageWrapper(customStorageWrapper) .build(); SplitFactoryImpl splitFactory = new SplitFactoryImpl(API_KEY, splitClientConfig, customStorageWrapper); - Field splitFactoryImpl = SplitFactoryImpl.class.getDeclaredField("_safeUserStorageWrapper"); + Field splitFactoryImpl = SplitFactoryImpl.class.getDeclaredField("_userStorageWrapper"); splitFactoryImpl.setAccessible(true); Field modifiersField = Field.class.getDeclaredField("modifiers"); modifiersField.setAccessible(true); modifiersField.setInt(splitFactoryImpl, splitFactoryImpl.getModifiers() & ~Modifier.FINAL); - splitFactoryImpl.set(splitFactory, safeUserStorageWrapper); + splitFactoryImpl.set(splitFactory, userStorageWrapper); assertNotNull(splitFactory.client()); assertNotNull(splitFactory.manager()); - Thread.sleep(2000); - Mockito.verify(safeUserStorageWrapper, Mockito.times(2)).connect(); + Awaitility.await() + .atMost(5L, TimeUnit.SECONDS) + .untilAsserted(() -> Assert.assertTrue(userStorageWrapper.connect())); + + Mockito.verify(userStorageWrapper, Mockito.times(2)).connect(); } @Test public void testFactoryConsumerDestroy() throws NoSuchFieldException, URISyntaxException, IllegalAccessException { CustomStorageWrapper customStorageWrapper = Mockito.mock(CustomStorageWrapper.class); - SafeUserStorageWrapper safeUserStorageWrapper = 
Mockito.mock(SafeUserStorageWrapper.class); - Mockito.when(safeUserStorageWrapper.connect()).thenReturn(false).thenReturn(true); + UserStorageWrapper userStorageWrapper = Mockito.mock(UserStorageWrapper.class); + when(userStorageWrapper.connect()).thenReturn(false).thenReturn(true); SplitClientConfig splitClientConfig = SplitClientConfig.builder() .enableDebug() .impressionsMode(ImpressionsManager.Mode.DEBUG) @@ -210,16 +449,133 @@ public void testFactoryConsumerDestroy() throws NoSuchFieldException, URISyntaxE .customStorageWrapper(customStorageWrapper) .build(); SplitFactoryImpl splitFactory = new SplitFactoryImpl(API_KEY, splitClientConfig, customStorageWrapper); - Field splitFactoryImpl = SplitFactoryImpl.class.getDeclaredField("_safeUserStorageWrapper"); + Field splitFactoryImpl = SplitFactoryImpl.class.getDeclaredField("_userStorageWrapper"); splitFactoryImpl.setAccessible(true); Field modifiersField = Field.class.getDeclaredField("modifiers"); modifiersField.setAccessible(true); modifiersField.setInt(splitFactoryImpl, splitFactoryImpl.getModifiers() & ~Modifier.FINAL); - splitFactoryImpl.set(splitFactory, safeUserStorageWrapper); + splitFactoryImpl.set(splitFactory, userStorageWrapper); splitFactory.destroy(); assertTrue(splitFactory.isDestroyed()); - Mockito.verify(safeUserStorageWrapper, Mockito.times(1)).disconnect(); + Mockito.verify(userStorageWrapper, Mockito.times(1)).disconnect(); + } + + @Test + public void testLocalhostLegacy() throws URISyntaxException, NoSuchMethodException, InvocationTargetException, IllegalAccessException, IOException { + SplitClientConfig splitClientConfig = SplitClientConfig.builder() + .setBlockUntilReadyTimeout(10000) + .build(); + SplitFactoryImpl splitFactory = new SplitFactoryImpl("localhost", splitClientConfig); + + Method method = SplitFactoryImpl.class.getDeclaredMethod("createSplitChangeFetcher", SplitClientConfig.class); + method.setAccessible(true); + Object splitChangeFetcher = method.invoke(splitFactory, 
splitClientConfig); + Assert.assertTrue(splitChangeFetcher instanceof LegacyLocalhostSplitChangeFetcher); } -} + @Test + public void testLocalhostYaml() throws URISyntaxException, NoSuchMethodException, InvocationTargetException, IllegalAccessException, IOException { + SplitClientConfig splitClientConfig = SplitClientConfig.builder() + .splitFile("src/test/resources/split.yaml") + .setBlockUntilReadyTimeout(10000) + .build(); + SplitFactoryImpl splitFactory = new SplitFactoryImpl("localhost", splitClientConfig); + + Method method = SplitFactoryImpl.class.getDeclaredMethod("createSplitChangeFetcher", SplitClientConfig.class); + method.setAccessible(true); + Object splitChangeFetcher = method.invoke(splitFactory, splitClientConfig); + Assert.assertTrue(splitChangeFetcher instanceof YamlLocalhostSplitChangeFetcher); + } + + @Test + public void testLocalhosJson() throws URISyntaxException, NoSuchMethodException, InvocationTargetException, IllegalAccessException, IOException { + SplitClientConfig splitClientConfig = SplitClientConfig.builder() + .splitFile("src/test/resources/split_init.json") + .setBlockUntilReadyTimeout(10000) + .build(); + SplitFactoryImpl splitFactory = new SplitFactoryImpl("localhost", splitClientConfig); + + Method method = SplitFactoryImpl.class.getDeclaredMethod("createSplitChangeFetcher", SplitClientConfig.class); + method.setAccessible(true); + Object splitChangeFetcher = method.invoke(splitFactory, splitClientConfig); + Assert.assertTrue(splitChangeFetcher instanceof JsonLocalhostSplitChangeFetcher); + } + + @Test + public void testLocalhostYamlInputStream() throws URISyntaxException, NoSuchMethodException, InvocationTargetException, + IllegalAccessException, IOException { + InputStream inputStream = new FileInputStream("src/test/resources/split.yaml"); + SplitClientConfig splitClientConfig = SplitClientConfig.builder() + .splitFile(inputStream, FileTypeEnum.YAML) + .setBlockUntilReadyTimeout(10000) + .build(); + SplitFactoryImpl splitFactory 
= new SplitFactoryImpl("localhost", splitClientConfig); + + Method method = SplitFactoryImpl.class.getDeclaredMethod("createSplitChangeFetcher", SplitClientConfig.class); + method.setAccessible(true); + Object splitChangeFetcher = method.invoke(splitFactory, splitClientConfig); + Assert.assertTrue(splitChangeFetcher instanceof YamlLocalhostSplitChangeFetcher); + } + + @Test + public void testLocalhosJsonInputStream() throws URISyntaxException, NoSuchMethodException, InvocationTargetException, + IllegalAccessException, IOException { + InputStream inputStream = new FileInputStream("src/test/resources/split_init.json"); + SplitClientConfig splitClientConfig = SplitClientConfig.builder() + .splitFile(inputStream, FileTypeEnum.JSON) + .setBlockUntilReadyTimeout(10000) + .build(); + SplitFactoryImpl splitFactory = new SplitFactoryImpl("localhost", splitClientConfig); + + Method method = SplitFactoryImpl.class.getDeclaredMethod("createSplitChangeFetcher", SplitClientConfig.class); + method.setAccessible(true); + Object splitChangeFetcher = method.invoke(splitFactory, splitClientConfig); + Assert.assertTrue(splitChangeFetcher instanceof JsonLocalhostSplitChangeFetcher); + } + + @Test + public void testLocalhosJsonInputStreamNull() throws URISyntaxException, NoSuchMethodException, InvocationTargetException, IllegalAccessException, IOException { + SplitClientConfig splitClientConfig = SplitClientConfig.builder() + .splitFile(null, FileTypeEnum.JSON) + .setBlockUntilReadyTimeout(10000) + .build(); + SplitFactoryImpl splitFactory = new SplitFactoryImpl("localhost", splitClientConfig); + + Method method = SplitFactoryImpl.class.getDeclaredMethod("createSplitChangeFetcher", SplitClientConfig.class); + method.setAccessible(true); + Object splitChangeFetcher = method.invoke(splitFactory, splitClientConfig); + Assert.assertTrue(splitChangeFetcher instanceof LegacyLocalhostSplitChangeFetcher); + } + + @Test + public void testLocalhosJsonInputStreamAndFileTypeNull() throws 
URISyntaxException, NoSuchMethodException, InvocationTargetException, + IllegalAccessException, IOException { + InputStream inputStream = new FileInputStream("src/test/resources/split_init.json"); + SplitClientConfig splitClientConfig = SplitClientConfig.builder() + .splitFile(inputStream, null) + .setBlockUntilReadyTimeout(10000) + .build(); + SplitFactoryImpl splitFactory = new SplitFactoryImpl("localhost", splitClientConfig); + + Method method = SplitFactoryImpl.class.getDeclaredMethod("createSplitChangeFetcher", SplitClientConfig.class); + method.setAccessible(true); + Object splitChangeFetcher = method.invoke(splitFactory, splitClientConfig); + Assert.assertTrue(splitChangeFetcher instanceof LegacyLocalhostSplitChangeFetcher); + } + + @Test + public void testLocalhosJsonInputStreamNullAndFileTypeNull() throws URISyntaxException, NoSuchMethodException, InvocationTargetException, + IllegalAccessException, IOException { + SplitClientConfig splitClientConfig = SplitClientConfig.builder() + .splitFile(null, null) + .setBlockUntilReadyTimeout(10000) + .build(); + SplitFactoryImpl splitFactory = new SplitFactoryImpl("localhost", splitClientConfig); + + Method method = SplitFactoryImpl.class.getDeclaredMethod("createSplitChangeFetcher", SplitClientConfig.class); + method.setAccessible(true); + Object splitChangeFetcher = method.invoke(splitFactory, splitClientConfig); + Assert.assertTrue(splitChangeFetcher instanceof LegacyLocalhostSplitChangeFetcher); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/SplitManagerImplTest.java b/client/src/test/java/io/split/client/SplitManagerImplTest.java index 46677282b..f3c04454f 100644 --- a/client/src/test/java/io/split/client/SplitManagerImplTest.java +++ b/client/src/test/java/io/split/client/SplitManagerImplTest.java @@ -1,168 +1,188 @@ package io.split.client; import com.google.common.collect.Lists; + import io.split.client.api.SplitView; +import io.split.client.dtos.Prerequisites; +import 
io.split.client.dtos.Split; +import io.split.client.dtos.SplitChange; +import io.split.client.utils.Json; import io.split.engine.ConditionsTestUtil; import io.split.engine.SDKReadinessGates; import io.split.engine.experiments.ParsedCondition; import io.split.engine.experiments.ParsedSplit; +import io.split.engine.experiments.SplitParser; import io.split.engine.matchers.AllKeysMatcher; import io.split.engine.matchers.CombiningMatcher; +import io.split.engine.matchers.PrerequisitesMatcher; import io.split.grammar.Treatments; import io.split.storages.SplitCacheConsumer; import io.split.telemetry.storage.InMemoryTelemetryStorage; import io.split.telemetry.storage.TelemetryStorage; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import org.mockito.Mockito; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.concurrent.TimeoutException; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; -import static org.mockito.Mockito.*; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class SplitManagerImplTest { private SplitClientConfig config = SplitClientConfig.builder().setBlockUntilReadyTimeout(100).build(); - private static TelemetryStorage TELEMETRY_STORAGE = Mockito.mock(InMemoryTelemetryStorage.class); + private static TelemetryStorage TELEMETRY_STORAGE = mock(InMemoryTelemetryStorage.class); @Before public void 
updateTelemetryStorage() { - TELEMETRY_STORAGE = Mockito.mock(InMemoryTelemetryStorage.class); + TELEMETRY_STORAGE = mock(InMemoryTelemetryStorage.class); } @Test public void splitCallWithNonExistentSplit() { String nonExistent = "nonExistent"; - SplitCacheConsumer splitCacheConsumer = Mockito.mock(SplitCacheConsumer.class); - Mockito.when(splitCacheConsumer.get(nonExistent)).thenReturn(null); + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + when(splitCacheConsumer.get(nonExistent)).thenReturn(null); SplitManagerImpl splitManager = new SplitManagerImpl(splitCacheConsumer, - Mockito.mock(SplitClientConfig.class), - Mockito.mock(SDKReadinessGates.class), TELEMETRY_STORAGE); - assertThat(splitManager.split("nonExistent"), is(nullValue())); + mock(SplitClientConfig.class), + mock(SDKReadinessGates.class), TELEMETRY_STORAGE); + Assert.assertNull(splitManager.split("nonExistent")); } @Test public void splitCallWithExistentSplit() { String existent = "existent"; - SplitCacheConsumer splitCacheConsumer = Mockito.mock(SplitCacheConsumer.class); - ParsedSplit response = ParsedSplit.createParsedSplitForTests("FeatureName", 123, true, "off", Lists.newArrayList(getTestCondition("off")), "traffic", 456L, 1); - Mockito.when(splitCacheConsumer.get(existent)).thenReturn(response); + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + Prerequisites prereq = new Prerequisites(); + prereq.featureFlagName = "feature1"; + prereq.treatments = Lists.newArrayList("on"); + ParsedSplit response = ParsedSplit.createParsedSplitForTests("FeatureName", 123, true, "off", Lists.newArrayList(getTestCondition("off")), "traffic", 456L, 1, new HashSet<>(), false, + new PrerequisitesMatcher(Lists.newArrayList(prereq))); + when(splitCacheConsumer.get(existent)).thenReturn(response); SplitManagerImpl splitManager = new SplitManagerImpl(splitCacheConsumer, - Mockito.mock(SplitClientConfig.class), - Mockito.mock(SDKReadinessGates.class), TELEMETRY_STORAGE); 
+ mock(SplitClientConfig.class), + mock(SDKReadinessGates.class), TELEMETRY_STORAGE); SplitView theOne = splitManager.split(existent); - assertThat(theOne.name, is(equalTo(response.feature()))); - assertThat(theOne.changeNumber, is(equalTo(response.changeNumber()))); - assertThat(theOne.killed, is(equalTo(response.killed()))); - assertThat(theOne.trafficType, is(equalTo(response.trafficTypeName()))); - assertThat(theOne.treatments.size(), is(equalTo(1))); - assertThat(theOne.treatments.get(0), is(equalTo("off"))); - assertThat(theOne.configs.size(), is(0)); + Assert.assertEquals(response.feature(), theOne.name); + Assert.assertEquals(response.changeNumber(), theOne.changeNumber); + Assert.assertEquals(response.killed(), theOne.killed); + Assert.assertEquals(response.trafficTypeName(), theOne.trafficType); + Assert.assertEquals(1, theOne.treatments.size()); + Assert.assertEquals("off", theOne.treatments.get(0)); + Assert.assertEquals(0, theOne.configs.size()); + Assert.assertEquals("off", theOne.defaultTreatment); + Assert.assertEquals(Lists.newArrayList(prereq), theOne.prerequisites); } @Test public void splitCallWithExistentSplitAndConfigs() { String existent = "existent"; - SplitCacheConsumer splitCacheConsumer = Mockito.mock(SplitCacheConsumer.class); + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); // Add config for only one treatment(default) Map configurations = new HashMap<>(); configurations.put(Treatments.OFF, "{\"size\" : 30}"); - ParsedSplit response = ParsedSplit.createParsedSplitForTests("FeatureName", 123, true, "off", Lists.newArrayList(getTestCondition("off")), "traffic", 456L, 1, configurations); - Mockito.when(splitCacheConsumer.get(existent)).thenReturn(response); + ParsedSplit response = ParsedSplit.createParsedSplitForTests("FeatureName", 123, true, "off", Lists.newArrayList(getTestCondition("off")), "traffic", 456L, 1, configurations, new HashSet<>(), false, null); + 
when(splitCacheConsumer.get(existent)).thenReturn(response); SplitManagerImpl splitManager = new SplitManagerImpl(splitCacheConsumer, - Mockito.mock(SplitClientConfig.class), - Mockito.mock(SDKReadinessGates.class), TELEMETRY_STORAGE); + mock(SplitClientConfig.class), + mock(SDKReadinessGates.class), TELEMETRY_STORAGE); SplitView theOne = splitManager.split(existent); - assertThat(theOne.name, is(equalTo(response.feature()))); - assertThat(theOne.changeNumber, is(equalTo(response.changeNumber()))); - assertThat(theOne.killed, is(equalTo(response.killed()))); - assertThat(theOne.trafficType, is(equalTo(response.trafficTypeName()))); - assertThat(theOne.treatments.size(), is(equalTo(1))); - assertThat(theOne.treatments.get(0), is(equalTo("off"))); - assertThat(theOne.configs.get("off"), is(equalTo("{\"size\" : 30}"))); + + Assert.assertEquals(response.feature(), theOne.name); + Assert.assertEquals(response.changeNumber(), theOne.changeNumber); + Assert.assertEquals(response.killed(), theOne.killed); + Assert.assertEquals(response.trafficTypeName(), theOne.trafficType); + Assert.assertEquals(1, theOne.treatments.size()); + Assert.assertEquals("off", theOne.treatments.get(0)); + Assert.assertEquals("{\"size\" : 30}", theOne.configs.get("off")); } @Test public void splitsCallWithNoSplit() { - SplitCacheConsumer splitCacheConsumer = Mockito.mock(SplitCacheConsumer.class); - Mockito.when(splitCacheConsumer.getAll()).thenReturn(Lists.newArrayList()); - SDKReadinessGates gates = Mockito.mock(SDKReadinessGates.class); - Mockito.when(gates.isSDKReady()).thenReturn(false); + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + when(splitCacheConsumer.getAll()).thenReturn(Lists.newArrayList()); + SDKReadinessGates gates = mock(SDKReadinessGates.class); + when(gates.isSDKReady()).thenReturn(false); SplitManagerImpl splitManager = new SplitManagerImpl(splitCacheConsumer, - Mockito.mock(SplitClientConfig.class), + mock(SplitClientConfig.class), gates, 
TELEMETRY_STORAGE); - assertThat(splitManager.splits(), is(empty())); + Assert.assertTrue(splitManager.splits().isEmpty()); verify(TELEMETRY_STORAGE, times(1)).recordNonReadyUsage(); } @Test public void splitsCallWithSplit() { - SplitCacheConsumer splitCacheConsumer = Mockito.mock(SplitCacheConsumer.class); + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); List parsedSplits = Lists.newArrayList(); - SDKReadinessGates gates = Mockito.mock(SDKReadinessGates.class); - Mockito.when(gates.isSDKReady()).thenReturn(false); - ParsedSplit response = ParsedSplit.createParsedSplitForTests("FeatureName", 123, true, "off", Lists.newArrayList(getTestCondition("off")), "traffic", 456L, 1); + SDKReadinessGates gates = mock(SDKReadinessGates.class); + when(gates.isSDKReady()).thenReturn(false); + ParsedSplit response = ParsedSplit.createParsedSplitForTests("FeatureName", 123, true, "off", Lists.newArrayList(getTestCondition("off")), "traffic", 456L, 1, new HashSet<>(), false, null); parsedSplits.add(response); - Mockito.when(splitCacheConsumer.getAll()).thenReturn(parsedSplits); + when(splitCacheConsumer.getAll()).thenReturn(parsedSplits); SplitManagerImpl splitManager = new SplitManagerImpl(splitCacheConsumer, - Mockito.mock(SplitClientConfig.class), + mock(SplitClientConfig.class), gates, TELEMETRY_STORAGE); List splits = splitManager.splits(); - assertThat(splits.size(), is(equalTo(1))); - assertThat(splits.get(0).name, is(equalTo(response.feature()))); - assertThat(splits.get(0).changeNumber, is(equalTo(response.changeNumber()))); - assertThat(splits.get(0).killed, is(equalTo(response.killed()))); - assertThat(splits.get(0).trafficType, is(equalTo(response.trafficTypeName()))); - assertThat(splits.get(0).treatments.size(), is(equalTo(1))); - assertThat(splits.get(0).treatments.get(0), is(equalTo("off"))); + Assert.assertEquals(1, splits.size()); + Assert.assertEquals(response.feature(), splits.get(0).name); + Assert.assertEquals(response.changeNumber(), 
response.changeNumber()); + Assert.assertEquals(response.killed(), splits.get(0).killed); + Assert.assertEquals(response.trafficTypeName(), splits.get(0).trafficType); + Assert.assertEquals(1, splits.get(0).treatments.size()); + Assert.assertEquals("off", splits.get(0).treatments.get(0)); verify(TELEMETRY_STORAGE, times(1)).recordNonReadyUsage(); } @Test public void splitNamesCallWithNoSplit() { - SplitCacheConsumer splitCacheConsumer = Mockito.mock(SplitCacheConsumer.class); - Mockito.when(splitCacheConsumer.getAll()).thenReturn(Lists.newArrayList()); - SDKReadinessGates gates = Mockito.mock(SDKReadinessGates.class); - Mockito.when(gates.isSDKReady()).thenReturn(false); + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + when(splitCacheConsumer.getAll()).thenReturn(Lists.newArrayList()); + SDKReadinessGates gates = mock(SDKReadinessGates.class); + when(gates.isSDKReady()).thenReturn(false); SplitManagerImpl splitManager = new SplitManagerImpl(splitCacheConsumer, - Mockito.mock(SplitClientConfig.class), + mock(SplitClientConfig.class), gates, TELEMETRY_STORAGE); - assertThat(splitManager.splitNames(), is(empty())); + Assert.assertTrue(splitManager.splitNames().isEmpty()); verify(TELEMETRY_STORAGE, times(1)).recordNonReadyUsage(); } @Test public void splitNamesCallWithSplit() { - SplitCacheConsumer splitCacheConsumer = Mockito.mock(SplitCacheConsumer.class); - List parsedSplits = Lists.newArrayList(); - ParsedSplit response = ParsedSplit.createParsedSplitForTests("FeatureName", 123, true, "off", Lists.newArrayList(getTestCondition("off")), "traffic", 456L, 1); - parsedSplits.add(response); + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + List parsedSplits = new ArrayList<>(); + parsedSplits.add("FeatureName"); - Mockito.when(splitCacheConsumer.getAll()).thenReturn(parsedSplits); + when(splitCacheConsumer.splitNames()).thenReturn(parsedSplits); SplitManagerImpl splitManager = new SplitManagerImpl(splitCacheConsumer, 
- Mockito.mock(SplitClientConfig.class), - Mockito.mock(SDKReadinessGates.class), TELEMETRY_STORAGE); + mock(SplitClientConfig.class), + mock(SDKReadinessGates.class), TELEMETRY_STORAGE); List splitNames = splitManager.splitNames(); - assertThat(splitNames.size(), is(equalTo(1))); - assertThat(splitNames.get(0), is(equalTo(response.feature()))); + Assert.assertEquals(1, splitNames.size()); + Assert.assertEquals("FeatureName",splitNames.get(0)); } @Test - public void block_until_ready_does_not_time_when_sdk_is_ready() throws TimeoutException, InterruptedException { + public void blockUntilReadyDoesNotTimeWhenSdkIsReady() throws TimeoutException, InterruptedException { SDKReadinessGates ready = mock(SDKReadinessGates.class); when(ready.waitUntilInternalReady(100)).thenReturn(true); SplitManagerImpl splitManager = new SplitManagerImpl(mock(SplitCacheConsumer.class), @@ -173,7 +193,7 @@ public void block_until_ready_does_not_time_when_sdk_is_ready() throws TimeoutEx } @Test(expected = TimeoutException.class) - public void block_until_ready_times_when_sdk_is_not_ready() throws TimeoutException, InterruptedException { + public void blockUntilReadyTimesWhenSdkIsNotReady() throws TimeoutException, InterruptedException { SDKReadinessGates ready = mock(SDKReadinessGates.class); when(ready.waitUntilInternalReady(100)).thenReturn(false); @@ -185,8 +205,59 @@ public void block_until_ready_times_when_sdk_is_not_ready() throws TimeoutExcept verify(TELEMETRY_STORAGE, times(1)).recordBURTimeout(); } + @Test + public void splitCallWithExistentSets() { + String existent = "existent"; + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + ParsedSplit response = ParsedSplit.createParsedSplitForTests("FeatureName", 123, true, "off", + Lists.newArrayList(getTestCondition("off")), "traffic", 456L, 1, new HashSet<>(Arrays.asList("set1", "set2", "set3")), false, null); + when(splitCacheConsumer.get(existent)).thenReturn(response); + + SplitManagerImpl splitManager = new 
SplitManagerImpl(splitCacheConsumer, + mock(SplitClientConfig.class), + mock(SDKReadinessGates.class), TELEMETRY_STORAGE); + SplitView theOne = splitManager.split(existent); + Assert.assertEquals(response.flagSets().size(), theOne.sets.size()); + } + + @Test + public void splitCallWithEmptySets() { + String existent = "existent"; + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + ParsedSplit response = ParsedSplit.createParsedSplitForTests("FeatureName", 123, true, "off", + Lists.newArrayList(getTestCondition("off")), "traffic", 456L, 1, null, false, null); + when(splitCacheConsumer.get(existent)).thenReturn(response); + + SplitManagerImpl splitManager = new SplitManagerImpl(splitCacheConsumer, + mock(SplitClientConfig.class), + mock(SDKReadinessGates.class), TELEMETRY_STORAGE); + SplitView theOne = splitManager.split(existent); + Assert.assertEquals(0, theOne.sets.size()); + } + private ParsedCondition getTestCondition(String treatment) { return ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(ConditionsTestUtil.partition(treatment, 10))); } -} + @Test + public void ImpressionToggleParseTest() throws IOException { + SplitParser parser = new SplitParser(); + String splits = new String(Files.readAllBytes(Paths.get("src/test/resources/splits_imp_toggle.json")), StandardCharsets.UTF_8); + SplitChange change = Json.fromJson(splits, SplitChange.class); + SplitCacheConsumer splitCacheConsumer = mock(SplitCacheConsumer.class); + for (Split split : change.featureFlags.d) { + ParsedSplit parsedSplit = parser.parse(split); + when(splitCacheConsumer.get(split.name)).thenReturn(parsedSplit); + } + SplitManagerImpl splitManager = new SplitManagerImpl(splitCacheConsumer, + mock(SplitClientConfig.class), + mock(SDKReadinessGates.class), TELEMETRY_STORAGE); + + SplitView splitView = splitManager.split("without_impression_toggle"); + assertFalse(splitView.impressionsDisabled); + splitView = 
splitManager.split("impression_toggle_on"); + assertFalse(splitView.impressionsDisabled); + splitView = splitManager.split("impression_toggle_off"); + assertTrue(splitView.impressionsDisabled); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/YamlLocalhostSplitChangeFetcherTest.java b/client/src/test/java/io/split/client/YamlLocalhostSplitChangeFetcherTest.java new file mode 100644 index 000000000..f37367ae4 --- /dev/null +++ b/client/src/test/java/io/split/client/YamlLocalhostSplitChangeFetcherTest.java @@ -0,0 +1,86 @@ +package io.split.client; + +import io.split.client.dtos.Split; +import io.split.client.dtos.SplitChange; +import io.split.client.utils.FileInputStreamProvider; +import io.split.client.utils.InputStreamProvider; +import io.split.client.utils.LocalhostUtils; +import io.split.engine.common.FetchOptions; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.mockito.Mockito; +import org.yaml.snakeyaml.Yaml; + +import java.io.File; +import java.io.IOException; +import java.io.StringWriter; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +public class YamlLocalhostSplitChangeFetcherTest { + @Rule + public TemporaryFolder folder = new TemporaryFolder(); + + @Test + public void testParseSplitChange() throws IOException { + File file = folder.newFile(SplitClientConfig.LOCALHOST_DEFAULT_FILE); + + List> allSplits = new ArrayList(); + + Map split1_user_a = new LinkedHashMap<>(); + Map split1_user_a_data = new LinkedHashMap<>(); + split1_user_a_data.put("keys", "user_a"); + split1_user_a_data.put("treatment", "off"); + split1_user_a_data.put("config", "{ \"size\" : 20 }"); + split1_user_a.put("split_1", split1_user_a_data); + allSplits.add(split1_user_a); + + Map split1_user_b = new LinkedHashMap<>(); + Map split1_user_b_data = new LinkedHashMap<>(); + split1_user_b_data.put("keys", 
"user_b"); + split1_user_b_data.put("treatment", "on"); + split1_user_b.put("split_1", split1_user_b_data); + allSplits.add(split1_user_b); + + Map split2_user_a = new LinkedHashMap<>(); + Map split2_user_a_data = new LinkedHashMap<>(); + split2_user_a_data.put("keys", "user_a"); + split2_user_a_data.put("treatment", "off"); + split2_user_a_data.put("config", "{ \"size\" : 20 }"); + split2_user_a.put("split_2", split2_user_a_data); + allSplits.add(split2_user_a); + + + Yaml yaml = new Yaml(); + StringWriter writer = new StringWriter(); + yaml.dump(allSplits, writer); + LocalhostUtils.writeFile(file, writer); + + InputStreamProvider inputStreamProvider = new FileInputStreamProvider(file.getAbsolutePath()); + YamlLocalhostSplitChangeFetcher localhostSplitChangeFetcher = new YamlLocalhostSplitChangeFetcher(inputStreamProvider); + FetchOptions fetchOptions = Mockito.mock(FetchOptions.class); + SplitChange splitChange = localhostSplitChangeFetcher.fetch(-1L, -1, fetchOptions); + + Assert.assertEquals(2, splitChange.featureFlags.d.size()); + Assert.assertEquals(-1, splitChange.featureFlags.s); + Assert.assertEquals(-1, splitChange.featureFlags.t); + + + for (Split split: splitChange.featureFlags.d) { + Assert.assertEquals("control", split.defaultTreatment); + } + } + + @Test(expected = IllegalStateException.class) + public void processTestForException() { + InputStreamProvider inputStreamProvider = new FileInputStreamProvider("src/test/resources/notExist.yaml"); + YamlLocalhostSplitChangeFetcher localhostSplitChangeFetcher = new YamlLocalhostSplitChangeFetcher(inputStreamProvider); + FetchOptions fetchOptions = Mockito.mock(FetchOptions.class); + + SplitChange splitChange = localhostSplitChangeFetcher.fetch(-1L, -1, fetchOptions); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/dtos/FallbackTreatmentCalculationImpTest.java b/client/src/test/java/io/split/client/dtos/FallbackTreatmentCalculationImpTest.java new file mode 100644 index 
000000000..854a1a0ec --- /dev/null +++ b/client/src/test/java/io/split/client/dtos/FallbackTreatmentCalculationImpTest.java @@ -0,0 +1,40 @@ +package io.split.client.dtos; + +import org.junit.Test; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsInstanceOf.instanceOf; +import static org.hamcrest.core.IsNull.notNullValue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +public class FallbackTreatmentCalculationImpTest { + + @Test + public void TestWorks() { + FallbackTreatmentsConfiguration fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration(new FallbackTreatment("on")); + FallbackTreatmentCalculator fallbackTreatmentCalculator = new FallbackTreatmentCalculatorImp(fallbackTreatmentsConfiguration); + assertEquals("on", fallbackTreatmentCalculator.resolve("anyflag", "exception").getTreatment()); + assertEquals("fallback - exception", fallbackTreatmentCalculator.resolve("anyflag", "exception").getLabel()); + + fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration(new FallbackTreatment("on"), + new HashMap() {{ put("flag", "off"); }} ); + fallbackTreatmentCalculator = new FallbackTreatmentCalculatorImp(fallbackTreatmentsConfiguration); + assertEquals("on", fallbackTreatmentCalculator.resolve("anyflag", "exception").getTreatment()); + assertEquals("fallback - exception", fallbackTreatmentCalculator.resolve("anyflag", "exception").getLabel()); + assertEquals("off", fallbackTreatmentCalculator.resolve("flag", "exception").getTreatment()); + assertEquals("fallback - exception", fallbackTreatmentCalculator.resolve("flag", "exception").getLabel()); + + fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration( + new HashMap() {{ put("flag", new FallbackTreatment("off")); }} ); + fallbackTreatmentCalculator = new 
FallbackTreatmentCalculatorImp(fallbackTreatmentsConfiguration); + assertEquals("control", fallbackTreatmentCalculator.resolve("anyflag", "exception").getTreatment()); + assertEquals("exception", fallbackTreatmentCalculator.resolve("anyflag", "exception").getLabel()); + assertEquals("off", fallbackTreatmentCalculator.resolve("flag", "exception").getTreatment()); + assertEquals("fallback - exception", fallbackTreatmentCalculator.resolve("flag", "exception").getLabel()); + } +} diff --git a/client/src/test/java/io/split/client/dtos/FallbackTreatmentConfigurationTest.java b/client/src/test/java/io/split/client/dtos/FallbackTreatmentConfigurationTest.java new file mode 100644 index 000000000..a09cf5993 --- /dev/null +++ b/client/src/test/java/io/split/client/dtos/FallbackTreatmentConfigurationTest.java @@ -0,0 +1,32 @@ +package io.split.client.dtos; + +import org.junit.Test; + +import java.util.HashMap; + +import static org.junit.Assert.assertEquals; + +public class FallbackTreatmentConfigurationTest { + + @Test + public void TestWorks() { + FallbackTreatmentsConfiguration fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration(new FallbackTreatment("on")); + assertEquals("on", fallbackTreatmentsConfiguration.getGlobalFallbackTreatment().getTreatment()); + assertEquals(null, fallbackTreatmentsConfiguration.getByFlagFallbackTreatment()); + + fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration(new FallbackTreatment("on", "{\"prop\":\"val\"}"), + new HashMap() {{ put("flag", new FallbackTreatment("off", "{\"prop2\":\"val2\"}")); }} ); + assertEquals("on", fallbackTreatmentsConfiguration.getGlobalFallbackTreatment().getTreatment()); + assertEquals("{\"prop\":\"val\"}", fallbackTreatmentsConfiguration.getGlobalFallbackTreatment().getConfig()); + assertEquals(null, fallbackTreatmentsConfiguration.getGlobalFallbackTreatment().getLabel()); + assertEquals("off", 
fallbackTreatmentsConfiguration.getByFlagFallbackTreatment().get("flag").getTreatment()); + assertEquals("{\"prop2\":\"val2\"}", fallbackTreatmentsConfiguration.getByFlagFallbackTreatment().get("flag").getConfig()); + assertEquals(null, fallbackTreatmentsConfiguration.getByFlagFallbackTreatment().get("flag").getLabel()); + + fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration("on", + new HashMap() {{ put("flag", "off"); }} ); + assertEquals("on", fallbackTreatmentsConfiguration.getGlobalFallbackTreatment().getTreatment()); + assertEquals("off", fallbackTreatmentsConfiguration.getByFlagFallbackTreatment().get("flag").getTreatment()); + + } +} diff --git a/client/src/test/java/io/split/client/dtos/ImpressionCountTest.java b/client/src/test/java/io/split/client/dtos/ImpressionCountTest.java index ee154c29b..140a4b30e 100644 --- a/client/src/test/java/io/split/client/dtos/ImpressionCountTest.java +++ b/client/src/test/java/io/split/client/dtos/ImpressionCountTest.java @@ -9,7 +9,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Objects; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; diff --git a/client/src/test/java/io/split/client/dtos/KeyImpressionTest.java b/client/src/test/java/io/split/client/dtos/KeyImpressionTest.java index c4ac5d12d..11ff40b33 100644 --- a/client/src/test/java/io/split/client/dtos/KeyImpressionTest.java +++ b/client/src/test/java/io/split/client/dtos/KeyImpressionTest.java @@ -25,6 +25,7 @@ public void TestShrinkedPropertyNames() { imp.changeNumber = 123L; imp.time = 456L; imp.previousTime = 789L; + imp.properties = "{\"name\": \"value\"}"; String serialized = gson.toJson(imp); Map deSerialized = gson.fromJson(serialized, new TypeToken>() { }.getType()); @@ -65,5 +66,11 @@ public void TestShrinkedPropertyNames() { assertThat(previousTime, is(notNullValue())); assertThat(previousTime, instanceOf(Double.class)); assertThat(previousTime, 
is(789.0)); + + Object properties = deSerialized.get(KeyImpression.FIELD_PROPERTIES); + assertThat(properties, is(notNullValue())); + assertThat(properties, instanceOf(String.class)); + assertThat(properties, is("{\"name\": \"value\"}")); + } } diff --git a/client/src/test/java/io/split/client/dtos/UniqueKeysTest.java b/client/src/test/java/io/split/client/dtos/UniqueKeysTest.java new file mode 100644 index 000000000..cd078efa7 --- /dev/null +++ b/client/src/test/java/io/split/client/dtos/UniqueKeysTest.java @@ -0,0 +1,42 @@ +package io.split.client.dtos; + +import com.google.gson.Gson; +import com.google.gson.reflect.TypeToken; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsEqual.equalTo; +import static org.hamcrest.core.IsInstanceOf.instanceOf; + +public class UniqueKeysTest { + + @Test + public void TestShrinkedPropertyNames() { + Gson gson = new Gson(); + List keys = new ArrayList<>(); + keys.add("key-1"); + keys.add("key-2"); + List uniqueKeys = new ArrayList<>(); + uniqueKeys.add(new UniqueKeys.UniqueKey("feature-1", keys)); + UniqueKeys imp = new UniqueKeys(uniqueKeys); + String serialized = gson.toJson(imp); + + HashMap parsedRaw = gson.fromJson(serialized, new TypeToken>(){}.getType()); + assertThat(parsedRaw.get("keys"), instanceOf(List.class)); + List asList = (ArrayList) parsedRaw.get("keys"); + assertThat(asList.size(), is(equalTo(1))); + + Map item0 = (Map) asList.get(0); + assertThat(item0.get("f"), is(equalTo("feature-1"))); + + List ks = (List) item0.get("ks"); + assertThat(ks.get(0), is(equalTo("key-1"))); + assertThat(ks.get(1), is(equalTo("key-2"))); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/events/EventsSenderTest.java b/client/src/test/java/io/split/client/events/EventsSenderTest.java new 
file mode 100644 index 000000000..c9ddb754b --- /dev/null +++ b/client/src/test/java/io/split/client/events/EventsSenderTest.java @@ -0,0 +1,79 @@ +package io.split.client.events; + +import io.split.TestHelper; +import io.split.client.RequestDecorator; +import io.split.client.utils.SDKMetadata; +import io.split.service.SplitHttpClient; +import io.split.service.SplitHttpClientImpl; +import io.split.telemetry.storage.TelemetryRuntimeProducer; +import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; +import org.apache.hc.core5.http.HttpStatus; +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mockito; + +import java.io.IOException; +import java.lang.reflect.InvocationTargetException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.*; + +public class EventsSenderTest { + + private static final TelemetryRuntimeProducer TELEMETRY_RUNTIME_CONSUMER = Mockito + .mock(TelemetryRuntimeProducer.class); + private static final CloseableHttpClient CLOSEABLE_HTTP_CLIENT = Mockito.mock(CloseableHttpClient.class); + + @Test + public void testDefaultURL() throws URISyntaxException { + SplitHttpClient SPLIT_HTTP_CLIENT = SplitHttpClientImpl.create(CLOSEABLE_HTTP_CLIENT, + new RequestDecorator(null), "qwerty", metadata()); + URI rootTarget = URI.create("https://api.split.io"); + EventsSender fetcher = EventsSender.create(SPLIT_HTTP_CLIENT, rootTarget, TELEMETRY_RUNTIME_CONSUMER); + Assert.assertEquals("https://api.split.io/api/events/bulk", fetcher.getBulkEndpoint().toString()); + } + + @Test + public void testCustomURLNoPathNoBackslash() throws URISyntaxException { + SplitHttpClient SPLIT_HTTP_CLIENT = SplitHttpClientImpl.create(CLOSEABLE_HTTP_CLIENT, + new RequestDecorator(null), "qwerty", metadata()); + URI rootTarget = URI.create("https://kubernetesturl.com"); + EventsSender fetcher = 
EventsSender.create(SPLIT_HTTP_CLIENT, rootTarget, TELEMETRY_RUNTIME_CONSUMER); + Assert.assertEquals("https://kubernetesturl.com/api/events/bulk", fetcher.getBulkEndpoint().toString()); + } + + @Test + public void testCustomURLAppendingPath() throws URISyntaxException { + SplitHttpClient SPLIT_HTTP_CLIENT = SplitHttpClientImpl.create(CLOSEABLE_HTTP_CLIENT, + new RequestDecorator(null), "qwerty", metadata()); + URI rootTarget = URI.create("https://kubernetesturl.com/split/"); + EventsSender fetcher = EventsSender.create(SPLIT_HTTP_CLIENT, rootTarget, TELEMETRY_RUNTIME_CONSUMER); + Assert.assertEquals("https://kubernetesturl.com/split/api/events/bulk", fetcher.getBulkEndpoint().toString()); + } + + @Test + public void testCustomURLAppendingPathNoBackslash() throws URISyntaxException { + SplitHttpClient SPLIT_HTTP_CLIENT = SplitHttpClientImpl.create(CLOSEABLE_HTTP_CLIENT, + new RequestDecorator(null), "qwerty", metadata()); + URI rootTarget = URI.create("https://kubernetesturl.com/split"); + EventsSender fetcher = EventsSender.create(SPLIT_HTTP_CLIENT, rootTarget, TELEMETRY_RUNTIME_CONSUMER); + Assert.assertEquals("https://kubernetesturl.com/split/api/events/bulk", fetcher.getBulkEndpoint().toString()); + } + + @Test + public void testHttpError() throws URISyntaxException, IOException, InvocationTargetException, + IllegalAccessException, NoSuchMethodException { + URI rootTarget = URI.create("https://kubernetesturl.com/split"); + CloseableHttpClient httpClient = TestHelper.mockHttpClient("", HttpStatus.SC_BAD_REQUEST); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); + EventsSender sender = EventsSender.create(splitHtpClient, rootTarget, 
TELEMETRY_RUNTIME_CONSUMER); + // should not raise exception + sender.sendEvents(new ArrayList<>()); + } + + private SDKMetadata metadata() { + return new SDKMetadata("java-1.2.3", "1.2.3.4", "someIP"); + } + +} diff --git a/client/src/test/java/io/split/client/events/EventsTaskTest.java b/client/src/test/java/io/split/client/events/EventsTaskTest.java index a6529d343..93d5d0d50 100644 --- a/client/src/test/java/io/split/client/events/EventsTaskTest.java +++ b/client/src/test/java/io/split/client/events/EventsTaskTest.java @@ -1,83 +1,102 @@ package io.split.client.events; import io.split.client.dtos.Event; -import io.split.telemetry.storage.InMemoryTelemetryStorage; import io.split.telemetry.storage.TelemetryRuntimeProducer; -import io.split.telemetry.storage.TelemetryStorage; -import org.apache.hc.client5.http.classic.methods.HttpUriRequest; -import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; -import org.apache.hc.client5.http.impl.classic.HttpClients; -import org.hamcrest.Matchers; import org.junit.Assert; import org.junit.Test; import org.mockito.Mockito; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; - public class EventsTaskTest { - private static final TelemetryStorage TELEMETRY_STORAGE = Mockito.mock(InMemoryTelemetryStorage.class); + private static final EventsSender EVENTS_SENDER = Mockito.mock(EventsSender.class); @Test - public void testDefaultURL() throws URISyntaxException { - URI rootTarget = URI.create("https://api.split.io"); - CloseableHttpClient httpClient = HttpClients.custom().build(); - EventsStorage eventsStorage = Mockito.mock(EventsStorage.class); - EventsTask fetcher = EventsTask.create(httpClient, rootTarget, 5, 5, 5, TELEMETRY_STORAGE, eventsStorage, eventsStorage); - Assert.assertThat(fetcher.getTarget().toString(), Matchers.is(Matchers.equalTo("https://api.split.io/api/events/bulk"))); - } + 
public void testEventsAreSending() throws InterruptedException { + TelemetryRuntimeProducer telemetryRuntimeProducer = Mockito.mock(TelemetryRuntimeProducer.class); + EventsStorage eventsStorage = new InMemoryEventsStorage(10000, telemetryRuntimeProducer); + EventsSender eventsSender = Mockito.mock(EventsSender.class); + EventsTask eventClient = new EventsTask(eventsStorage, + 2000, + eventsSender, + null); + eventClient.start(); - @Test - public void testCustomURLNoPathNoBackslash() throws URISyntaxException { - URI rootTarget = URI.create("https://kubernetesturl.com"); - CloseableHttpClient httpClient = HttpClients.custom().build(); - EventsStorage eventsStorage = Mockito.mock(EventsStorage.class); - EventsTask fetcher = EventsTask.create(httpClient, rootTarget, 5, 5, 5, TELEMETRY_STORAGE, eventsStorage, eventsStorage); - Assert.assertThat(fetcher.getTarget().toString(), Matchers.is(Matchers.equalTo("https://kubernetesturl.com/api/events/bulk"))); + for (int i = 0; i < 159; ++i) { + Event event = new Event(); + eventsStorage.track(event, 1024 * 32); + } + + Thread.sleep(1000); + + Event event = new Event(); + eventsStorage.track(event, 1024 * 32); + Thread.sleep(2000); + Mockito.verify(eventsSender, Mockito.times(1)).sendEvents(Mockito.anyObject()); } @Test - public void testCustomURLAppendingPath() throws URISyntaxException { - URI rootTarget = URI.create("https://kubernetesturl.com/split/"); - CloseableHttpClient httpClient = HttpClients.custom().build(); - EventsStorage eventsStorage = Mockito.mock(EventsStorage.class); - EventsTask fetcher = EventsTask.create(httpClient, rootTarget, 5, 5, 5, TELEMETRY_STORAGE, eventsStorage, eventsStorage); - Assert.assertThat(fetcher.getTarget().toString(), Matchers.is(Matchers.equalTo("https://kubernetesturl.com/split/api/events/bulk"))); + public void 
testEventsWhenCloseTask() throws InterruptedException { + TelemetryRuntimeProducer telemetryRuntimeProducer = Mockito.mock(TelemetryRuntimeProducer.class); + EventsSender eventsSender = Mockito.mock(EventsSender.class); + EventsStorage eventsStorage = new InMemoryEventsStorage(10000, telemetryRuntimeProducer); + EventsTask eventClient = new EventsTask(eventsStorage, + 2000, + eventsSender, + null); + + for (int i = 0; i < 159; ++i) { + Event event = new Event(); + eventsStorage.track(event, 1024 * 32); + } + + eventClient.close(); + Thread.sleep(2000); + Mockito.verify(eventsSender, Mockito.times(1)).sendEvents(Mockito.anyObject()); } @Test - public void testCustomURLAppendingPathNoBackslash() throws URISyntaxException { - URI rootTarget = URI.create("https://kubernetesturl.com/split"); - CloseableHttpClient httpClient = HttpClients.custom().build(); - EventsStorage eventsStorage = Mockito.mock(EventsStorage.class); - EventsTask fetcher = EventsTask.create(httpClient, rootTarget, 5, 5, 5, TELEMETRY_STORAGE, eventsStorage, eventsStorage); - Assert.assertThat(fetcher.getTarget().toString(), Matchers.is(Matchers.equalTo("https://kubernetesturl.com/split/api/events/bulk"))); + public void testCheckQueFull() { + TelemetryRuntimeProducer telemetryRuntimeProducer = Mockito.mock(TelemetryRuntimeProducer.class); + EventsStorage eventsStorage = new InMemoryEventsStorage(10, telemetryRuntimeProducer); + EventsTask eventClient = new EventsTask(eventsStorage, + 2000, + EVENTS_SENDER, + null); + + for (int i = 0; i < 10; ++i) { + Event event = new Event(); + eventsStorage.track(event, 1024 * 32); + } + Assert.assertTrue(eventsStorage.isFull()); } @Test - public void testEventsFlushedWhenSizeLimitReached() throws URISyntaxException, InterruptedException, IOException { + public void testTimesSendingEvents() throws InterruptedException { TelemetryRuntimeProducer telemetryRuntimeProducer = 
Mockito.mock(TelemetryRuntimeProducer.class); - CloseableHttpClient client = Mockito.mock(CloseableHttpClient.class); - EventsStorage eventsStorage = new InMemoryEventsStorage(10000, telemetryRuntimeProducer); - EventsTask eventClient = new EventsTask(eventsStorage, eventsStorage, - client, - URI.create("https://kubernetesturl.com/split"), - 10000, // Long queue so it doesn't flush by # of events - 100000, // Long period so it doesn't flush by timeout expiration. - 0, TELEMETRY_STORAGE); + EventsSender eventsSender = Mockito.mock(EventsSender.class); + EventsStorage eventsStorage = new InMemoryEventsStorage(100, telemetryRuntimeProducer); + EventsTask eventClient = new EventsTask(eventsStorage, + 2000, + eventsSender, + null); + eventClient.start(); - for (int i = 0; i < 159; ++i) { + for (int i = 0; i < 10; ++i) { Event event = new Event(); - eventsStorage.track(event, 1024 * 32); // 159 32kb events should be about to flush + eventsStorage.track(event, 1024 * 32); } - Thread.sleep(2000); - Mockito.verifyZeroInteractions(client); + Thread.sleep(3000); + Mockito.verify(eventsSender, Mockito.times(1)).sendEvents(Mockito.anyObject()); - Event event = new Event(); - eventsStorage.track(event, 1024 * 32); // 159 32kb events should be about to flush - Thread.sleep(2000); - Mockito.verify(client, Mockito.times(1)).execute((HttpUriRequest) Mockito.any()); + for (int i = 0; i < 10; ++i) { + Event event = new Event(); + eventsStorage.track(event, 1024 * 32); + } + + Thread.sleep(3000); + Mockito.verify(eventsSender, Mockito.times(2)).sendEvents(Mockito.anyObject()); + eventClient.close(); + Thread.sleep(1000); + Mockito.verify(eventsSender, Mockito.times(2)).sendEvents(Mockito.anyObject()); } -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/impressions/BloomFilterImpTest.java b/client/src/test/java/io/split/client/impressions/BloomFilterImpTest.java new file mode 100644 index 000000000..de0c8a3d0 
--- /dev/null +++ b/client/src/test/java/io/split/client/impressions/BloomFilterImpTest.java @@ -0,0 +1,42 @@ +package io.split.client.impressions; + +import io.split.client.impressions.filters.BloomFilterImp; +import org.junit.Assert; +import org.junit.Test; + +public class BloomFilterImpTest { + + @Test + public void addSomeElements(){ + BloomFilterImp bloomFilterImp = new BloomFilterImp(5,0.01); + Assert.assertEquals(true, bloomFilterImp.add("feature key-1")); + Assert.assertEquals(true, bloomFilterImp.add("feature key-2")); + Assert.assertEquals(true, bloomFilterImp.add("feature key-3")); + } + + @Test + public void checkContainSomeElements(){ + BloomFilterImp bloomFilterImp = new BloomFilterImp(5,0.01); + Assert.assertTrue(bloomFilterImp.add("feature key-1")); + Assert.assertTrue(bloomFilterImp.add("feature key-2")); + Assert.assertTrue(bloomFilterImp.add("feature key-3")); + + Assert.assertEquals(true, bloomFilterImp.contains("feature key-1")); + Assert.assertEquals(true, bloomFilterImp.contains("feature key-2")); + Assert.assertEquals(true, bloomFilterImp.contains("feature key-3")); + } + + @Test + public void removedElements(){ + BloomFilterImp bloomFilterImp = new BloomFilterImp(5,0.01); + bloomFilterImp.add("feature key-1"); + bloomFilterImp.add("feature key-2"); + bloomFilterImp.add("feature key-3"); + + bloomFilterImp.clear(); + + Assert.assertEquals(false, bloomFilterImp.contains("feature key-1")); + Assert.assertEquals(false, bloomFilterImp.contains("feature key-2")); + Assert.assertEquals(false, bloomFilterImp.contains("feature key-3")); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/impressions/FilterAdapterImplTest.java b/client/src/test/java/io/split/client/impressions/FilterAdapterImplTest.java new file mode 100644 index 000000000..0c0875d64 --- /dev/null +++ b/client/src/test/java/io/split/client/impressions/FilterAdapterImplTest.java @@ -0,0 +1,48 @@ +package io.split.client.impressions; + +import 
io.split.client.impressions.filters.BloomFilterImp; +import io.split.client.impressions.filters.FilterAdapterImpl; +import org.junit.Assert; +import org.junit.Test; + +public class FilterAdapterImplTest { + + @Test + public void addSomeElements(){ + BloomFilterImp bloomFilterImp = new BloomFilterImp(5,0.01); + FilterAdapterImpl filterAdapter = new FilterAdapterImpl(bloomFilterImp); + + Assert.assertEquals(true, filterAdapter.add("feature", "key-1")); + Assert.assertEquals(true, filterAdapter.add("feature", "key-2")); + Assert.assertEquals(true, filterAdapter.add("feature", "key-3")); + } + + @Test + public void checkContainSomeElements(){ + BloomFilterImp bloomFilterImp = new BloomFilterImp(5,0.01); + FilterAdapterImpl filterAdapter = new FilterAdapterImpl(bloomFilterImp); + + Assert.assertTrue(filterAdapter.add("feature","key-1")); + Assert.assertTrue(filterAdapter.add("feature","key-2")); + Assert.assertTrue(filterAdapter.add("feature","key-3")); + + Assert.assertEquals(true, filterAdapter.contains("feature","key-1")); + Assert.assertEquals(true, filterAdapter.contains("feature","key-2")); + Assert.assertEquals(true, filterAdapter.contains("feature","key-3")); + } + + @Test + public void removedElements(){ + BloomFilterImp bloomFilterImp = new BloomFilterImp(5,0.01); + FilterAdapterImpl filterAdapter = new FilterAdapterImpl(bloomFilterImp); + filterAdapter.add("feature","key-1"); + filterAdapter.add("feature","key-2"); + filterAdapter.add("feature"," key-3"); + + filterAdapter.clear(); + + Assert.assertEquals(false, bloomFilterImp.contains("feature key-1")); + Assert.assertEquals(false, bloomFilterImp.contains("feature key-2")); + Assert.assertEquals(false, bloomFilterImp.contains("feature key-3")); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/impressions/HttpImpressionsSenderTest.java b/client/src/test/java/io/split/client/impressions/HttpImpressionsSenderTest.java index ee586ffe4..00471171e 100644 --- 
a/client/src/test/java/io/split/client/impressions/HttpImpressionsSenderTest.java +++ b/client/src/test/java/io/split/client/impressions/HttpImpressionsSenderTest.java @@ -3,9 +3,13 @@ import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import io.split.TestHelper; +import io.split.client.RequestDecorator; import io.split.client.dtos.ImpressionCount; import io.split.client.dtos.KeyImpression; import io.split.client.dtos.TestImpressions; +import io.split.client.utils.SDKMetadata; +import io.split.service.SplitHttpClient; +import io.split.service.SplitHttpClientImpl; import io.split.telemetry.storage.InMemoryTelemetryStorage; import io.split.telemetry.storage.TelemetryStorage; import org.apache.hc.client5.http.classic.methods.HttpPost; @@ -24,6 +28,7 @@ import java.lang.reflect.InvocationTargetException; import java.net.URI; import java.net.URISyntaxException; +import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -42,43 +47,63 @@ public class HttpImpressionsSenderTest { public void testDefaultURL() throws URISyntaxException { URI rootTarget = URI.create("https://api.split.io"); CloseableHttpClient httpClient = HttpClients.custom().build(); - HttpImpressionsSender fetcher = HttpImpressionsSender.create(httpClient, rootTarget, ImpressionsManager.Mode.DEBUG, TELEMETRY_STORAGE); - Assert.assertThat(fetcher.getTarget().toString(), Matchers.is(Matchers.equalTo("https://api.split.io/api/testImpressions/bulk"))); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); + HttpImpressionsSender fetcher = HttpImpressionsSender.create(splitHtpClient, rootTarget, + ImpressionsManager.Mode.DEBUG, TELEMETRY_STORAGE); + Assert.assertThat(fetcher.getTarget().toString(), + 
Matchers.is(Matchers.equalTo("https://api.split.io/api/testImpressions/bulk"))); } @Test public void testCustomURLNoPathNoBackslash() throws URISyntaxException { URI rootTarget = URI.create("https://kubernetesturl.com"); CloseableHttpClient httpClient = HttpClients.custom().build(); - HttpImpressionsSender fetcher = HttpImpressionsSender.create(httpClient, rootTarget, ImpressionsManager.Mode.DEBUG, TELEMETRY_STORAGE); - Assert.assertThat(fetcher.getTarget().toString(), Matchers.is(Matchers.equalTo("https://kubernetesturl.com/api/testImpressions/bulk"))); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); + HttpImpressionsSender fetcher = HttpImpressionsSender.create(splitHtpClient, rootTarget, + ImpressionsManager.Mode.DEBUG, TELEMETRY_STORAGE); + Assert.assertThat(fetcher.getTarget().toString(), + Matchers.is(Matchers.equalTo("https://kubernetesturl.com/api/testImpressions/bulk"))); } @Test public void testCustomURLAppendingPath() throws URISyntaxException { URI rootTarget = URI.create("https://kubernetesturl.com/split/"); CloseableHttpClient httpClient = HttpClients.custom().build(); - HttpImpressionsSender fetcher = HttpImpressionsSender.create(httpClient, rootTarget, ImpressionsManager.Mode.DEBUG, TELEMETRY_STORAGE); - Assert.assertThat(fetcher.getTarget().toString(), Matchers.is(Matchers.equalTo("https://kubernetesturl.com/split/api/testImpressions/bulk"))); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); + HttpImpressionsSender fetcher = HttpImpressionsSender.create(splitHtpClient, rootTarget, + ImpressionsManager.Mode.DEBUG, TELEMETRY_STORAGE); + 
Assert.assertThat(fetcher.getTarget().toString(), + Matchers.is(Matchers.equalTo("https://kubernetesturl.com/split/api/testImpressions/bulk"))); } @Test public void testCustomURLAppendingPathNoBackslash() throws URISyntaxException { URI rootTarget = URI.create("https://kubernetesturl.com/split"); CloseableHttpClient httpClient = HttpClients.custom().build(); - HttpImpressionsSender fetcher = HttpImpressionsSender.create(httpClient, rootTarget, ImpressionsManager.Mode.DEBUG, TELEMETRY_STORAGE); - Assert.assertThat(fetcher.getTarget().toString(), Matchers.is(Matchers.equalTo("https://kubernetesturl.com/split/api/testImpressions/bulk"))); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); + HttpImpressionsSender fetcher = HttpImpressionsSender.create(splitHtpClient, rootTarget, + ImpressionsManager.Mode.DEBUG, TELEMETRY_STORAGE); + Assert.assertThat(fetcher.getTarget().toString(), + Matchers.is(Matchers.equalTo("https://kubernetesturl.com/split/api/testImpressions/bulk"))); } @Test - public void testImpressionCountsEndpointOptimized() throws URISyntaxException, IOException, IllegalAccessException, NoSuchMethodException, InvocationTargetException { + public void testImpressionCountsEndpointOptimized() throws URISyntaxException, IOException, IllegalAccessException, + NoSuchMethodException, InvocationTargetException { URI rootTarget = URI.create("https://kubernetesturl.com/split"); // Setup response mock CloseableHttpClient httpClient = TestHelper.mockHttpClient("", HttpStatus.SC_OK); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); // Send counters - HttpImpressionsSender sender = 
HttpImpressionsSender.create(httpClient, rootTarget, ImpressionsManager.Mode.OPTIMIZED, TELEMETRY_STORAGE); + HttpImpressionsSender sender = HttpImpressionsSender.create(splitHtpClient, rootTarget, + ImpressionsManager.Mode.OPTIMIZED, TELEMETRY_STORAGE); HashMap toSend = new HashMap<>(); toSend.put(new ImpressionCounter.Key("test1", 0), 4); toSend.put(new ImpressionCounter.Key("test2", 0), 5); @@ -88,8 +113,8 @@ public void testImpressionCountsEndpointOptimized() throws URISyntaxException, I ArgumentCaptor captor = ArgumentCaptor.forClass(HttpUriRequest.class); verify(httpClient).execute(captor.capture()); HttpUriRequest request = captor.getValue(); - assertThat(request.getUri(), is(equalTo(URI.create("https://kubernetesturl.com/split/api/testImpressions/count")))); - assertThat(request.getHeaders().length, is(0)); + assertThat(request.getUri(), + is(equalTo(URI.create("https://kubernetesturl.com/split/api/testImpressions/count")))); assertThat(request, instanceOf(HttpPost.class)); HttpPost asPostRequest = (HttpPost) request; InputStreamReader reader = new InputStreamReader(asPostRequest.getEntity().getContent()); @@ -101,67 +126,74 @@ public void testImpressionCountsEndpointOptimized() throws URISyntaxException, I } @Test - public void testImpressionCountsEndpointDebug() throws URISyntaxException, IOException, IllegalAccessException, NoSuchMethodException, InvocationTargetException { + public void testImpressionBulksEndpoint() throws URISyntaxException, IOException, IllegalAccessException, + NoSuchMethodException, InvocationTargetException { URI rootTarget = URI.create("https://kubernetesturl.com/split"); // Setup response mock CloseableHttpClient httpClient = TestHelper.mockHttpClient("", HttpStatus.SC_OK); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); - // 
Send counters - HttpImpressionsSender sender = HttpImpressionsSender.create(httpClient, rootTarget, ImpressionsManager.Mode.DEBUG, TELEMETRY_STORAGE); - HashMap toSend = new HashMap<>(); - toSend.put(new ImpressionCounter.Key("test1", 0), 4); - toSend.put(new ImpressionCounter.Key("test2", 0), 5); - sender.postCounters(toSend); - - // Assert that the HTTP client was not called - verify(httpClient, Mockito.never()).execute(Mockito.any()); - } - - @Test - public void testImpressionBulksEndpoint() throws URISyntaxException, IOException, IllegalAccessException, NoSuchMethodException, InvocationTargetException { - URI rootTarget = URI.create("https://kubernetesturl.com/split"); - - // Setup response mock - CloseableHttpClient httpClient = TestHelper.mockHttpClient("", HttpStatus.SC_OK); - - HttpImpressionsSender sender = HttpImpressionsSender.create(httpClient, rootTarget, ImpressionsManager.Mode.OPTIMIZED, TELEMETRY_STORAGE); + HttpImpressionsSender sender = HttpImpressionsSender.create(splitHtpClient, rootTarget, + ImpressionsManager.Mode.OPTIMIZED, TELEMETRY_STORAGE); // Send impressions List toSend = Arrays.asList(new TestImpressions("t1", Arrays.asList( - KeyImpression.fromImpression(new Impression("k1", null, "t1", "on", 123L, "r1", 456L, null)), - KeyImpression.fromImpression(new Impression("k2", null, "t1", "on", 123L, "r1", 456L, null)), - KeyImpression.fromImpression(new Impression("k3", null, "t1", "on", 123L, "r1", 456L, null)) - )), new TestImpressions("t2", Arrays.asList( - KeyImpression.fromImpression(new Impression("k1", null, "t2", "on", 123L, "r1", 456L, null)), - KeyImpression.fromImpression(new Impression("k2", null, "t2", "on", 123L, "r1", 456L, null)), - KeyImpression.fromImpression(new Impression("k3", null, "t2", "on", 123L, "r1", 456L, null)) - ))); + KeyImpression.fromImpression(new Impression("k1", null, "t1", "on", 123L, "r1", 456L, null, null)), + KeyImpression.fromImpression(new Impression("k2", 
null, "t1", "on", 123L, "r1", 456L, null, null)), + KeyImpression.fromImpression(new Impression("k3", null, "t1", "on", 123L, "r1", 456L, null, null)))), + new TestImpressions("t2", Arrays.asList( + KeyImpression.fromImpression(new Impression("k1", null, "t2", "on", 123L, "r1", 456L, null, null)), + KeyImpression.fromImpression(new Impression("k2", null, "t2", "on", 123L, "r1", 456L, null, null)), + KeyImpression.fromImpression(new Impression("k3", null, "t2", "on", 123L, "r1", 456L, null, null))))); sender.postImpressionsBulk(toSend); // Capture outgoing request and validate it ArgumentCaptor captor = ArgumentCaptor.forClass(HttpUriRequest.class); verify(httpClient).execute(captor.capture()); HttpUriRequest request = captor.getValue(); - assertThat(request.getUri(), is(equalTo(URI.create("https://kubernetesturl.com/split/api/testImpressions/bulk")))); - assertThat(request.getHeaders().length, is(1)); + assertThat(request.getUri(), + is(equalTo(URI.create("https://kubernetesturl.com/split/api/testImpressions/bulk")))); assertThat(request.getFirstHeader("SplitSDKImpressionsMode").getValue(), is(equalTo("OPTIMIZED"))); assertThat(request, instanceOf(HttpPost.class)); HttpPost asPostRequest = (HttpPost) request; InputStreamReader reader = new InputStreamReader(asPostRequest.getEntity().getContent()); Gson gson = new Gson(); - List payload = gson.fromJson(reader, new TypeToken>() { }.getType()); + List payload = gson.fromJson(reader, new TypeToken>() { + }.getType()); assertThat(payload.size(), is(equalTo(2))); // Do the same flow for imrpessionsMode = debug CloseableHttpClient httpClientDebugMode = TestHelper.mockHttpClient("", HttpStatus.SC_OK); + SplitHttpClient splitHtpClient2 = SplitHttpClientImpl.create(httpClientDebugMode, new RequestDecorator(null), + "qwerty", metadata()); - sender = HttpImpressionsSender.create(httpClientDebugMode, rootTarget, ImpressionsManager.Mode.DEBUG, 
TELEMETRY_STORAGE); + sender = HttpImpressionsSender.create(splitHtpClient2, rootTarget, ImpressionsManager.Mode.DEBUG, + TELEMETRY_STORAGE); sender.postImpressionsBulk(toSend); captor = ArgumentCaptor.forClass(HttpUriRequest.class); verify(httpClientDebugMode).execute(captor.capture()); request = captor.getValue(); - assertThat(request.getHeaders().length, is(1)); assertThat(request.getFirstHeader("SplitSDKImpressionsMode").getValue(), is(equalTo("DEBUG"))); } + + @Test + public void testHttpError() throws URISyntaxException, IOException, IllegalAccessException, NoSuchMethodException, + InvocationTargetException { + URI rootTarget = URI.create("https://kubernetesturl.com/split"); + CloseableHttpClient httpClient = TestHelper.mockHttpClient("", HttpStatus.SC_BAD_REQUEST); + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); + HttpImpressionsSender sender = HttpImpressionsSender.create(splitHtpClient, rootTarget, + ImpressionsManager.Mode.OPTIMIZED, TELEMETRY_STORAGE); + // Should not raise exception + sender.postImpressionsBulk(new ArrayList<>()); + sender.postCounters(new HashMap<>()); + } + + private SDKMetadata metadata() { + return new SDKMetadata("java-1.2.3", "1.2.3.4", "someIP"); + } + } diff --git a/client/src/test/java/io/split/client/impressions/ImpressionHasherTest.java b/client/src/test/java/io/split/client/impressions/ImpressionHasherTest.java index 86214d777..031c1aa8c 100644 --- a/client/src/test/java/io/split/client/impressions/ImpressionHasherTest.java +++ b/client/src/test/java/io/split/client/impressions/ImpressionHasherTest.java @@ -18,7 +18,7 @@ public void works() { System.currentTimeMillis(), "someLabel", 123L, - null); + null, null); // Different feature Impression imp2 = new Impression("someKey", @@ -28,7 +28,7 @@ public void works() { System.currentTimeMillis(), "someLabel", 123L, - null); + null, null); 
assertThat(ImpressionHasher.process(imp1), not(equalTo(ImpressionHasher.process(imp2)))); @@ -40,7 +40,7 @@ public void works() { System.currentTimeMillis(), "someLabel", 123L, - null); + null, null); assertThat(ImpressionHasher.process(imp1), not(equalTo(ImpressionHasher.process(imp2)))); // different changeNumber @@ -51,7 +51,7 @@ public void works() { System.currentTimeMillis(), "someLabel", 456L, - null); + null, null); assertThat(ImpressionHasher.process(imp1), not(equalTo(ImpressionHasher.process(imp2)))); // different label @@ -62,7 +62,7 @@ public void works() { System.currentTimeMillis(), "someOtherLabel", 123L, - null); + null, null); assertThat(ImpressionHasher.process(imp1), not(equalTo(ImpressionHasher.process(imp2)))); // different treatment @@ -73,7 +73,7 @@ public void works() { System.currentTimeMillis(), "someLabel", 123L, - null); + null, null); assertThat(ImpressionHasher.process(imp1), not(equalTo(ImpressionHasher.process(imp2)))); } @@ -87,7 +87,7 @@ public void doesNotCrash() { System.currentTimeMillis(), "someLabel", 123L, - null); + null, null); assertNotNull(ImpressionHasher.process(imp1)); imp1 = new Impression(null, @@ -97,7 +97,7 @@ public void doesNotCrash() { System.currentTimeMillis(), "someLabel", 123L, - null); + null, null); assertNotNull(ImpressionHasher.process(imp1)); imp1 = new Impression(null, @@ -107,7 +107,7 @@ public void doesNotCrash() { System.currentTimeMillis(), "someLabel", null, - null); + null, null); assertNotNull(ImpressionHasher.process(imp1)); imp1 = new Impression(null, @@ -117,7 +117,7 @@ public void doesNotCrash() { System.currentTimeMillis(), null, null, - null); + null, null); assertNotNull(ImpressionHasher.process(imp1)); imp1 = new Impression(null, @@ -127,7 +127,7 @@ public void doesNotCrash() { System.currentTimeMillis(), "someLabel", null, - null); + null, null); assertNotNull(ImpressionHasher.process(imp1)); assertNull(ImpressionHasher.process(null)); } diff --git 
a/client/src/test/java/io/split/client/impressions/ImpressionObserverTest.java b/client/src/test/java/io/split/client/impressions/ImpressionObserverTest.java index 2fe3611e7..57fc258aa 100644 --- a/client/src/test/java/io/split/client/impressions/ImpressionObserverTest.java +++ b/client/src/test/java/io/split/client/impressions/ImpressionObserverTest.java @@ -40,6 +40,7 @@ private List generateImpressions(long count) { System.currentTimeMillis(), (i % 2 == 0) ? "in segment all" : "whitelisted", i * i, + null, null); imps.add(imp); } @@ -54,6 +55,7 @@ public void testBasicFunctionality() { "on", System.currentTimeMillis(), "in segment all", 1234L, + null, null); // Add 5 new impressions so that the old one is evicted and re-try the test. @@ -75,10 +77,7 @@ public void testMemoryUsageStopsWhenCacheIsFull() throws Exception { getObjectSize = objectSizeCalculatorClass.getMethod("getObjectSize", Object.class); //getObjectSize(observer); } catch (ClassNotFoundException | NoSuchMethodException e) { _log.error("This test only runs with the hotspot JVM. It's ignored locally, but mandatory on CI"); - if (!Strings.isNullOrEmpty(System.getenv("CI"))) { // If the CI environment variable is present - throw new Exception("Setup CI to run with a hotspot JVM"); - } - // Otherwise just ignore this test. 
+ // TODO: Fix this test for JDK > 8 return; } @@ -111,6 +110,7 @@ private void caller(ImpressionObserver o, int count, ConcurrentLinkedQueue> impressionsCaptor; + @Captor + private ArgumentCaptor> impressionKeyList; + + @Captor + private ArgumentCaptor uniqueKeysCaptor; + @Captor private ArgumentCaptor> impressionCountCaptor; @@ -58,18 +72,24 @@ public void works() throws URISyntaxException { ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ImpressionCounter impressionCounter = Mockito.mock(ImpressionCounter.class); + ImpressionObserver impressionObserver = new ImpressionObserver(200); + + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionDebug(false, impressionObserver); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, null, null); - ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(null, config, senderMock, null, TELEMETRY_STORAGE, storage, storage); + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, null); + treatmentLog.start(); - KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, null); - KeyImpression ki2 = keyImpression("test1", "adil", "on", 2L, 1L); - KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 2L); - KeyImpression ki4 = keyImpression("test2", "pato", "on", 4L, 3L); + KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, null, null); + KeyImpression ki2 = keyImpression("test1", "adil", "on", 2L, 1L, null); + KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 2L, null); + KeyImpression ki4 = keyImpression("test2", "pato", "on", 4L, 3L, null); - treatmentLog.track(Stream.of(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, ki1.changeNumber, 
null)).collect(Collectors.toList())); - treatmentLog.track(Stream.of(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, ki2.changeNumber, null)).collect(Collectors.toList())); - treatmentLog.track(Stream.of(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, ki3.changeNumber, null)).collect(Collectors.toList())); - treatmentLog.track(Stream.of(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, ki4.changeNumber, null)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, ki1.changeNumber, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, ki2.changeNumber, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, ki3.changeNumber, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, ki4.changeNumber, null, null), false)).collect(Collectors.toList())); // Do what the scheduler would do. 
treatmentLog.sendImpressions(); @@ -78,11 +98,140 @@ public void works() throws URISyntaxException { List captured = impressionsCaptor.getValue(); - assertThat(captured.size(), is(equalTo(2))); + Assert.assertEquals(2, captured.size()); + } + + @Test + public void testImpressionListenerOptimize() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsQueueSize(10) + .endpoint("nowhere.com", "nowhere.com") + .impressionsMode(ImpressionsManager.Mode.OPTIMIZED) + .build(); + ImpressionsStorage storage = Mockito.mock(InMemoryImpressionsStorage.class); + + ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ImpressionCounter impressionCounter = new ImpressionCounter(); + ImpressionObserver impressionObserver = new ImpressionObserver(200); + TelemetryStorageProducer telemetryStorageProducer = new InMemoryTelemetryStorage(); + + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionOptimized(true, impressionObserver, impressionCounter, telemetryStorageProducer); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, null, null); + + ImpressionListener impressionListener = Mockito.mock(AsynchronousImpressionListener.class); + + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, impressionListener); + treatmentLog.start(); + + KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L, null); + KeyImpression ki2 = keyImpression("test1", "adil", "on", 1L, 1L, null); + KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 2L, null); + KeyImpression ki4 = keyImpression("test2", "pato", "on", 4L, 3L, null); + + List impressionList = new ArrayList<>(); + impressionList.add(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, ki1.changeNumber, null, null), false)); + 
impressionList.add(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, ki2.changeNumber, null, null), false)); + impressionList.add(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, ki3.changeNumber, null, null), false)); + impressionList.add(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, ki4.changeNumber, null, null), false)); + + treatmentLog.track(impressionList); + verify(impressionListener, times(4)).log(Mockito.anyObject()); + + verify(storage).put(impressionKeyList.capture()); + + List captured = impressionKeyList.getValue(); + + Assert.assertEquals(3, captured.size()); + } + + @Test + public void testImpressionListenerDebug() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsQueueSize(6) + .endpoint("nowhere.com", "nowhere.com") + .impressionsMode(ImpressionsManager.Mode.DEBUG) + .build(); + ImpressionsStorage storage = Mockito.mock(InMemoryImpressionsStorage.class); + + ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ImpressionCounter impressionCounter = Mockito.mock(ImpressionCounter.class); + ImpressionObserver impressionObserver = new ImpressionObserver(200); + + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionDebug(true, impressionObserver); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, null, null); + + ImpressionListener impressionListener = Mockito.mock(AsynchronousImpressionListener.class); + + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, impressionListener); + treatmentLog.start(); + + KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L, null); + KeyImpression ki2 = keyImpression("test1", "adil", "on", 1L, 1L, null); + 
KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 2L, null); + KeyImpression ki4 = keyImpression("test2", "pato", "on", 4L, 3L, null); + + List impressionList = new ArrayList<>(); + impressionList.add(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, ki1.changeNumber, null, null), false)); + impressionList.add(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, ki2.changeNumber, null, null), false)); + impressionList.add(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, ki3.changeNumber, null, null), false)); + impressionList.add(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, ki4.changeNumber, null, null), false)); + + treatmentLog.track(impressionList); + verify(impressionListener, times(4)).log(Mockito.anyObject()); + + verify(storage).put(impressionKeyList.capture()); + + List captured = impressionKeyList.getValue(); + + Assert.assertEquals(4, captured.size()); + } + + @Test + public void testImpressionListenerNone() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsQueueSize(10) + .endpoint("nowhere.com", "nowhere.com") + .impressionsMode(ImpressionsManager.Mode.NONE) + .build(); + ImpressionsStorage storage = Mockito.mock(InMemoryImpressionsStorage.class); + + ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetryInMemorySubmitter.class); + ImpressionCounter impressionCounter = new ImpressionCounter(); + UniqueKeysTracker uniqueKeysTracker = new UniqueKeysTrackerImp(telemetrySynchronizer, 1000, 1000, null); + uniqueKeysTracker.start(); + + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionNone(true, uniqueKeysTracker, impressionCounter); + ProcessImpressionNone processImpressionNone = new 
ProcessImpressionNone(false, null, null); + + ImpressionListener impressionListener = Mockito.mock(AsynchronousImpressionListener.class); + + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, impressionListener); + treatmentLog.start(); + + KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L, null); + KeyImpression ki2 = keyImpression("test1", "adil", "on", 1L, 1L, null); + KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 2L, null); + KeyImpression ki4 = keyImpression("test2", "pato", "on", 4L, 3L, null); + + List impressionList = new ArrayList<>(); + impressionList.add(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, ki1.changeNumber, null, null), false)); + impressionList.add(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, ki2.changeNumber, null, null), false)); + impressionList.add(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, ki3.changeNumber, null, null), false)); + impressionList.add(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, ki4.changeNumber, null, null), false)); + + treatmentLog.track(impressionList); + verify(impressionListener, times(4)).log(Mockito.anyObject()); + + verify(storage).put(impressionKeyList.capture()); + + List captured = impressionKeyList.getValue(); + + Assert.assertEquals(0, captured.size()); } @Test - public void worksButDropsImpressions() throws URISyntaxException { + public void worksButDropsImpressions() { SplitClientConfig config = SplitClientConfig.builder() .impressionsQueueSize(3) @@ -92,19 +241,25 @@ public void worksButDropsImpressions() throws URISyntaxException { ImpressionsStorage storage = new 
InMemoryImpressionsStorage(config.impressionsQueueSize()); ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ImpressionCounter impressionCounter = Mockito.mock(ImpressionCounter.class); + ImpressionObserver impressionObserver = new ImpressionObserver(200); + + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionDebug(false, impressionObserver); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, null, null); - ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(null, config, senderMock, null, TELEMETRY_STORAGE, storage, storage); + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, null); + treatmentLog.start(); // These 4 unique test name will cause 4 entries but we are caping at the first 3. - KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, null); - KeyImpression ki2 = keyImpression("test2", "adil", "on", 2L, null); - KeyImpression ki3 = keyImpression("test3", "pato", "on", 3L, null); - KeyImpression ki4 = keyImpression("test4", "pato", "on", 4L, null); + KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, null, null); + KeyImpression ki2 = keyImpression("test2", "adil", "on", 2L, null, null); + KeyImpression ki3 = keyImpression("test3", "pato", "on", 3L, null, null); + KeyImpression ki4 = keyImpression("test4", "pato", "on", 4L, null, null); - treatmentLog.track(Stream.of(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, null, null)).collect(Collectors.toList())); - treatmentLog.track(Stream.of(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, null, null)).collect(Collectors.toList())); - treatmentLog.track(Stream.of(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, null, null)).collect(Collectors.toList())); - 
treatmentLog.track(Stream.of(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, null, null)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, null, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, null, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, null, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, null, null, null), false)).collect(Collectors.toList())); // Do what the scheduler would do. treatmentLog.sendImpressions(); @@ -113,12 +268,12 @@ public void worksButDropsImpressions() throws URISyntaxException { List captured = impressionsCaptor.getValue(); - assertThat(captured.size(), is(equalTo(3))); - Mockito.verify(TELEMETRY_STORAGE, times(1)).recordImpressionStats(ImpressionsDataTypeEnum.IMPRESSIONS_DROPPED, 1); + Assert.assertEquals(3, captured.size()); + verify(TELEMETRY_STORAGE, times(1)).recordImpressionStats(ImpressionsDataTypeEnum.IMPRESSIONS_DROPPED, 1); } @Test - public void works4ImpressionsInOneTest() throws URISyntaxException { + public void works4ImpressionsInOneTest() { SplitClientConfig config = SplitClientConfig.builder() .impressionsQueueSize(10) @@ -128,19 +283,25 @@ public void works4ImpressionsInOneTest() throws URISyntaxException { ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ImpressionCounter impressionCounter = Mockito.mock(ImpressionCounter.class); + ImpressionObserver 
impressionObserver = new ImpressionObserver(200); - ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(null, config, senderMock, null, TELEMETRY_STORAGE, storage, storage); + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionDebug(false, impressionObserver); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, null, null); + + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, null); + treatmentLog.start(); // These 4 unique test name will cause 4 entries but we are caping at the first 3. - KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L); - KeyImpression ki2 = keyImpression("test1", "adil", "on", 2L, 1L); - KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 1L); - KeyImpression ki4 = keyImpression("test1", "pato", "on", 4L, 1L); + KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L, null); + KeyImpression ki2 = keyImpression("test1", "adil", "on", 2L, 1L, null); + KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 1L, null); + KeyImpression ki4 = keyImpression("test1", "pato", "on", 4L, 1L, null); - treatmentLog.track(Stream.of(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null)).collect(Collectors.toList())); - treatmentLog.track(Stream.of(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null)).collect(Collectors.toList())); - treatmentLog.track(Stream.of(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null)).collect(Collectors.toList())); - treatmentLog.track(Stream.of(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, null)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, 
ki1.treatment, ki1.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, null, null), false)).collect(Collectors.toList())); // Do what the scheduler would do. treatmentLog.sendImpressions(); @@ -149,14 +310,14 @@ public void works4ImpressionsInOneTest() throws URISyntaxException { List captured = impressionsCaptor.getValue(); - assertThat(captured.size(), is(equalTo(1))); - assertThat(captured.get(0).keyImpressions.size(), is(equalTo(4))); - assertThat(captured.get(0).keyImpressions.get(0), is(equalTo(ki1))); - Mockito.verify(TELEMETRY_STORAGE, times(4)).recordImpressionStats(ImpressionsDataTypeEnum.IMPRESSIONS_QUEUED, 1); + Assert.assertEquals(1, captured.size()); + Assert.assertEquals(4, captured.get(0).keyImpressions.size()); + Assert.assertEquals(ki1, captured.get(0).keyImpressions.get(0)); + verify(TELEMETRY_STORAGE, times(4)).recordImpressionStats(ImpressionsDataTypeEnum.IMPRESSIONS_QUEUED, 1); } @Test - public void worksNoImpressions() throws URISyntaxException { + public void worksNoImpressions() { SplitClientConfig config = SplitClientConfig.builder() .impressionsQueueSize(10) @@ -166,7 +327,13 @@ public void worksNoImpressions() throws URISyntaxException { ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); - ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(null, config, senderMock, null, 
TELEMETRY_STORAGE, storage, storage); + ImpressionCounter impressionCounter = Mockito.mock(ImpressionCounter.class); + ImpressionObserver impressionObserver = new ImpressionObserver(200); + + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionDebug(false, impressionObserver); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, null, null); + + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, null); // There are no impressions to post. @@ -177,8 +344,7 @@ public void worksNoImpressions() throws URISyntaxException { } @Test - @Ignore // TODO: This test needs to be updated - public void alreadySeenImpressionsAreMarked() throws URISyntaxException { + public void alreadySeenImpressionsAreMarked() { SplitClientConfig config = SplitClientConfig.builder() .impressionsQueueSize(10) .endpoint("nowhere.com", "nowhere.com") @@ -187,19 +353,24 @@ public void alreadySeenImpressionsAreMarked() throws URISyntaxException { ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ImpressionCounter impressionCounter = Mockito.mock(ImpressionCounter.class); + ImpressionObserver impressionObserver = new ImpressionObserver(200); - ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(null, config, senderMock, null, TELEMETRY_STORAGE, storage, storage); + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionDebug(false, impressionObserver); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, null, null); + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, 
null); + treatmentLog.start(); // These 4 unique test name will cause 4 entries but we are caping at the first 3. - KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L); - KeyImpression ki2 = keyImpression("test1", "adil2", "on", 2L, 1L); - KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 1L); - KeyImpression ki4 = keyImpression("test1", "pato2", "on", 4L, 1L); - - treatmentLog.track(Stream.of(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null)).collect(Collectors.toList())); - treatmentLog.track(Stream.of(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null)).collect(Collectors.toList())); - treatmentLog.track(Stream.of(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null)).collect(Collectors.toList())); - treatmentLog.track(Stream.of(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, null)).collect(Collectors.toList())); + KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L, null); + KeyImpression ki2 = keyImpression("test1", "adil2", "on", 2L, 1L, null); + KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 1L, null); + KeyImpression ki4 = keyImpression("test1", "pato2", "on", 4L, 1L, null); + + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, 
null, null), false)).collect(Collectors.toList())); treatmentLog.sendImpressions(); verify(senderMock).postImpressionsBulk(impressionsCaptor.capture()); @@ -207,93 +378,360 @@ public void alreadySeenImpressionsAreMarked() throws URISyntaxException { List captured = impressionsCaptor.getValue(); for (TestImpressions testImpressions : captured) { for (KeyImpression keyImpression : testImpressions.keyImpressions) { - assertThat(keyImpression.previousTime, is(equalTo(null))); + Assert.assertEquals(null, keyImpression.previousTime); } } // Do it again. Now they should all have a `seenAt` value Mockito.reset(senderMock); - treatmentLog.track(Stream.of(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null)).collect(Collectors.toList())); - treatmentLog.track(Stream.of(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null)).collect(Collectors.toList())); - treatmentLog.track(Stream.of(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null)).collect(Collectors.toList())); - treatmentLog.track(Stream.of(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, null)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, null, null), false)).collect(Collectors.toList())); + 
treatmentLog.sendImpressions(); + + verify(senderMock).postImpressionsBulk(impressionsCaptor.capture()); + + captured = impressionsCaptor.getAllValues().get(1); + for (TestImpressions testImpressions : captured) { + for (KeyImpression keyImpression : testImpressions.keyImpressions) { + assertEquals(Optional.of(keyImpression.previousTime), Optional.of(keyImpression.time)); + } + } + } + + @Test + public void testImpressionsStandaloneModeOptimizedMode() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsQueueSize(10) + .endpoint("nowhere.com", "nowhere.com") + .impressionsMode(ImpressionsManager.Mode.OPTIMIZED) + .build(); + ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); + + ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ImpressionCounter impressionCounter = new ImpressionCounter(); + ImpressionObserver impressionObserver = new ImpressionObserver(200); + TelemetryStorageProducer telemetryStorageProducer = new InMemoryTelemetryStorage(); + + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionOptimized(false, impressionObserver, impressionCounter, telemetryStorageProducer); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, null, null); + + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, null); + treatmentLog.start(); + + // These 4 unique test name will cause 4 entries but we are caping at the first 3. 
+ KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L, null); + KeyImpression ki2 = keyImpression("test1", "adil", "on", 2L, 1L, null); + KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 1L, null); + KeyImpression ki4 = keyImpression("test1", "pato", "on", 4L, 1L, null); + + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, null, null), false)).collect(Collectors.toList())); treatmentLog.sendImpressions(); verify(senderMock).postImpressionsBulk(impressionsCaptor.capture()); - captured = impressionsCaptor.getValue(); + List captured = impressionsCaptor.getValue(); + Assert.assertEquals(2, captured.get(0).keyImpressions.size()); for (TestImpressions testImpressions : captured) { for (KeyImpression keyImpression : testImpressions.keyImpressions) { - assertThat(keyImpression.previousTime, is(equalTo(keyImpression.time))); + Assert.assertEquals(null, keyImpression.previousTime); } } + // Only the first 2 impressions make it to the server + Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "adil", "on", 1L, 1L, null))); + Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "pato", "on", 3L, 1L, null))); + + treatmentLog.sendImpressionCounters(); + verify(senderMock).postCounters(impressionCountCaptor.capture()); + HashMap capturedCounts = 
impressionCountCaptor.getValue(); + Assert.assertEquals(1, capturedCounts.size()); + Assert.assertTrue(capturedCounts.entrySet().contains(new AbstractMap.SimpleEntry<>(new ImpressionCounter.Key("test1", 0), 2))); + + // Assert that the sender is never called if the counters are empty. + Mockito.reset(senderMock); + treatmentLog.sendImpressionCounters(); + verify(senderMock, times(0)).postCounters(Mockito.any()); + } + + @Test + public void testImpressionsStandaloneModeDebugMode() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsQueueSize(10) + .endpoint("nowhere.com", "nowhere.com") + .impressionsMode(ImpressionsManager.Mode.DEBUG) + .build(); + ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); + + ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ImpressionCounter impressionCounter = Mockito.mock(ImpressionCounter.class); + ImpressionObserver impressionObserver = new ImpressionObserver(200); + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionDebug(false, impressionObserver); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, null, null); + + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, null); + treatmentLog.start(); + + // These 4 unique test name will cause 4 entries but we are caping at the first 3. 
+ KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L, null); + KeyImpression ki2 = keyImpression("test1", "adil", "on", 2L, 1L, null); + KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 1L, null); + KeyImpression ki4 = keyImpression("test1", "pato", "on", 4L, 1L, null); + + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.sendImpressions(); + + verify(senderMock).postImpressionsBulk(impressionsCaptor.capture()); + + List captured = impressionsCaptor.getValue(); + Assert.assertEquals(4, captured.get(0).keyImpressions.size()); + for (TestImpressions testImpressions : captured) { + KeyImpression keyImpression1 = testImpressions.keyImpressions.get(0); + KeyImpression keyImpression2 = testImpressions.keyImpressions.get(1); + KeyImpression keyImpression3 = testImpressions.keyImpressions.get(2); + KeyImpression keyImpression4 = testImpressions.keyImpressions.get(3); + Assert.assertEquals(null, keyImpression1.previousTime); + Assert.assertEquals(Optional.of(1L), Optional.of(keyImpression2.previousTime)); + Assert.assertEquals(null, keyImpression3.previousTime); + Assert.assertEquals(Optional.of(3L), Optional.of(keyImpression4.previousTime)); + } + // Only the first 2 impressions make it to the server + 
Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "adil", "on", 1L, 1L, null))); + Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "pato", "on", 3L, 1L, null))); } - private KeyImpression keyImpression(String feature, String key, String treatment, long time, Long changeNumber) { - KeyImpression result = new KeyImpression(); - result.feature = feature; - result.keyName = key; - result.treatment = treatment; - result.time = time; - result.changeNumber = changeNumber; - return result; + @Test + public void testImpressionsStandaloneModeNoneMode() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsQueueSize(10) + .endpoint("nowhere.com", "nowhere.com") + .impressionsMode(ImpressionsManager.Mode.NONE) + .build(); + ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); + + ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetryInMemorySubmitter.class); + ImpressionCounter impressionCounter = new ImpressionCounter(); + UniqueKeysTracker uniqueKeysTracker = new UniqueKeysTrackerImp(telemetrySynchronizer, 1000, 1000, null); + uniqueKeysTracker.start(); + + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionNone(false, uniqueKeysTracker, impressionCounter); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, null, null); + + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, null); + treatmentLog.start(); + + // These 4 unique test name will cause 4 entries but we are caping at the first 3. 
+ KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L, null); + KeyImpression ki2 = keyImpression("test1", "adil", "on", 2L, 1L, null); + KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 1L, null); + KeyImpression ki4 = keyImpression("test1", "pato", "on", 4L, 1L, null); + + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.close(); + uniqueKeysTracker.stop(); + + verify(telemetrySynchronizer).synchronizeUniqueKeys(uniqueKeysCaptor.capture()); + + List uniqueKeysList = uniqueKeysCaptor.getAllValues(); + UniqueKeys uniqueKeys = uniqueKeysList.get(0); + UniqueKeys.UniqueKey uniqueKey = uniqueKeys.uniqueKeys.get(0); + Assert.assertEquals("test1", uniqueKey.featureName); + + List keysDto = uniqueKey.keysDto; + Assert.assertEquals("pato", keysDto.get(0)); + Assert.assertEquals("adil", keysDto.get(1)); + + //treatmentLog.sendImpressionCounters(); + verify(senderMock).postCounters(impressionCountCaptor.capture()); + HashMap capturedCounts = impressionCountCaptor.getValue(); + Assert.assertEquals(1, capturedCounts.size()); + Assert.assertTrue(capturedCounts.entrySet().contains(new AbstractMap.SimpleEntry<>(new ImpressionCounter.Key("test1", 0), 4))); + + // Assert that the sender is never called if the counters are empty. 
+ Mockito.reset(senderMock); + treatmentLog.sendImpressionCounters(); + verify(senderMock, times(0)).postCounters(Mockito.any()); } @Test - public void testImpressionsOptimizedMode() throws URISyntaxException { + public void testImpressionsConsumerModeOptimizedMode() { SplitClientConfig config = SplitClientConfig.builder() .impressionsQueueSize(10) .endpoint("nowhere.com", "nowhere.com") .impressionsMode(ImpressionsManager.Mode.OPTIMIZED) + .operationMode(OperationMode.CONSUMER) + .customStorageWrapper(Mockito.mock(CustomStorageWrapper.class)) .build(); ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ImpressionCounter impressionCounter = new ImpressionCounter(); + ImpressionObserver impressionObserver = new ImpressionObserver(200); + TelemetryStorageProducer telemetryStorageProducer = new InMemoryTelemetryStorage(); - ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(null, config, senderMock, null, TELEMETRY_STORAGE, storage, storage); + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionOptimized(false, impressionObserver, impressionCounter, telemetryStorageProducer); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, null, null); + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, null); + treatmentLog.start(); // These 4 unique test name will cause 4 entries but we are caping at the first 3. 
- KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L); - KeyImpression ki2 = keyImpression("test1", "adil", "on", 2L, 1L); - KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 1L); - KeyImpression ki4 = keyImpression("test1", "pato", "on", 4L, 1L); - - treatmentLog.track(Stream.of(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null)).collect(Collectors.toList())); - treatmentLog.track(Stream.of(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null)).collect(Collectors.toList())); - treatmentLog.track(Stream.of(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null)).collect(Collectors.toList())); - treatmentLog.track(Stream.of(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, null)).collect(Collectors.toList())); + KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L, null); + KeyImpression ki2 = keyImpression("test1", "adil", "on", 2L, 1L, null); + KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 1L, null); + KeyImpression ki4 = keyImpression("test1", "pato", "on", 4L, 1L, null); + + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, null, null), false)).collect(Collectors.toList())); treatmentLog.sendImpressions(); 
verify(senderMock).postImpressionsBulk(impressionsCaptor.capture()); List captured = impressionsCaptor.getValue(); - assertThat(captured.get(0).keyImpressions.size(), is(equalTo(2))); + Assert.assertEquals(2, captured.get(0).keyImpressions.size()); for (TestImpressions testImpressions : captured) { for (KeyImpression keyImpression : testImpressions.keyImpressions) { - assertThat(keyImpression.previousTime, is(equalTo(null))); + Assert.assertEquals(null, keyImpression.previousTime); } } // Only the first 2 impressions make it to the server - assertThat(captured.get(0).keyImpressions, - contains(keyImpression("test1", "adil", "on", 1L, 1L), - keyImpression("test1", "pato", "on", 3L, 1L))); + Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "adil", "on", 1L, 1L, null))); + Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "pato", "on", 3L, 1L, null))); treatmentLog.sendImpressionCounters(); verify(senderMock).postCounters(impressionCountCaptor.capture()); HashMap capturedCounts = impressionCountCaptor.getValue(); - assertThat(capturedCounts.size(), is(equalTo(1))); - assertThat(capturedCounts.entrySet(), - contains(new AbstractMap.SimpleEntry<>(new ImpressionCounter.Key("test1", 0), 4))); + Assert.assertEquals(1, capturedCounts.size()); + Assert.assertTrue(capturedCounts.entrySet().contains(new AbstractMap.SimpleEntry<>(new ImpressionCounter.Key("test1", 0), 2))); + // Assert that the sender is never called if the counters are empty. 
+ Mockito.reset(senderMock); + treatmentLog.sendImpressionCounters(); + verify(senderMock, times(0)).postCounters(Mockito.any()); + } + + @Test + public void testImpressionsConsumerModeNoneMode() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsQueueSize(10) + .endpoint("nowhere.com", "nowhere.com") + .impressionsMode(ImpressionsManager.Mode.NONE) + .operationMode(OperationMode.CONSUMER) + .customStorageWrapper(Mockito.mock(CustomStorageWrapper.class)) + .build(); + ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); + + ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetryInMemorySubmitter.class); + ImpressionCounter impressionCounter = new ImpressionCounter(); + UniqueKeysTracker uniqueKeysTracker = new UniqueKeysTrackerImp(telemetrySynchronizer, 1000, 1000, null); + uniqueKeysTracker.start(); + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionNone(false, uniqueKeysTracker, impressionCounter); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, null, null); + + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, null); + treatmentLog.start(); + + // These 4 unique test name will cause 4 entries but we are caping at the first 3. 
+ KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L, null); + KeyImpression ki2 = keyImpression("test1", "adil", "on", 2L, 1L, null); + KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 1L, null); + KeyImpression ki4 = keyImpression("test1", "pato", "on", 4L, 1L, null); + + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, null, null), false)).collect(Collectors.toList())); + uniqueKeysTracker.stop(); + treatmentLog.close(); + + verify(telemetrySynchronizer).synchronizeUniqueKeys(uniqueKeysCaptor.capture()); + + List uniqueKeysList = uniqueKeysCaptor.getAllValues(); + UniqueKeys uniqueKeys = uniqueKeysList.get(0); + UniqueKeys.UniqueKey uniqueKey = uniqueKeys.uniqueKeys.get(0); + Assert.assertEquals("test1", uniqueKey.featureName); + + List keysDto = uniqueKey.keysDto; + Assert.assertEquals("pato", keysDto.get(0)); + Assert.assertEquals("adil", keysDto.get(1)); + + //treatmentLog.sendImpressionCounters(); + verify(senderMock).postCounters(impressionCountCaptor.capture()); + HashMap capturedCounts = impressionCountCaptor.getValue(); + Assert.assertEquals(1, capturedCounts.size()); + Assert.assertTrue(capturedCounts.entrySet().contains(new AbstractMap.SimpleEntry<>(new ImpressionCounter.Key("test1", 0), 4))); // Assert that the sender is never called if the counters are empty. 
Mockito.reset(senderMock); treatmentLog.sendImpressionCounters(); - verify(senderMock, Mockito.times(0)).postCounters(Mockito.any()); + verify(senderMock, times(0)).postCounters(Mockito.any()); + } + + @Test + public void testImpressionsConsumerModeDebugMode() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsQueueSize(10) + .endpoint("nowhere.com", "nowhere.com") + .impressionsMode(ImpressionsManager.Mode.DEBUG) + .operationMode(OperationMode.CONSUMER) + .customStorageWrapper(Mockito.mock(CustomStorageWrapper.class)) + .build(); + ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); + + ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ImpressionCounter impressionCounter = Mockito.mock(ImpressionCounter.class); + ImpressionObserver impressionObserver = new ImpressionObserver(200); + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionDebug(false, impressionObserver); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, null, null); + + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, null); + treatmentLog.start(); + + // These 4 unique test name will cause 4 entries but we are caping at the first 3. 
+ KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L, null); + KeyImpression ki2 = keyImpression("test1", "adil", "on", 2L, 1L, null); + KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 1L, null); + KeyImpression ki4 = keyImpression("test1", "pato", "on", 4L, 1L, null); + + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.sendImpressions(); + + verify(senderMock).postImpressionsBulk(impressionsCaptor.capture()); + + List captured = impressionsCaptor.getValue(); + Assert.assertEquals(4, captured.get(0).keyImpressions.size()); + for (TestImpressions testImpressions : captured) { + KeyImpression keyImpression1 = testImpressions.keyImpressions.get(0); + KeyImpression keyImpression2 = testImpressions.keyImpressions.get(1); + KeyImpression keyImpression3 = testImpressions.keyImpressions.get(2); + KeyImpression keyImpression4 = testImpressions.keyImpressions.get(3); + Assert.assertEquals(null, keyImpression1.previousTime); + Assert.assertEquals(Optional.of(1L), Optional.of(keyImpression2.previousTime)); + Assert.assertEquals(null, keyImpression3.previousTime); + Assert.assertEquals(Optional.of(3L), Optional.of(keyImpression4.previousTime)); + } + // Only the first 2 impressions make it to the server + 
Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "adil", "on", 1L, 1L, null))); + Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "pato", "on", 3L, 1L, null))); } @Test - public void testCounterStandaloneMode() throws URISyntaxException { + public void testCounterStandaloneModeOptimizedMode() { SplitClientConfig config = SplitClientConfig.builder() .impressionsQueueSize(10) .endpoint("nowhere.com", "nowhere.com") @@ -302,13 +740,55 @@ public void testCounterStandaloneMode() throws URISyntaxException { ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ImpressionCounter impressionCounter = Mockito.mock(ImpressionCounter.class); + ImpressionObserver impressionObserver = new ImpressionObserver(200); + TelemetryStorageProducer telemetryStorageProducer = new InMemoryTelemetryStorage(); + + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionOptimized(false, impressionObserver, impressionCounter, telemetryStorageProducer); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, null, null); + ImpressionsManagerImpl manager = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, null); + manager.start(); + Assert.assertNotNull(manager.getCounter()); + } + @Test + public void testCounterStandaloneModeDebugMode() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsQueueSize(10) + .endpoint("nowhere.com", "nowhere.com") + .impressionsMode(ImpressionsManager.Mode.DEBUG) + .build(); + ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); + + ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ImpressionObserver impressionObserver = new ImpressionObserver(200); + 
ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionDebug(false, impressionObserver); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, null, null); - ImpressionsManagerImpl manager = ImpressionsManagerImpl.instanceForTest(null, config, senderMock, null, TELEMETRY_STORAGE, storage, storage); + ImpressionsManagerImpl manager = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, null, null); + manager.start(); + Assert.assertNull(manager.getCounter()); + } + + @Test + public void testCounterStandaloneModeNoneMode() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsQueueSize(10) + .endpoint("nowhere.com", "nowhere.com") + .impressionsMode(ImpressionsManager.Mode.NONE) + .build(); + ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); + + ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ProcessImpressionStrategy processImpressionStrategy = Mockito.mock(ProcessImpressionNone.class); + ImpressionCounter impressionCounter = Mockito.mock(ImpressionCounter.class); + + ImpressionsManagerImpl manager = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, Mockito.mock(ProcessImpressionNone.class), processImpressionStrategy, impressionCounter, null); + manager.start(); Assert.assertNotNull(manager.getCounter()); } @Test - public void testCounterConsumerMode() throws URISyntaxException { + public void testCounterConsumerModeOptimizedMode() { SplitClientConfig config = SplitClientConfig.builder() .impressionsQueueSize(10) .endpoint("nowhere.com", "nowhere.com") @@ -319,9 +799,339 @@ public void testCounterConsumerMode() throws URISyntaxException { ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + 
ProcessImpressionStrategy processImpressionStrategy = Mockito.mock(ProcessImpressionOptimized.class); + ImpressionCounter impressionCounter = Mockito.mock(ImpressionCounter.class); + + ImpressionsManagerImpl manager = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, Mockito.mock(ProcessImpressionNone.class), processImpressionStrategy, impressionCounter, null); + manager.start(); + Assert.assertNotNull(manager.getCounter()); + } + + @Test + public void testCounterConsumerModeDebugMode() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsQueueSize(10) + .endpoint("nowhere.com", "nowhere.com") + .impressionsMode(ImpressionsManager.Mode.DEBUG) + .operationMode(OperationMode.CONSUMER) + .customStorageWrapper(Mockito.mock(CustomStorageWrapper.class)) + .build(); + ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); + + ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ProcessImpressionStrategy processImpressionStrategy = Mockito.mock(ProcessImpressionDebug.class); - ImpressionsManagerImpl manager = ImpressionsManagerImpl.instanceForTest(null, config, senderMock, null, TELEMETRY_STORAGE, storage, storage); + ImpressionsManagerImpl manager = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, Mockito.mock(ProcessImpressionNone.class), processImpressionStrategy, null, null); + manager.start(); Assert.assertNull(manager.getCounter()); } -} + @Test + public void testCounterConsumerModeNoneMode() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsQueueSize(10) + .endpoint("nowhere.com", "nowhere.com") + .impressionsMode(ImpressionsManager.Mode.NONE) + .operationMode(OperationMode.CONSUMER) + .customStorageWrapper(Mockito.mock(CustomStorageWrapper.class)) + .build(); + + ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); + + ImpressionsSender 
senderMock = Mockito.mock(ImpressionsSender.class); + ProcessImpressionStrategy processImpressionStrategy = Mockito.mock(ProcessImpressionNone.class); + ImpressionCounter impressionCounter = Mockito.mock(ImpressionCounter.class); + + ImpressionsManagerImpl manager = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, Mockito.mock(ProcessImpressionNone.class), processImpressionStrategy, impressionCounter, null); + manager.start(); + Assert.assertNotNull(manager.getCounter()); + } + + @Test + public void testImpressionToggleStandaloneOptimizedMode() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsQueueSize(10) + .endpoint("nowhere.com", "nowhere.com") + .impressionsMode(ImpressionsManager.Mode.OPTIMIZED) + .build(); + ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); + + ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ImpressionCounter impressionCounter = new ImpressionCounter(); + ImpressionObserver impressionObserver = new ImpressionObserver(200); + TelemetryStorageProducer telemetryStorageProducer = new InMemoryTelemetryStorage(); + TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetryInMemorySubmitter.class); + UniqueKeysTracker uniqueKeysTracker = new UniqueKeysTrackerImp(telemetrySynchronizer, 1000, 1000, null); + uniqueKeysTracker.start(); + + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionOptimized(false, impressionObserver, impressionCounter, telemetryStorageProducer); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, uniqueKeysTracker, impressionCounter); + + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, null); + treatmentLog.start(); + + // These 4 unique test name will cause 4 entries but we are 
caping at the first 3. + KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L, null); + KeyImpression ki2 = keyImpression("test1", "mati", "on", 2L, 1L, null); + KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 1L, null); + KeyImpression ki4 = keyImpression("test1", "bilal", "on", 4L, 1L, null); + + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null), true)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, null, null), true)).collect(Collectors.toList())); + treatmentLog.sendImpressions(); + + verify(senderMock).postImpressionsBulk(impressionsCaptor.capture()); + + List captured = impressionsCaptor.getValue(); + Assert.assertEquals(2, captured.get(0).keyImpressions.size()); + for (TestImpressions testImpressions : captured) { + for (KeyImpression keyImpression : testImpressions.keyImpressions) { + Assert.assertEquals(null, keyImpression.previousTime); + } + } + // Only the first 2 impressions make it to the server + Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "adil", "on", 1L, 1L, null))); + Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "pato", "on", 3L, 1L, null))); + + HashMap> trackedKeys = ((UniqueKeysTrackerImp) uniqueKeysTracker).popAll(); + HashSet keys = new HashSet<>(); + keys.add("mati"); + keys.add("bilal"); + Assert.assertEquals(1, trackedKeys.size()); + Assert.assertEquals(keys, 
trackedKeys.get("test1")); + + treatmentLog.sendImpressionCounters(); + verify(senderMock).postCounters(impressionCountCaptor.capture()); + HashMap capturedCounts = impressionCountCaptor.getValue(); + Assert.assertEquals(1, capturedCounts.size()); + Assert.assertTrue(capturedCounts.entrySet().contains(new AbstractMap.SimpleEntry<>(new ImpressionCounter.Key("test1", 0), 2))); + + // Assert that the sender is never called if the counters are empty. + Mockito.reset(senderMock); + treatmentLog.sendImpressionCounters(); + verify(senderMock, times(0)).postCounters(Mockito.any()); + } + + @Test + public void testImpressionToggleStandaloneModeDebugMode() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsQueueSize(10) + .endpoint("nowhere.com", "nowhere.com") + .impressionsMode(ImpressionsManager.Mode.DEBUG) + .build(); + ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); + + ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ImpressionCounter impressionCounter = Mockito.mock(ImpressionCounter.class); + ImpressionObserver impressionObserver = new ImpressionObserver(200); + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionDebug(false, impressionObserver); + TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetryInMemorySubmitter.class); + UniqueKeysTracker uniqueKeysTracker = new UniqueKeysTrackerImp(telemetrySynchronizer, 1000, 1000, null); + uniqueKeysTracker.start(); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, uniqueKeysTracker, impressionCounter); + + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, null); + treatmentLog.start(); + + // These 4 unique test name will cause 4 entries but we are caping at the first 3. 
+ KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L, null); + KeyImpression ki2 = keyImpression("test1", "mati", "on", 2L, 1L, null); + KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 1L, null); + KeyImpression ki4 = keyImpression("test1", "bilal", "on", 4L, 1L, null); + + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null), true)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, null, null), true)).collect(Collectors.toList())); + treatmentLog.sendImpressions(); + + HashMap> trackedKeys = ((UniqueKeysTrackerImp) uniqueKeysTracker).popAll(); + HashSet keys = new HashSet<>(); + keys.add("mati"); + keys.add("bilal"); + Assert.assertEquals(1, trackedKeys.size()); + Assert.assertEquals(keys, trackedKeys.get("test1")); + + verify(senderMock).postImpressionsBulk(impressionsCaptor.capture()); + + List captured = impressionsCaptor.getValue(); + Assert.assertEquals(2, captured.get(0).keyImpressions.size()); + for (TestImpressions testImpressions : captured) { + KeyImpression keyImpression1 = testImpressions.keyImpressions.get(0); + KeyImpression keyImpression3 = testImpressions.keyImpressions.get(1); + Assert.assertEquals(null, keyImpression1.previousTime); + Assert.assertEquals(null, keyImpression3.previousTime); + } + // Only the first 2 impressions make it to the server + Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "adil", 
"on", 1L, 1L, null))); + Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "pato", "on", 3L, 1L, null))); + } + + @Test + public void testImpressionToggleStandaloneModeNoneMode() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsQueueSize(10) + .endpoint("nowhere.com", "nowhere.com") + .impressionsMode(ImpressionsManager.Mode.NONE) + .build(); + ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); + + ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetryInMemorySubmitter.class); + ImpressionCounter impressionCounter = new ImpressionCounter(); + UniqueKeysTracker uniqueKeysTracker = new UniqueKeysTrackerImp(telemetrySynchronizer, 1000, 1000, null); + uniqueKeysTracker.start(); + + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionNone(false, uniqueKeysTracker, impressionCounter); + ProcessImpressionNone processImpressionNone = (ProcessImpressionNone) processImpressionStrategy; + + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, null); + treatmentLog.start(); + + // These 4 unique test name will cause 4 entries but we are caping at the first 3. 
+ KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L, null); + KeyImpression ki2 = keyImpression("test1", "mati", "on", 2L, 1L, null); + KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 1L, null); + KeyImpression ki4 = keyImpression("test1", "bilal", "on", 4L, 1L, null); + + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null), true)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, null, null), true)).collect(Collectors.toList())); + treatmentLog.close(); + HashMap> trackedKeys = ((UniqueKeysTrackerImp) uniqueKeysTracker).popAll(); + uniqueKeysTracker.stop(); + + HashSet keys = new HashSet<>(); + keys.add("adil"); + keys.add("mati"); + keys.add("pato"); + keys.add("bilal"); + Assert.assertEquals(1, trackedKeys.size()); + Assert.assertEquals(keys, trackedKeys.get("test1")); + + //treatmentLog.sendImpressionCounters(); + verify(senderMock).postCounters(impressionCountCaptor.capture()); + HashMap capturedCounts = impressionCountCaptor.getValue(); + Assert.assertEquals(1, capturedCounts.size()); + Assert.assertTrue(capturedCounts.entrySet().contains(new AbstractMap.SimpleEntry<>(new ImpressionCounter.Key("test1", 0), 4))); + + // Assert that the sender is never called if the counters are empty. 
+ Mockito.reset(senderMock); + treatmentLog.sendImpressionCounters(); + verify(senderMock, times(0)).postCounters(Mockito.any()); + } + + @Test + public void testImpressionsPropertiesOptimizedMode() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsQueueSize(10) + .endpoint("nowhere.com", "nowhere.com") + .impressionsMode(ImpressionsManager.Mode.OPTIMIZED) + .operationMode(OperationMode.CONSUMER) + .customStorageWrapper(Mockito.mock(CustomStorageWrapper.class)) + .build(); + ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); + + ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ImpressionCounter impressionCounter = new ImpressionCounter(); + ImpressionObserver impressionObserver = new ImpressionObserver(200); + TelemetryStorageProducer telemetryStorageProducer = new InMemoryTelemetryStorage(); + + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionOptimized(false, impressionObserver, impressionCounter, telemetryStorageProducer); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, null, null); + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, null); + treatmentLog.start(); + + // These 4 unique test name will cause 4 entries but we are caping at the first 3. 
+ KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L, "{\"prop\":\"val\"}"); + KeyImpression ki2 = keyImpression("test1", "adil", "on", 2L, 1L, null); + KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 1L, null); + KeyImpression ki4 = keyImpression("test1", "pato", "on", 4L, 1L, null); + + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, ki1.properties), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.sendImpressions(); + + verify(senderMock).postImpressionsBulk(impressionsCaptor.capture()); + + List captured = impressionsCaptor.getValue(); + Assert.assertEquals(3, captured.get(0).keyImpressions.size()); + for (TestImpressions testImpressions : captured) { + for (KeyImpression keyImpression : testImpressions.keyImpressions) { + Assert.assertEquals(null, keyImpression.previousTime); + } + } + // impression with properties is not deduped + Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "adil", "on", 1L, 1L, "{\"prop\":\"val\"}"))); + Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "adil", "on", 2L, 1L, null))); + Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "pato", "on", 3L, 1L, null))); + + treatmentLog.sendImpressionCounters(); + 
verify(senderMock).postCounters(impressionCountCaptor.capture()); + HashMap capturedCounts = impressionCountCaptor.getValue(); + Assert.assertEquals(1, capturedCounts.size()); + Assert.assertTrue(capturedCounts.entrySet().contains(new AbstractMap.SimpleEntry<>(new ImpressionCounter.Key("test1", 0), 1))); + + // Assert that the sender is never called if the counters are empty. + Mockito.reset(senderMock); + treatmentLog.sendImpressionCounters(); + verify(senderMock, times(0)).postCounters(Mockito.any()); + } + + @Test + public void testImpressionsPropertiesDebugMode() { + SplitClientConfig config = SplitClientConfig.builder() + .impressionsQueueSize(10) + .endpoint("nowhere.com", "nowhere.com") + .impressionsMode(ImpressionsManager.Mode.DEBUG) + .operationMode(OperationMode.CONSUMER) + .customStorageWrapper(Mockito.mock(CustomStorageWrapper.class)) + .build(); + ImpressionsStorage storage = new InMemoryImpressionsStorage(config.impressionsQueueSize()); + + ImpressionsSender senderMock = Mockito.mock(ImpressionsSender.class); + ImpressionCounter impressionCounter = Mockito.mock(ImpressionCounter.class); + ImpressionObserver impressionObserver = new ImpressionObserver(200); + ProcessImpressionStrategy processImpressionStrategy = new ProcessImpressionDebug(false, impressionObserver); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(false, null, null); + + ImpressionsManagerImpl treatmentLog = ImpressionsManagerImpl.instanceForTest(config, senderMock, TELEMETRY_STORAGE, storage, storage, processImpressionNone, processImpressionStrategy, impressionCounter, null); + treatmentLog.start(); + + // These 4 unique test name will cause 4 entries but we are caping at the first 3. 
+ KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, 1L, "{\"prop\":\"val\"}"); + KeyImpression ki2 = keyImpression("test1", "adil", "on", 2L, 1L, null); + KeyImpression ki3 = keyImpression("test1", "pato", "on", 3L, 1L, null); + KeyImpression ki4 = keyImpression("test1", "pato", "on", 4L, 1L, null); + + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, "{\"prop\":\"val\"}"), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.track(Stream.of(new DecoratedImpression(new Impression(ki4.keyName, null, ki4.feature, ki4.treatment, ki4.time, null, 1L, null, null), false)).collect(Collectors.toList())); + treatmentLog.sendImpressions(); + + verify(senderMock).postImpressionsBulk(impressionsCaptor.capture()); + + List captured = impressionsCaptor.getValue(); + Assert.assertEquals(4, captured.get(0).keyImpressions.size()); + for (TestImpressions testImpressions : captured) { + KeyImpression keyImpression1 = testImpressions.keyImpressions.get(0); + KeyImpression keyImpression2 = testImpressions.keyImpressions.get(1); + KeyImpression keyImpression3 = testImpressions.keyImpressions.get(2); + KeyImpression keyImpression4 = testImpressions.keyImpressions.get(3); + Assert.assertEquals(null, keyImpression1.previousTime); + Assert.assertEquals(null, keyImpression2.previousTime); + Assert.assertEquals(null, keyImpression3.previousTime); + Assert.assertEquals(Optional.of(3L), Optional.of(keyImpression4.previousTime)); + } + // impression with properties is not deduped + 
Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "adil", "on", 1L, 1L, "{\"prop\":\"val\"}"))); + Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "adil", "on", 1L, 1L, null))); + Assert.assertTrue(captured.get(0).keyImpressions.contains(keyImpression("test1", "pato", "on", 3L, 1L, null))); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/impressions/PluggableImpressionSenderTest.java b/client/src/test/java/io/split/client/impressions/PluggableImpressionSenderTest.java new file mode 100644 index 000000000..a7be40cbb --- /dev/null +++ b/client/src/test/java/io/split/client/impressions/PluggableImpressionSenderTest.java @@ -0,0 +1,44 @@ +package io.split.client.impressions; + +import io.split.storages.pluggable.CustomStorageWrapperHasPipeline; +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mockito; +import pluggable.CustomStorageWrapper; +import pluggable.HasPipelineSupport; + +import java.util.HashMap; +import java.util.Optional; +import java.util.concurrent.ConcurrentMap; + +public class PluggableImpressionSenderTest { + + @Test + public void testPostCounters() throws Exception { + CustomStorageWrapper customStorageWrapper = Mockito.mock(CustomStorageWrapper.class); + PluggableImpressionSender redisImpressionSender = PluggableImpressionSender.create(customStorageWrapper); + + HashMap counters = new HashMap<>(); + ImpressionCounter.Key counterKey1 = new ImpressionCounter.Key("feature1", 100); + counters.put(counterKey1, 2); + redisImpressionSender.postCounters(counters); + Mockito.verify(customStorageWrapper, Mockito.times(1)).hIncrement(Mockito.eq("SPLITIO.impressions.count"), Mockito.eq("feature1::100"), Mockito.eq(2L)); + } + + @Test + public void testPostCountersHasPipeline() throws Exception { + CustomStorageWrapperHasPipeline customStorageWrapper = new CustomStorageWrapperHasPipeline(); + PluggableImpressionSender redisImpressionSender = 
PluggableImpressionSender.create(customStorageWrapper); + + HashMap counters = new HashMap<>(); + ImpressionCounter.Key counterKey1 = new ImpressionCounter.Key("feature1", 100); + counters.put(counterKey1, 2); + redisImpressionSender.postCounters(counters); + + Assert.assertTrue(customStorageWrapper instanceof HasPipelineSupport); + ConcurrentMap impressionsCount = customStorageWrapper.getImpressionsCount(); + Assert.assertTrue(impressionsCount.containsKey("feature1::100")); + String key = "feature1::100"; + Assert.assertEquals(Optional.of(2L), Optional.of(impressionsCount.get(key))); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/impressions/UniqueKeysTrackerImpTest.java b/client/src/test/java/io/split/client/impressions/UniqueKeysTrackerImpTest.java new file mode 100644 index 000000000..e758369eb --- /dev/null +++ b/client/src/test/java/io/split/client/impressions/UniqueKeysTrackerImpTest.java @@ -0,0 +1,178 @@ +package io.split.client.impressions; + +import io.split.client.dtos.UniqueKeys; +import io.split.telemetry.synchronizer.TelemetryInMemorySubmitter; +import io.split.telemetry.synchronizer.TelemetrySynchronizer; +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mockito; + +import java.lang.reflect.Field; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.ArrayList; +import java.util.concurrent.atomic.AtomicInteger; + +public class UniqueKeysTrackerImpTest { + private static TelemetrySynchronizer _telemetrySynchronizer = Mockito.mock(TelemetryInMemorySubmitter.class); + + @Test + public void addSomeElements(){ + UniqueKeysTrackerImp uniqueKeysTrackerImp = new UniqueKeysTrackerImp(_telemetrySynchronizer, 10000, 10000, null); + Assert.assertTrue(uniqueKeysTrackerImp.track("feature1","key1")); + 
Assert.assertTrue(uniqueKeysTrackerImp.track("feature1","key2")); + Assert.assertTrue(uniqueKeysTrackerImp.track("feature1","key3")); + Assert.assertTrue(uniqueKeysTrackerImp.track("feature2","key4")); + Assert.assertTrue(uniqueKeysTrackerImp.track("feature2","key5")); + + HashMap> result = uniqueKeysTrackerImp.popAll(); + Assert.assertEquals(2,result.size()); + + HashSet value1 = result.get("feature1"); + Assert.assertEquals(3,value1.size()); + Assert.assertTrue(value1.contains("key1")); + Assert.assertTrue(value1.contains("key2")); + Assert.assertTrue(value1.contains("key3")); + + HashSet value2 = result.get("feature2"); + Assert.assertEquals(2,value2.size()); + Assert.assertTrue(value2.contains("key4")); + Assert.assertTrue(value2.contains("key5")); + } + + @Test + public void addTheSameElements(){ + UniqueKeysTrackerImp uniqueKeysTrackerImp = new UniqueKeysTrackerImp(_telemetrySynchronizer, 10000, 10000, null); + Assert.assertTrue(uniqueKeysTrackerImp.track("feature1","key1")); + Assert.assertTrue(uniqueKeysTrackerImp.track("feature1","key2")); + Assert.assertTrue(uniqueKeysTrackerImp.track("feature1","key3")); + + Assert.assertFalse(uniqueKeysTrackerImp.track("feature1","key1")); + Assert.assertFalse(uniqueKeysTrackerImp.track("feature1","key2")); + Assert.assertFalse(uniqueKeysTrackerImp.track("feature1","key3")); + + HashMap> result = uniqueKeysTrackerImp.popAll(); + Assert.assertEquals(1,result.size()); + + HashSet value1 = result.get("feature1"); + Assert.assertEquals(3,value1.size()); + Assert.assertTrue(value1.contains("key1")); + Assert.assertTrue(value1.contains("key2")); + Assert.assertTrue(value1.contains("key3")); + } + + @Test + public void popAllUniqueKeys(){ + UniqueKeysTrackerImp uniqueKeysTrackerImp = new UniqueKeysTrackerImp(_telemetrySynchronizer, 10000, 10000, null); + Assert.assertTrue(uniqueKeysTrackerImp.track("feature1","key1")); + Assert.assertTrue(uniqueKeysTrackerImp.track("feature1","key2")); + 
Assert.assertTrue(uniqueKeysTrackerImp.track("feature2","key3")); + + HashMap> result = uniqueKeysTrackerImp.popAll(); + Assert.assertEquals(2,result.size()); + HashMap> resultAfterPopAll = uniqueKeysTrackerImp.popAll(); + Assert.assertEquals(0,resultAfterPopAll.size()); + } + + @Test + public void testSynchronization() throws Exception { + TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetryInMemorySubmitter.class); + UniqueKeysTrackerImp uniqueKeysTrackerImp = new UniqueKeysTrackerImp(telemetrySynchronizer, 1, 3, null); + uniqueKeysTrackerImp.start(); + Assert.assertTrue(uniqueKeysTrackerImp.track("feature1","key1")); + Assert.assertTrue(uniqueKeysTrackerImp.track("feature1","key2")); + Assert.assertTrue(uniqueKeysTrackerImp.track("feature2","key3")); + + Thread.sleep(2900); + Mockito.verify(telemetrySynchronizer, Mockito.times(1)).synchronizeUniqueKeys(Mockito.anyObject()); + Thread.sleep(2900); + Mockito.verify(telemetrySynchronizer, Mockito.times(1)).synchronizeUniqueKeys(Mockito.anyObject()); + } + + @Test + public void testStopSynchronization() throws Exception { + TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetryInMemorySubmitter.class); + UniqueKeysTrackerImp uniqueKeysTrackerImp = new UniqueKeysTrackerImp(telemetrySynchronizer, 1, 2, null); + uniqueKeysTrackerImp.start(); + Assert.assertFalse(uniqueKeysTrackerImp.getSendGuard().get()); + Assert.assertTrue(uniqueKeysTrackerImp.track("feature1","key1")); + Assert.assertTrue(uniqueKeysTrackerImp.track("feature1","key2")); + Assert.assertTrue(uniqueKeysTrackerImp.track("feature2","key3")); + + Thread.sleep(2100); + Mockito.verify(telemetrySynchronizer, Mockito.times(1)).synchronizeUniqueKeys(Mockito.anyObject()); + uniqueKeysTrackerImp.stop(); + Mockito.verify(telemetrySynchronizer, Mockito.times(1)).synchronizeUniqueKeys(Mockito.anyObject()); + } + + @Test + public void testUniqueKeysChunks() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException { 
+ UniqueKeysTrackerImp uniqueKeysTrackerImp = new UniqueKeysTrackerImp(_telemetrySynchronizer, 10000, 10000, null); + HashMap> uniqueKeysHashMap = new HashMap<>(); + HashSet feature1 = new HashSet<>(); + HashSet feature2 = new HashSet<>(); + HashSet feature3 = new HashSet<>(); + HashSet feature4 = new HashSet<>(); + HashSet feature5 = new HashSet<>(); + for (Integer i=1; i<6000; i++) { + if (i <= 1000) { + feature1.add("key" + i); + } + if (i <= 2000) { + feature2.add("key" + i); + } + if (i <= 3000) { + feature3.add("key" + i); + } + if (i <= 4000) { + feature4.add("key" + i); + } + feature5.add("key" + i); + } + uniqueKeysHashMap.put("feature1", feature1); + uniqueKeysHashMap.put("feature2", feature2); + uniqueKeysHashMap.put("feature3", feature3); + uniqueKeysHashMap.put("feature4", feature4); + uniqueKeysHashMap.put("feature5", feature5); + + List uniqueKeysFromPopAll = new ArrayList<>(); + for (Map.Entry> uniqueKeyEntry : uniqueKeysHashMap.entrySet()) { + UniqueKeys.UniqueKey uniqueKey = new UniqueKeys.UniqueKey(uniqueKeyEntry.getKey(), new ArrayList<>(uniqueKeyEntry.getValue())); + uniqueKeysFromPopAll.add(uniqueKey); + } + Method methodCapChunks = uniqueKeysTrackerImp.getClass().getDeclaredMethod("capChunksToMaxSize", List.class); + methodCapChunks.setAccessible(true); + uniqueKeysFromPopAll = (List)methodCapChunks.invoke(uniqueKeysTrackerImp, uniqueKeysFromPopAll); + + Method methodGetChunks = uniqueKeysTrackerImp.getClass().getDeclaredMethod("getChunks", List.class); + methodGetChunks.setAccessible(true); + List> keysChunks = (List>) methodGetChunks.invoke(uniqueKeysTrackerImp, uniqueKeysFromPopAll); + for (List chunk : keysChunks) { + int chunkSize = 0; + for (UniqueKeys.UniqueKey keys : chunk) { + chunkSize += keys.keysDto.size(); + } + Assert.assertTrue(chunkSize <= 5000); + } + } + + @Test + public void testTrackReachMaxKeys() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException, NoSuchFieldException { + TelemetrySynchronizer 
telemetrySynchronizer = Mockito.mock(TelemetryInMemorySubmitter.class); + UniqueKeysTrackerImp uniqueKeysTrackerImp = new UniqueKeysTrackerImp(telemetrySynchronizer, 10000, 10000, null); + for (int i=1; i<6000; i++) { + Assert.assertTrue(uniqueKeysTrackerImp.track("feature1", "key" + i)); + Assert.assertTrue(uniqueKeysTrackerImp.track("feature2", "key" + i)); + } + Mockito.verify(telemetrySynchronizer, Mockito.times(2)).synchronizeUniqueKeys(Mockito.anyObject()); + + Field getTrackerSize = uniqueKeysTrackerImp.getClass().getDeclaredField("trackerKeysSize"); + getTrackerSize.setAccessible(true); + AtomicInteger trackerSize = (AtomicInteger) getTrackerSize.get(uniqueKeysTrackerImp); + Assert.assertTrue(trackerSize.intValue() == 1998); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/impressions/strategy/ProcessImpressionDebugTest.java b/client/src/test/java/io/split/client/impressions/strategy/ProcessImpressionDebugTest.java new file mode 100644 index 000000000..68be97c58 --- /dev/null +++ b/client/src/test/java/io/split/client/impressions/strategy/ProcessImpressionDebugTest.java @@ -0,0 +1,70 @@ +package io.split.client.impressions.strategy; + +import static io.split.client.impressions.ImpressionTestUtils.keyImpression; + +import io.split.client.dtos.KeyImpression; +import io.split.client.impressions.Impression; +import io.split.client.impressions.ImpressionObserver; +import io.split.client.impressions.ImpressionsResult; +import io.split.telemetry.storage.InMemoryTelemetryStorage; +import io.split.telemetry.storage.TelemetryStorage; +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mockito; + +import java.util.ArrayList; +import java.util.List; + +public class ProcessImpressionDebugTest { + + private static final long LAST_SEEN_CACHE_SIZE = 500000; + private static TelemetryStorage TELEMETRY_STORAGE = Mockito.mock(InMemoryTelemetryStorage.class); + + @Test + public void processImpressionsWithListener(){ + 
boolean listenerEnable = true; + ImpressionObserver impressionObserver = new ImpressionObserver(LAST_SEEN_CACHE_SIZE); + ProcessImpressionDebug processImpressionDebug = new ProcessImpressionDebug(listenerEnable, impressionObserver); + + KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, null, null); + KeyImpression ki2 = keyImpression("test2", "adil", "on", 1L, null, null); + KeyImpression ki3 = keyImpression("test1", "adil", "on", 1L, null, null); + + List impressions = new ArrayList<>(); + impressions.add(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, null)); + impressions.add(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null)); + impressions.add(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null)); + + ImpressionsResult impressionsResult1 = processImpressionDebug.process(impressions); + + long pt3 = impressionsResult1.getImpressionsToQueue().get(2).pt(); + Assert.assertEquals(1, pt3); + + Assert.assertEquals(3,impressionsResult1.getImpressionsToQueue().size()); + Assert.assertEquals(3,impressionsResult1.getImpressionsToListener().size()); + } + + @Test + public void processImpressionsWithoutListener(){ + boolean listenerEnable = false; + ImpressionObserver impressionObserver = new ImpressionObserver(LAST_SEEN_CACHE_SIZE); + ProcessImpressionDebug processImpressionDebug = new ProcessImpressionDebug(listenerEnable, impressionObserver); + + KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, null, null); + KeyImpression ki2 = keyImpression("test2", "adil", "on", 1L, null, null); + KeyImpression ki3 = keyImpression("test1", "adil", "on", 1L, null, null); + + List impressions = new ArrayList<>(); + impressions.add(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, null)); + impressions.add(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null)); + 
impressions.add(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null)); + + ImpressionsResult impressionsResult1 = processImpressionDebug.process(impressions); + + long pt3 = impressionsResult1.getImpressionsToQueue().get(2).pt(); + Assert.assertEquals(1, pt3); + + Assert.assertEquals(3,impressionsResult1.getImpressionsToQueue().size()); + Assert.assertNull(impressionsResult1.getImpressionsToListener()); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/impressions/strategy/ProcessImpressionNoneTest.java b/client/src/test/java/io/split/client/impressions/strategy/ProcessImpressionNoneTest.java new file mode 100644 index 000000000..1debedd1e --- /dev/null +++ b/client/src/test/java/io/split/client/impressions/strategy/ProcessImpressionNoneTest.java @@ -0,0 +1,73 @@ +package io.split.client.impressions.strategy; + +import io.split.client.dtos.KeyImpression; +import io.split.client.impressions.Impression; +import io.split.client.impressions.ImpressionsResult; +import io.split.client.impressions.UniqueKeysTrackerImp; +import io.split.client.impressions.ImpressionCounter; +import io.split.telemetry.storage.InMemoryTelemetryStorage; +import io.split.telemetry.storage.TelemetryStorage; +import io.split.telemetry.synchronizer.TelemetryInMemorySubmitter; +import io.split.telemetry.synchronizer.TelemetrySynchronizer; +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mockito; + +import java.util.*; + +import static io.split.client.impressions.ImpressionTestUtils.keyImpression; + +public class ProcessImpressionNoneTest { + + private static TelemetryStorage TELEMETRY_STORAGE = Mockito.mock(InMemoryTelemetryStorage.class); + + @Test + public void processImpressionsWithListener(){ + boolean listenerEnable = true; + ImpressionCounter counter = new ImpressionCounter(); + TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetryInMemorySubmitter.class); + UniqueKeysTrackerImp 
uniqueKeysTracker = new UniqueKeysTrackerImp(telemetrySynchronizer, 10000, 10000, null); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(listenerEnable, uniqueKeysTracker, counter); + + KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, null, null); + KeyImpression ki2 = keyImpression("test2", "adil", "on", 1L, null, null); + KeyImpression ki3 = keyImpression("test1", "adil", "on", 1L, null, null); + + List impressions = new ArrayList<>(); + impressions.add(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, null)); + impressions.add(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null)); + impressions.add(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null)); + + ImpressionsResult impressionsResult1 = processImpressionNone.process(impressions); + Assert.assertEquals(0,impressionsResult1.getImpressionsToQueue().size()); + Assert.assertEquals(3,impressionsResult1.getImpressionsToListener().size()); + Assert.assertEquals(2, uniqueKeysTracker.popAll().size()); + + HashMap counters = counter.popAll(); + Assert.assertEquals(2, counters.size()); + } + + @Test + public void processImpressionsWithoutListener(){ + boolean listenerEnable = false; + ImpressionCounter counter = new ImpressionCounter(); + TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetryInMemorySubmitter.class); + UniqueKeysTrackerImp uniqueKeysTracker = new UniqueKeysTrackerImp(telemetrySynchronizer, 10000, 10000, null); + ProcessImpressionNone processImpressionNone = new ProcessImpressionNone(listenerEnable, uniqueKeysTracker, counter); + + KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, null, null); + KeyImpression ki2 = keyImpression("test2", "adil", "on", 1L, null, null); + KeyImpression ki3 = keyImpression("test1", "adil", "on", 1L, null, null); + + List impressions = new ArrayList<>(); + impressions.add(new 
Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, null)); + impressions.add(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null)); + impressions.add(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null)); + + ImpressionsResult impressionsResult1 = processImpressionNone.process(impressions); + Assert.assertEquals(0,impressionsResult1.getImpressionsToQueue().size()); + Assert.assertNull(impressionsResult1.getImpressionsToListener()); + Assert.assertEquals(2, uniqueKeysTracker.popAll().size()); + Assert.assertEquals(2, counter.popAll().size()); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/impressions/strategy/ProcessImpressionOptimizedTest.java b/client/src/test/java/io/split/client/impressions/strategy/ProcessImpressionOptimizedTest.java new file mode 100644 index 000000000..f7cecebe5 --- /dev/null +++ b/client/src/test/java/io/split/client/impressions/strategy/ProcessImpressionOptimizedTest.java @@ -0,0 +1,69 @@ +package io.split.client.impressions.strategy; + +import static io.split.client.impressions.ImpressionTestUtils.keyImpression; + +import io.split.client.dtos.KeyImpression; + +import io.split.client.impressions.Impression; +import io.split.client.impressions.ImpressionObserver; +import io.split.client.impressions.ImpressionsResult; +import io.split.client.impressions.ImpressionCounter; +import io.split.telemetry.storage.InMemoryTelemetryStorage; +import io.split.telemetry.storage.TelemetryStorage; +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mockito; + +import java.util.ArrayList; +import java.util.List; + +public class ProcessImpressionOptimizedTest { + + private static final long LAST_SEEN_CACHE_SIZE = 500000; + private static TelemetryStorage TELEMETRY_STORAGE = Mockito.mock(InMemoryTelemetryStorage.class); + + @Test + public void processImpressionsWithListener(){ + boolean 
listenerEnable = true; + ImpressionObserver impressionObserver = new ImpressionObserver(LAST_SEEN_CACHE_SIZE); + ImpressionCounter counter = new ImpressionCounter(); + ProcessImpressionOptimized processImpressionOptimized = new ProcessImpressionOptimized(listenerEnable, impressionObserver, counter, TELEMETRY_STORAGE); + + KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, null, null); + KeyImpression ki2 = keyImpression("test2", "adil", "on", 1L, null, null); + KeyImpression ki3 = keyImpression("test1", "adil", "on", 1L, null, null); + + List impressions = new ArrayList<>(); + impressions.add(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 1L, null, null)); + impressions.add(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null)); + impressions.add(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null)); + + ImpressionsResult impressionsResult1 = processImpressionOptimized.process(impressions); + + Assert.assertEquals(2,impressionsResult1.getImpressionsToQueue().size()); + Assert.assertEquals(3,impressionsResult1.getImpressionsToListener().size()); + Assert.assertEquals(1, counter.popAll().size()); + } + + @Test + public void processImpressionsWithoutListener(){ + boolean listenerEnable = false; + ImpressionObserver impressionObserver = new ImpressionObserver(LAST_SEEN_CACHE_SIZE); + ImpressionCounter counter = new ImpressionCounter(); + ProcessImpressionOptimized processImpressionOptimized = new ProcessImpressionOptimized(listenerEnable, impressionObserver, counter, TELEMETRY_STORAGE); + + KeyImpression ki1 = keyImpression("test1", "adil", "on", 1L, null, null); + KeyImpression ki2 = keyImpression("test2", "adil", "on", 1L, null, null); + KeyImpression ki3 = keyImpression("test1", "adil", "on", 1L, null, null); + + List impressions = new ArrayList<>(); + impressions.add(new Impression(ki1.keyName, null, ki1.feature, ki1.treatment, ki1.time, null, 
1L, null, null)); + impressions.add(new Impression(ki2.keyName, null, ki2.feature, ki2.treatment, ki2.time, null, 1L, null, null)); + impressions.add(new Impression(ki3.keyName, null, ki3.feature, ki3.treatment, ki3.time, null, 1L, null, null)); + + ImpressionsResult impressionsResult1 = processImpressionOptimized.process(impressions); + Assert.assertEquals(2,impressionsResult1.getImpressionsToQueue().size()); + Assert.assertNull(impressionsResult1.getImpressionsToListener()); + Assert.assertEquals(1, counter.popAll().size()); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/interceptors/FlagSetsFilterImplTest.java b/client/src/test/java/io/split/client/interceptors/FlagSetsFilterImplTest.java new file mode 100644 index 000000000..c0467e298 --- /dev/null +++ b/client/src/test/java/io/split/client/interceptors/FlagSetsFilterImplTest.java @@ -0,0 +1,26 @@ +package io.split.client.interceptors; + +import org.junit.Assert; +import org.junit.Test; + +import java.util.Arrays; +import java.util.HashSet; + +public class FlagSetsFilterImplTest { + + @Test + public void testIntersectSetsWithShouldFilter() { + FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(new HashSet<>(Arrays.asList("a", "b"))); + Assert.assertTrue(flagSetsFilter.intersect("a")); + Assert.assertTrue(flagSetsFilter.intersect(new HashSet<>(Arrays.asList("a", "c")))); + Assert.assertFalse(flagSetsFilter.intersect("c")); + Assert.assertFalse(flagSetsFilter.intersect(new HashSet<>(Arrays.asList("d", "c")))); + } + + @Test + public void testIntersectSetsWithShouldNotFilter() { + FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(new HashSet<>()); + Assert.assertTrue(flagSetsFilter.intersect("a")); + Assert.assertTrue(flagSetsFilter.intersect(new HashSet<>(Arrays.asList("a", "c")))); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/utils/ApacheRequestDecoratorTest.java 
b/client/src/test/java/io/split/client/utils/ApacheRequestDecoratorTest.java new file mode 100644 index 000000000..5d5971bb8 --- /dev/null +++ b/client/src/test/java/io/split/client/utils/ApacheRequestDecoratorTest.java @@ -0,0 +1,114 @@ +package io.split.client.utils; + +import io.split.client.CustomHeaderDecorator; +import io.split.client.RequestDecorator; +import io.split.client.dtos.RequestContext; +import org.apache.hc.client5.http.classic.methods.HttpGet; +import org.apache.hc.client5.http.classic.methods.HttpPost; +import org.apache.hc.core5.http.Header; +import org.apache.hc.core5.http.ProtocolException; +import org.junit.Assert; +import org.junit.Test; + +import java.util.List; +import java.util.Arrays; +import java.util.Map; + +public class ApacheRequestDecoratorTest { + + @Test + public void testNoOp() { + ApacheRequestDecorator apacheRequestDecorator = new ApacheRequestDecorator(); + RequestDecorator requestDecorator = new RequestDecorator(null); + HttpGet request = new HttpGet("http://anyhost"); + + request = (HttpGet) apacheRequestDecorator.decorate(request, requestDecorator); + Assert.assertEquals(0, request.getHeaders().length); + request.addHeader("myheader", "value"); + request = (HttpGet) apacheRequestDecorator.decorate(request, requestDecorator); + Assert.assertEquals(1, request.getHeaders().length); + } + + @Test + public void testAddCustomHeaders() throws ProtocolException { + class MyCustomHeaders implements CustomHeaderDecorator { + public MyCustomHeaders() {} + @Override + public Map> getHeaderOverrides(RequestContext context) { + Map> additionalHeaders = context.headers(); + additionalHeaders.put("first", Arrays.asList("1")); + additionalHeaders.put("second", Arrays.asList("2.1", "2.2")); + additionalHeaders.put("third", Arrays.asList("3")); + return additionalHeaders; + } + } + MyCustomHeaders myHeaders = new MyCustomHeaders(); + RequestDecorator decorator = new RequestDecorator(myHeaders); + 
ApacheRequestDecorator apacheRequestDecorator = new ApacheRequestDecorator(); + + HttpGet request = new HttpGet("http://anyhost"); + request.addHeader("first", "myfirstheader"); + request = (HttpGet) apacheRequestDecorator.decorate(request, decorator); + + Assert.assertEquals(4, request.getHeaders().length); + Assert.assertEquals("1", request.getHeader("first").getValue()); + + Header[] second = request.getHeaders("second"); + Assert.assertEquals("2.1", second[0].getValue()); + Assert.assertEquals("2.2", second[1].getValue()); + Assert.assertEquals("3", request.getHeader("third").getValue()); + + HttpPost request2 = new HttpPost("http://anyhost"); + request2.addHeader("myheader", "value"); + request2 = (HttpPost) apacheRequestDecorator.decorate(request2, decorator); + Assert.assertEquals(5, request2.getHeaders().length); + } + + @Test + public void testAddBlockedHeaders() throws ProtocolException { + class MyCustomHeaders implements CustomHeaderDecorator { + public MyCustomHeaders() {} + @Override + public Map> getHeaderOverrides(RequestContext context) { + Map> additionalHeaders = context.headers(); + additionalHeaders.put("first", Arrays.asList("1")); + additionalHeaders.put("SplitSDKVersion", Arrays.asList("2.4")); + additionalHeaders.put("SplitMachineip", Arrays.asList("xx")); + additionalHeaders.put("splitMachineName", Arrays.asList("xx")); + additionalHeaders.put("splitimpressionsmode", Arrays.asList("xx")); + additionalHeaders.put("HOST", Arrays.asList("xx")); + additionalHeaders.put("referrer", Arrays.asList("xx")); + additionalHeaders.put("content-type", Arrays.asList("xx")); + additionalHeaders.put("content-length", Arrays.asList("xx")); + additionalHeaders.put("content-encoding", Arrays.asList("xx")); + additionalHeaders.put("ACCEPT", Arrays.asList("xx")); + additionalHeaders.put("keep-alive", Arrays.asList("xx")); + additionalHeaders.put("x-fastly-debug", 
Arrays.asList("xx")); + return additionalHeaders; + } + } + MyCustomHeaders myHeaders = new MyCustomHeaders(); + RequestDecorator decorator = new RequestDecorator(myHeaders); + ApacheRequestDecorator apacheRequestDecorator = new ApacheRequestDecorator(); + HttpGet request = new HttpGet("http://anyhost"); + request = (HttpGet) apacheRequestDecorator.decorate(request, decorator); + Assert.assertEquals(1, request.getHeaders().length); + Assert.assertEquals(null, request.getHeader("SplitSDKVersion")); + } + + @Test(expected = IllegalArgumentException.class) + public void customDecoratorError() { + class MyCustomHeaders implements CustomHeaderDecorator { + public MyCustomHeaders() {} + @Override + public Map> getHeaderOverrides(RequestContext context) { + throw new RuntimeException(); + } + } + MyCustomHeaders myHeaders = new MyCustomHeaders(); + RequestDecorator decorator = new RequestDecorator(myHeaders); + ApacheRequestDecorator apacheRequestDecorator = new ApacheRequestDecorator(); + HttpGet request = new HttpGet("http://anyhost"); + request = (HttpGet) apacheRequestDecorator.decorate(request, decorator); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/utils/CustomDispatcher.java b/client/src/test/java/io/split/client/utils/CustomDispatcher.java index e9f144760..ea2a22355 100644 --- a/client/src/test/java/io/split/client/utils/CustomDispatcher.java +++ b/client/src/test/java/io/split/client/utils/CustomDispatcher.java @@ -1,27 +1,29 @@ package io.split.client.utils; -import io.split.client.SplitClientConfig; import okhttp3.mockwebserver.Dispatcher; import okhttp3.mockwebserver.MockResponse; import okhttp3.mockwebserver.RecordedRequest; import org.jetbrains.annotations.NotNull; -import org.mockito.Mock; import java.io.InputStream; import java.util.*; public class CustomDispatcher extends Dispatcher { - public static final String 
INITIAL_SPLIT_CHANGES = "/api/splitChanges?since=-1"; - public static final String AUTH_ENABLED = "/api/auth/enabled"; - public static final String AUTH_DISABLED = "/api/auth/disabled"; - public static final String SINCE_1585948850109 = "/api/splitChanges?since=1585948850109"; - public static final String SINCE_1585948850110 = "/api/splitChanges?since=1585948850110"; - public static final String SINCE_1585948850111 = "/api/splitChanges?since=1585948850111"; - public static final String SINCE_1585948850112 = "/api/splitChanges?since=1585948850112"; + public static final String INITIAL_SPLIT_CHANGES = "/api/splitChanges?s=1.3&since=-1&rbSince=-1"; + public static final String INITIAL_FLAGS_BY_SETS = "/api/splitChanges?s=1.3&since=-1&rbSince=-1&sets=set1%2Cset2"; + public static final String SINCE_1602796638344 = "/api/splitChanges?s=1.3&since=1602796638344&rbSince=-1&sets=set1%2Cset2"; + public static final String AUTH_ENABLED = "/api/auth/enabled?s=1.3"; + public static final String AUTH_DISABLED = "/api/auth/disabled?s=1.3"; + public static final String SINCE_1585948850109 = "/api/splitChanges?s=1.3&since=1585948850109&rbSince=1585948850109"; + public static final String SINCE_1585948850109_FLAG_SET = "/api/splitChanges?s=1.3&since=-1&rbSince=-1&sets=set_1%2Cset_2"; + public static final String SINCE_1585948850110 = "/api/splitChanges?s=1.3&since=1585948850110&rbSince=1585948850110"; + public static final String SINCE_1585948850111 = "/api/splitChanges?s=1.3&since=1585948850111&rbSince=1585948850111"; + public static final String SINCE_1585948850112 = "/api/splitChanges?s=1.3&since=1585948850112&rbSince=1585948850112"; public static final String SEGMENT_TEST_INITIAL = "/api/segmentChanges/segment-test?since=-1"; public static final String SEGMENT3_INITIAL = "/api/segmentChanges/segment3?since=-1"; public static final String SEGMENT3_SINCE_1585948850110 = "/api/segmentChanges/segment3?since=1585948850110"; public static final String SEGMENT3_SINCE_1585948850111 = 
"/api/segmentChanges/segment3?since=1585948850111"; + public static final String SEGMENT_BY_FLAG_SET = "/api/segmentChanges/new_segment?since=-1"; public static final String METRICS_TIME = "/api/metrics/time"; public static final String METRICS_COUNTER = "api/metrics/counter"; @@ -41,18 +43,24 @@ public MockResponse dispatch(@NotNull RecordedRequest request) { switch (request.getPath()) { case CustomDispatcher.INITIAL_SPLIT_CHANGES: return getResponse(CustomDispatcher.INITIAL_SPLIT_CHANGES, new MockResponse().setBody(inputStreamToString("splits.json"))); + case CustomDispatcher.INITIAL_FLAGS_BY_SETS: + return getResponse(CustomDispatcher.INITIAL_FLAGS_BY_SETS, new MockResponse().setBody("{\"ff\":{\"d\":[{\"trafficTypeName\":\"client\",\"name\":\"workm\",\"trafficAllocation\":100,\"trafficAllocationSeed\":147392224,\"seed\":524417105,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"on\",\"changeNumber\":1602796638344,\"algo\":2,\"configurations\":{},\"sets\":[\"set1\",\"set2\"],\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"client\",\"attribute\":null},\"matcherType\":\"IN_SEGMENT\",\"negate\":false,\"userDefinedSegmentMatcherData\":{\"segmentName\":\"new_segment\"},\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":0},{\"treatment\":\"free\",\"size\":100},{\"treatment\":\"conta\",\"size\":0}],\"label\":\"in segment 
new_segment\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"client\",\"attribute\":null},\"matcherType\":\"ALL_KEYS\",\"negate\":false,\"userDefinedSegmentMatcherData\":null,\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":100},{\"treatment\":\"off\",\"size\":0},{\"treatment\":\"free\",\"size\":0},{\"treatment\":\"conta\",\"size\":0}],\"label\":\"default rule\"}]},{\"trafficTypeName\":\"client\",\"name\":\"workm_set_3\",\"trafficAllocation\":100,\"trafficAllocationSeed\":147392224,\"seed\":524417105,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"on\",\"changeNumber\":1602796638344,\"algo\":2,\"configurations\":{},\"sets\":[\"set_3\"],\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"client\",\"attribute\":null},\"matcherType\":\"IN_SEGMENT\",\"negate\":false,\"userDefinedSegmentMatcherData\":{\"segmentName\":\"new_segment\"},\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":0},{\"treatment\":\"free\",\"size\":100},{\"treatment\":\"conta\",\"size\":0}],\"label\":\"in segment 
new_segment\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"client\",\"attribute\":null},\"matcherType\":\"ALL_KEYS\",\"negate\":false,\"userDefinedSegmentMatcherData\":null,\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":100},{\"treatment\":\"off\",\"size\":0},{\"treatment\":\"free\",\"size\":0},{\"treatment\":\"conta\",\"size\":0}],\"label\":\"default rule\"}]}],\"s\":-1,\"t\":1602796638344},\"rbs\":{\"s\":-1,\"t\":-1,\"d\":[]}}")); case CustomDispatcher.AUTH_ENABLED: return getResponse(CustomDispatcher.AUTH_ENABLED,new MockResponse().setBody(inputStreamToString("streaming-auth-push-enabled.json"))); case CustomDispatcher.AUTH_DISABLED: return getResponse(CustomDispatcher.AUTH_DISABLED,new MockResponse().setBody(inputStreamToString("streaming-auth-push-disabled.json"))); case CustomDispatcher.SINCE_1585948850109: - return getResponse(CustomDispatcher.SINCE_1585948850109, new MockResponse().setBody("{\"splits\": [], \"since\":1585948850109, \"till\":1585948850110}")); + return getResponse(CustomDispatcher.SINCE_1585948850109, new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850109, \"t\":1585948850110}, \"rbs\":{\"s\":1585948850109,\"t\":1585948850110,\"d\":[]}}")); + case SINCE_1585948850109_FLAG_SET: + return getResponse(SINCE_1585948850109_FLAG_SET, new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850109, \"t\":1585948850110}, \"rbs\":{\"s\":1585948850109,\"t\":1585948850110,\"d\":[]}}")); case CustomDispatcher.SINCE_1585948850110: return getResponse(CustomDispatcher.SINCE_1585948850110, new MockResponse().setBody(inputStreamToString("splits2.json"))); case CustomDispatcher.SINCE_1585948850111: return getResponse(CustomDispatcher.SINCE_1585948850111, new 
MockResponse().setBody(inputStreamToString("splits_killed.json"))); case CustomDispatcher.SINCE_1585948850112: - return getResponse(CustomDispatcher.SINCE_1585948850112, new MockResponse().setBody("{\"splits\": [], \"since\":1585948850112, \"till\":1585948850112}")); + return getResponse(CustomDispatcher.SINCE_1585948850112, new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1585948850112, \"t\":1585948850112}, \"rbs\":{\"s\":1585948850112,\"t\":1585948850112,\"d\":[]}}")); + case CustomDispatcher.SINCE_1602796638344: + return getResponse(CustomDispatcher.SINCE_1602796638344, new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1602796638344, \"t\":1602796638344}, \"rbs\":{\"s\":1602796638344,\"t\":1602796638344,\"d\":[]}}")); case CustomDispatcher.SEGMENT_TEST_INITIAL: return getResponse(CustomDispatcher.SEGMENT_TEST_INITIAL, new MockResponse().setBody("{\"name\": \"segment3\",\"added\": [],\"removed\": [],\"since\": -1,\"till\": -1}")); case CustomDispatcher.SEGMENT3_INITIAL: @@ -61,6 +69,8 @@ public MockResponse dispatch(@NotNull RecordedRequest request) { return getResponse(CustomDispatcher.SEGMENT3_SINCE_1585948850110, new MockResponse().setBody("{\"name\": \"segment3\",\"added\": [],\"removed\": [],\"since\": 1585948850110,\"till\": 1585948850110}")); case CustomDispatcher.SEGMENT3_SINCE_1585948850111: return getResponse(CustomDispatcher.SEGMENT3_SINCE_1585948850111, new MockResponse().setBody("{\"name\": \"segment3\",\"added\": [],\"removed\": [],\"since\": 1585948850111,\"till\": 1585948850111}")); + case CustomDispatcher.SEGMENT_BY_FLAG_SET: + return getResponse(CustomDispatcher.SEGMENT3_SINCE_1585948850111, new MockResponse().setBody("{\"name\":\"new_segment\",\"added\":[\"user-1\"],\"removed\":[\"user-2\",\"user-3\"],\"since\":-1,\"till\":-1}")); case CustomDispatcher.METRICS_TIME: case CustomDispatcher.METRICS_COUNTER: return getResponse(CustomDispatcher.METRICS_COUNTER, new MockResponse().setResponseCode(200)); diff --git 
a/client/src/test/java/io/split/client/utils/CustomDispatcher2.java b/client/src/test/java/io/split/client/utils/CustomDispatcher2.java new file mode 100644 index 000000000..15979ffc1 --- /dev/null +++ b/client/src/test/java/io/split/client/utils/CustomDispatcher2.java @@ -0,0 +1,181 @@ +package io.split.client.utils; + +import okhttp3.mockwebserver.Dispatcher; +import okhttp3.mockwebserver.MockResponse; +import okhttp3.mockwebserver.RecordedRequest; +import org.jetbrains.annotations.NotNull; + +import java.io.InputStream; +import java.util.HashMap; +import java.util.Map; +import java.util.Queue; +import java.util.Scanner; + +public class CustomDispatcher2 extends Dispatcher { + public static final String SPLIT_FETCHER_1 = "/api/splitChanges?s=1.3&since=-1&rbSince=-1"; + public static final String SPLIT_FETCHER_2 = "/api/splitChanges?s=1.3&since=1675095324253&rbSince=1585948850111"; + public static final String SPLIT_FETCHER_3 = "/api/splitChanges?s=1.3&since=1685095324253&rbSince=1585948850111"; + public static final String SPLIT_FETCHER_4 = "/api/splitChanges?s=1.3&since=1695095324253&rbSince=1585948850111"; + public static final String SPLIT_FETCHER_5 = "/api/splitChanges?s=1.3&since=1775095324253&rbSince=1585948850111"; + + private final Map>_responses; + + public CustomDispatcher2(Map> responses){ + _responses = responses; + } + + public static CustomDispatcher2.Builder builder() { + return new CustomDispatcher2.Builder(); + } + + MockResponse response = new MockResponse().setBody("{" + + "\"ff\":{" + + "\"t\":1675095324253," + + "\"s\":-1," + + "\"d\": [{" + + "\"changeNumber\": 123," + + "\"trafficTypeName\": \"user\"," + + "\"name\": \"some_name\"," + + "\"trafficAllocation\": 100," + + "\"trafficAllocationSeed\": 123456," + + "\"seed\": 321654," + + "\"status\": \"ACTIVE\"," + + "\"killed\": false," + + "\"defaultTreatment\": \"off\"," + + "\"algo\": 2," + + "\"conditions\": [" + + "{" + + "\"partitions\": [" + + "{\"treatment\": \"on\", \"size\": 50}," + 
+ "{\"treatment\": \"off\", \"size\": 50}" + + "]," + + "\"contitionType\": \"WHITELIST\"," + + "\"label\": \"some_label\"," + + "\"matcherGroup\": {" + + "\"matchers\": [" + + "{" + + "\"matcherType\": \"WHITELIST\"," + + "\"whitelistMatcherData\": {" + + "\"whitelist\": [\"k1\", \"k2\", \"k3\"]" + + "}," + + "\"negate\": false" + + "}" + + "]," + + "\"combiner\": \"AND\"" + + "}" + + "}," + + "{" + + "\"conditionType\": \"ROLLOUT\"," + + "\"matcherGroup\": {" + + "\"combiner\": \"AND\"," + + "\"matchers\": [" + + "{" + + "\"keySelector\": {" + + "\"trafficType\": \"user\"" + + "}," + + "\"matcherType\": \"IN_RULE_BASED_SEGMENT\"," + + "\"negate\": false," + + "\"userDefinedSegmentMatcherData\": {" + + "\"segmentName\": \"sample_rule_based_segment\"" + + "}" + + "}" + + "]" + + "}," + + "\"partitions\": [" + + "{" + + "\"treatment\": \"on\"," + + "\"size\": 100" + + "}," + + "{" + + "\"treatment\": \"off\"," + + "\"size\": 0" + + "}" + + "]," + + "\"label\": \"in rule based segment sample_rule_based_segment\"" + + "}" + + "]," + + "\"sets\": [\"set1\", \"set2\"]}]" + + "}," + + "\"rbs\": {" + + "\"t\": 1585948850111," + + "\"s\": -1," + + "\"d\": [" + + "{" + + "\"changeNumber\": 5," + + "\"name\": \"sample_rule_based_segment\"," + + "\"status\": \"ACTIVE\"," + + "\"trafficTypeName\": \"user\"," + + "\"excluded\":{" + + "\"keys\":[\"mauro@split.io\",\"gaston@split.io\"]," + + "\"segments\":[]" + + "}," + + "\"conditions\": [" + + "{" + + "\"matcherGroup\": {" + + "\"combiner\": \"AND\"," + + "\"matchers\": [" + + "{" + + "\"keySelector\": {" + + "\"trafficType\": \"user\"," + + "\"attribute\": \"email\"" + + "}," + + "\"matcherType\": \"ENDS_WITH\"," + + "\"negate\": false," + + "\"whitelistMatcherData\": {" + + "\"whitelist\": [" + + "\"@split.io\"" + + "]}}]}}]}]}}"); + MockResponse response2 = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1675095324253, \"t\":1675095324253}, \"rbs\":{\"d\":[],\"s\":1585948850111,\"t\":1585948850111}}"); + MockResponse 
response3 = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1685095324253, \"t\":1695095324253}, \"rbs\":{\"d\":[],\"s\":1585948850111,\"t\":1585948850111}}"); + MockResponse response4 = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1695095324253, \"t\":1775095324253}, \"rbs\":{\"d\":[],\"s\":1585948850111,\"t\":1585948850111}}"); + MockResponse response5 = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1775095324253, \"t\":1775095324253}, \"rbs\":{\"d\":[],\"s\":1585948850111,\"t\":1585948850111}}"); + + @NotNull + @Override + public MockResponse dispatch(@NotNull RecordedRequest request) { + switch (request.getPath()) { + case CustomDispatcher2.SPLIT_FETCHER_1: + return getResponse(CustomDispatcher2.SPLIT_FETCHER_1, response); + case CustomDispatcher2.SPLIT_FETCHER_2: + return getResponse(CustomDispatcher2.SPLIT_FETCHER_2, response2); + case CustomDispatcher2.SPLIT_FETCHER_3: + return getResponse(CustomDispatcher2.SPLIT_FETCHER_3, response3); + case CustomDispatcher2.SPLIT_FETCHER_4: + return getResponse(CustomDispatcher2.SPLIT_FETCHER_4, response4); + case CustomDispatcher2.SPLIT_FETCHER_5: + return getResponse(CustomDispatcher2.SPLIT_FETCHER_5, response5); + } + return new MockResponse().setResponseCode(404); + } + + private MockResponse getResponse(String target, MockResponse mockedResponse) { + Queue responses = _responses.get(target); + if(responses != null) { + MockResponse finalResponse = responses.poll(); + return finalResponse == null ? 
mockedResponse : finalResponse; + } + return mockedResponse; + } + + + + public static final class Builder { + private Map> _responses = new HashMap<>(); + public Builder(){}; + + /** + * Add responses to an specific path + * @param path + * @param responses + * @return + */ + public Builder path(String path, Queue responses) { + _responses.put(path, responses); + return this; + } + + public CustomDispatcher2 build() { + return new CustomDispatcher2(_responses); + } + } +} diff --git a/client/src/test/java/io/split/client/utils/FeatureFlagProcessorTest.java b/client/src/test/java/io/split/client/utils/FeatureFlagProcessorTest.java new file mode 100644 index 000000000..7a1b774a1 --- /dev/null +++ b/client/src/test/java/io/split/client/utils/FeatureFlagProcessorTest.java @@ -0,0 +1,81 @@ +package io.split.client.utils; + +import io.split.client.dtos.Split; +import io.split.client.interceptors.FlagSetsFilter; +import io.split.client.interceptors.FlagSetsFilterImpl; +import io.split.engine.experiments.SplitParser; +import org.junit.Assert; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; + +import static io.split.client.utils.FeatureFlagProcessor.processFeatureFlagChanges; + +public class FeatureFlagProcessorTest { + + @Test + public void testProcessFeatureFlagChanges() { + SplitParser splitParser = new SplitParser(); + List featureFlags = new ArrayList<>(); + + String definition1 = 
"{\"trafficTypeName\":\"user\",\"id\":\"d431cdd0-b0be-11ea-8a80-1660ada9ce39\",\"name\":\"mauro_java\",\"trafficAllocation\":100,\"trafficAllocationSeed\":-92391491,\"seed\":-1769377604,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"off\",\"changeNumber\":1684329854385,\"algo\":2,\"configurations\":{},\"conditions\":[{\"conditionType\":\"WHITELIST\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"matcherType\":\"WHITELIST\",\"negate\":false,\"whitelistMatcherData\":{\"whitelist\":[\"admin\",\"mauro\",\"nico\"]}}]},\"partitions\":[{\"treatment\":\"off\",\"size\":100}],\"label\":\"whitelisted\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\"},\"matcherType\":\"IN_SEGMENT\",\"negate\":false,\"userDefinedSegmentMatcherData\":{\"segmentName\":\"maur-2\"}}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100},{\"treatment\":\"V4\",\"size\":0},{\"treatment\":\"v5\",\"size\":0}],\"label\":\"in segment maur-2\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\"},\"matcherType\":\"ALL_KEYS\",\"negate\":false}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100},{\"treatment\":\"V4\",\"size\":0},{\"treatment\":\"v5\",\"size\":0}],\"label\":\"default rule\"}]}"; + Split featureFlagTest1 = Json.fromJson(definition1, Split.class); + + String definition2 = 
"{\"trafficTypeName\":\"user\",\"id\":\"d704f220-0567-11ee-80ee-fa3c6460cd13\",\"name\":\"NET_CORE_getTreatmentWithConfigAfterArchive\",\"trafficAllocation\":100,\"trafficAllocationSeed\":179018541,\"seed\":272707374,\"status\":\"ARCHIVED\",\"killed\":false,\"defaultTreatment\":\"V-FGyN\",\"changeNumber\":1686165617166,\"algo\":2,\"configurations\":{\"V-FGyN\":\"{\\\"color\\\":\\\"blue\\\"}\",\"V-YrWB\":\"{\\\"color\\\":\\\"red\\\"}\"},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":\"test\"},\"matcherType\":\"LESS_THAN_OR_EQUAL_TO\",\"negate\":false,\"unaryNumericMatcherData\":{\"dataType\":\"NUMBER\",\"value\":20}}]},\"partitions\":[{\"treatment\":\"V-FGyN\",\"size\":0},{\"treatment\":\"V-YrWB\",\"size\":100}],\"label\":\"test \\u003c\\u003d 20\"}]}"; + Split featureFlagTest2 = Json.fromJson(definition2, Split.class); + + featureFlags.add(featureFlagTest1); + featureFlags.add(featureFlagTest2); + FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(new HashSet<>()); + FeatureFlagsToUpdate featureFlagsToUpdate = processFeatureFlagChanges(splitParser, featureFlags, flagSetsFilter); + + Assert.assertEquals(1, featureFlagsToUpdate.toAdd.size()); + Assert.assertEquals(1, featureFlagsToUpdate.toRemove.size()); + Assert.assertEquals(1, featureFlagsToUpdate.segments.size()); + } + + @Test + public void testProcessFeatureFlagChangesWithSetsToAdd() { + SplitParser splitParser = new SplitParser(); + List featureFlags = new ArrayList<>(); + + String definition1 = 
"{\"trafficTypeName\":\"user\",\"id\":\"d431cdd0-b0be-11ea-8a80-1660ada9ce39\",\"name\":\"mauro_java\",\"trafficAllocation\":100,\"trafficAllocationSeed\":-92391491,\"seed\":-1769377604,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"off\",\"changeNumber\":1684329854385,\"algo\":2,\"configurations\":{},\"sets\":[\"set_1\",\"set_2\"],\"conditions\":[{\"conditionType\":\"WHITELIST\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"matcherType\":\"WHITELIST\",\"negate\":false,\"whitelistMatcherData\":{\"whitelist\":[\"admin\",\"mauro\",\"nico\"]}}]},\"partitions\":[{\"treatment\":\"off\",\"size\":100}],\"label\":\"whitelisted\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\"},\"matcherType\":\"IN_SEGMENT\",\"negate\":false,\"userDefinedSegmentMatcherData\":{\"segmentName\":\"maur-2\"}}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100},{\"treatment\":\"V4\",\"size\":0},{\"treatment\":\"v5\",\"size\":0}],\"label\":\"in segment maur-2\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\"},\"matcherType\":\"ALL_KEYS\",\"negate\":false}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100},{\"treatment\":\"V4\",\"size\":0},{\"treatment\":\"v5\",\"size\":0}],\"label\":\"default rule\"}]}"; + Split featureFlagTest1 = Json.fromJson(definition1, Split.class); + + String definition2 = 
"{\"trafficTypeName\":\"user\",\"id\":\"d704f220-0567-11ee-80ee-fa3c6460cd13\",\"name\":\"NET_CORE_getTreatmentWithConfigAfterArchive\",\"trafficAllocation\":100,\"trafficAllocationSeed\":179018541,\"seed\":272707374,\"status\":\"ARCHIVED\",\"killed\":false,\"defaultTreatment\":\"V-FGyN\",\"changeNumber\":1686165617166,\"algo\":2,\"configurations\":{\"V-FGyN\":\"{\\\"color\\\":\\\"blue\\\"}\",\"V-YrWB\":\"{\\\"color\\\":\\\"red\\\"}\"},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":\"test\"},\"matcherType\":\"LESS_THAN_OR_EQUAL_TO\",\"negate\":false,\"unaryNumericMatcherData\":{\"dataType\":\"NUMBER\",\"value\":20}}]},\"partitions\":[{\"treatment\":\"V-FGyN\",\"size\":0},{\"treatment\":\"V-YrWB\",\"size\":100}],\"label\":\"test \\u003c\\u003d 20\"}]}"; + Split featureFlagTest2 = Json.fromJson(definition2, Split.class); + + featureFlags.add(featureFlagTest1); + featureFlags.add(featureFlagTest2); + FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(new HashSet<>(Arrays.asList("set_1"))); + FeatureFlagsToUpdate featureFlagsToUpdate = processFeatureFlagChanges(splitParser, featureFlags, flagSetsFilter); + + Assert.assertEquals(1, featureFlagsToUpdate.toAdd.size()); + Assert.assertEquals(1, featureFlagsToUpdate.toRemove.size()); + Assert.assertEquals(1, featureFlagsToUpdate.segments.size()); + } + + @Test + public void testProcessFeatureFlagChangesWithSetsToRemove() { + SplitParser splitParser = new SplitParser(); + List featureFlags = new ArrayList<>(); + + String definition1 = 
"{\"trafficTypeName\":\"user\",\"id\":\"d431cdd0-b0be-11ea-8a80-1660ada9ce39\",\"name\":\"mauro_java\",\"trafficAllocation\":100,\"trafficAllocationSeed\":-92391491,\"seed\":-1769377604,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"off\",\"changeNumber\":1684329854385,\"algo\":2,\"configurations\":{},\"sets\":[\"set_1\",\"set_2\"],\"conditions\":[{\"conditionType\":\"WHITELIST\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"matcherType\":\"WHITELIST\",\"negate\":false,\"whitelistMatcherData\":{\"whitelist\":[\"admin\",\"mauro\",\"nico\"]}}]},\"partitions\":[{\"treatment\":\"off\",\"size\":100}],\"label\":\"whitelisted\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\"},\"matcherType\":\"IN_SEGMENT\",\"negate\":false,\"userDefinedSegmentMatcherData\":{\"segmentName\":\"maur-2\"}}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100},{\"treatment\":\"V4\",\"size\":0},{\"treatment\":\"v5\",\"size\":0}],\"label\":\"in segment maur-2\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\"},\"matcherType\":\"ALL_KEYS\",\"negate\":false}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100},{\"treatment\":\"V4\",\"size\":0},{\"treatment\":\"v5\",\"size\":0}],\"label\":\"default rule\"}]}"; + Split featureFlagTest1 = Json.fromJson(definition1, Split.class); + + String definition2 = 
"{\"trafficTypeName\":\"user\",\"id\":\"d704f220-0567-11ee-80ee-fa3c6460cd13\",\"name\":\"NET_CORE_getTreatmentWithConfigAfterArchive\",\"trafficAllocation\":100,\"trafficAllocationSeed\":179018541,\"seed\":272707374,\"status\":\"ARCHIVED\",\"killed\":false,\"defaultTreatment\":\"V-FGyN\",\"changeNumber\":1686165617166,\"algo\":2,\"configurations\":{\"V-FGyN\":\"{\\\"color\\\":\\\"blue\\\"}\",\"V-YrWB\":\"{\\\"color\\\":\\\"red\\\"}\"},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":\"test\"},\"matcherType\":\"LESS_THAN_OR_EQUAL_TO\",\"negate\":false,\"unaryNumericMatcherData\":{\"dataType\":\"NUMBER\",\"value\":20}}]},\"partitions\":[{\"treatment\":\"V-FGyN\",\"size\":0},{\"treatment\":\"V-YrWB\",\"size\":100}],\"label\":\"test \\u003c\\u003d 20\"}]}"; + Split featureFlagTest2 = Json.fromJson(definition2, Split.class); + + featureFlags.add(featureFlagTest1); + featureFlags.add(featureFlagTest2); + FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(new HashSet<>(Arrays.asList("set_3"))); + FeatureFlagsToUpdate featureFlagsToUpdate = processFeatureFlagChanges(splitParser, featureFlags, flagSetsFilter); + + Assert.assertEquals(0, featureFlagsToUpdate.toAdd.size()); + Assert.assertEquals(2, featureFlagsToUpdate.toRemove.size()); + Assert.assertEquals(0, featureFlagsToUpdate.segments.size()); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/utils/FileInputStreamProviderTest.java b/client/src/test/java/io/split/client/utils/FileInputStreamProviderTest.java new file mode 100644 index 000000000..5c9e34d2d --- /dev/null +++ b/client/src/test/java/io/split/client/utils/FileInputStreamProviderTest.java @@ -0,0 +1,13 @@ +package io.split.client.utils; + +import io.split.client.exceptions.InputStreamProviderException; +import org.junit.Test; + +public class FileInputStreamProviderTest { + + @Test(expected = 
InputStreamProviderException.class) + public void processTestForException() throws InputStreamProviderException { + FileInputStreamProvider fileInputStreamProvider = new FileInputStreamProvider("src/test/resources/notExist.json"); + fileInputStreamProvider.get(); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/utils/FlagSetsValidatorTest.java b/client/src/test/java/io/split/client/utils/FlagSetsValidatorTest.java new file mode 100644 index 000000000..444de28cb --- /dev/null +++ b/client/src/test/java/io/split/client/utils/FlagSetsValidatorTest.java @@ -0,0 +1,78 @@ +package io.split.client.utils; + +import org.junit.Assert; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +import static io.split.inputValidation.FlagSetsValidator.cleanup; + +public class FlagSetsValidatorTest { + + @Test + public void testEmptyFlagSets() { + List flagSets = new ArrayList<>(); + Assert.assertTrue(cleanup(flagSets).isEmpty()); + } + + @Test + public void testUpperFlagSets() { + List flagSets = new ArrayList<>(); + flagSets.add("Test1"); + flagSets.add("TEST2"); + Set cleanFlagSets = cleanup(flagSets); + Assert.assertTrue(cleanFlagSets.contains("test1")); + Assert.assertTrue(cleanFlagSets.contains("test2")); + } + + @Test + public void testTrimFlagSets() { + List flagSets = new ArrayList<>(); + flagSets.add(" test1"); + flagSets.add(" test2 "); + Set cleanFlagSets = cleanup(flagSets); + Assert.assertTrue(cleanFlagSets.contains("test1")); + Assert.assertTrue(cleanFlagSets.contains("test2")); + } + + @Test + public void testRegexFlagSets() { + List flagSets = new ArrayList<>(); + flagSets.add(" test1"); + flagSets.add(" test-2 "); + Set cleanFlagSets = cleanup(flagSets); + Assert.assertEquals(1, cleanFlagSets.size()); + Assert.assertTrue(cleanFlagSets.contains("test1")); + Assert.assertFalse(cleanFlagSets.contains("test-2")); + } + + @Test + public void testDuplicateFlagSets() { + List flagSets = 
new ArrayList<>(); + flagSets.add(" test1"); + flagSets.add(" test1 "); + Set cleanFlagSets = cleanup(flagSets); + Assert.assertEquals(1, cleanFlagSets.size()); + Assert.assertTrue(cleanFlagSets.contains("test1")); + } + + @Test + public void testFlagSetsInOrder() { + List flagSets = new ArrayList<>(); + flagSets.add(" test3"); + flagSets.add(" test2"); + flagSets.add(" test1 "); + flagSets.add(" 1test "); + flagSets.add(" 2test "); + Set cleanFlagSets = cleanup(flagSets); + Assert.assertEquals(5, cleanFlagSets.size()); + List sets = new ArrayList<>(cleanFlagSets); + Assert.assertEquals("1test", sets.get(0)); + Assert.assertEquals("2test", sets.get(1)); + Assert.assertEquals("test1", sets.get(2)); + Assert.assertEquals("test2", sets.get(3)); + Assert.assertEquals("test3", sets.get(4)); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/utils/LocalhostSegmentChangeFetcherTest.java b/client/src/test/java/io/split/client/utils/LocalhostSegmentChangeFetcherTest.java new file mode 100644 index 000000000..b43388fcc --- /dev/null +++ b/client/src/test/java/io/split/client/utils/LocalhostSegmentChangeFetcherTest.java @@ -0,0 +1,150 @@ +package io.split.client.utils; + +import io.split.client.LocalhostSegmentChangeFetcher; +import io.split.client.dtos.SegmentChange; +import io.split.engine.common.FetchOptions; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.mockito.Mockito; + +import java.io.File; +import java.io.IOException; + +public class LocalhostSegmentChangeFetcherTest { + @Rule + public TemporaryFolder folder = new TemporaryFolder(); + private String TEST_0 = "{\"name\":\"segment_test\",\"added\":[\"user-1\"],\"removed\":[],\"since\":-1,\"till\":-1}"; + private String TEST_1 = "{\"name\":\"segment_test\",\"added\":[\"user-1\"],\"removed\":[\"user-2\"],\"since\":-1,\"till\":-1}"; + private String TEST_2 = 
"{\"name\":\"segment_test\",\"added\":[\"user-1\"],\"removed\":[\"user-2\"],\"since\":-1,\"till\":2323}"; + private String TEST_3 = "{\"name\":\"segment_test\",\"added\":[\"user-1\",\"user-3\"],\"removed\":[\"user-2\"],\"since\":-1,\"till\":2323}"; + private String TEST_4 = "{\"name\":\"segment_test\",\"added\":[\"user-1\",\"user-3\"],\"removed\":[\"user-2\"],\"since\":-1,\"till\":445345}"; + private String TEST_5 = "{\"name\":\"segment_test\",\"added\":[\"user-1\"],\"removed\":[\"user-2\",\"user-3\"],\"since\":-1,\"till\":-1}"; + + @Test + public void testSegmentFetch() { + LocalhostSegmentChangeFetcher localhostSegmentChangeFetcher = new LocalhostSegmentChangeFetcher("src/test/resources/"); + FetchOptions fetchOptions = Mockito.mock(FetchOptions.class); + + SegmentChange segmentChange = localhostSegmentChangeFetcher.fetch("segment_1", -1L, fetchOptions); + + Assert.assertEquals("segment_1", segmentChange.name); + Assert.assertEquals(4, segmentChange.added.size()); + } + + @Test + public void testSegmentNameNull() { + LocalhostSegmentChangeFetcher localhostSegmentChangeFetcher = new LocalhostSegmentChangeFetcher("src/test/resources/sanitizer/"); + FetchOptions fetchOptions = Mockito.mock(FetchOptions.class); + + SegmentChange segmentChange = localhostSegmentChangeFetcher.fetch("segmentNameNull", -1L, fetchOptions); + + Assert.assertNull(segmentChange); + } + + @Test + public void sameInAddedAndRemoved() { + LocalhostSegmentChangeFetcher localhostSegmentChangeFetcher = new LocalhostSegmentChangeFetcher("src/test/resources/sanitizer/"); + FetchOptions fetchOptions = Mockito.mock(FetchOptions.class); + + SegmentChange segmentChange = localhostSegmentChangeFetcher.fetch("sameInAddedAndRemoved", -1L, fetchOptions); + + Assert.assertEquals(0, segmentChange.removed.size()); + Assert.assertEquals(4, segmentChange.added.size()); + } + + @Test + public void checkTillAndSince() { + LocalhostSegmentChangeFetcher localhostSegmentChangeFetcher = new 
LocalhostSegmentChangeFetcher("src/test/resources/sanitizer/"); + FetchOptions fetchOptions = Mockito.mock(FetchOptions.class); + + SegmentChange segmentChange = localhostSegmentChangeFetcher.fetch("segmentChangeSinceTill", -1L, fetchOptions); + + Assert.assertEquals(-1L, segmentChange.till); + Assert.assertEquals(-1L, segmentChange.since); + } + + @Test + public void testProcessSegmentFetch() throws IOException { + File file = folder.newFile("segment_test.json"); + + byte[] test = TEST_0.getBytes(); + com.google.common.io.Files.write(test, file); + + LocalhostSegmentChangeFetcher localhostSplitChangeFetcher = new LocalhostSegmentChangeFetcher(folder.getRoot().getAbsolutePath()); + FetchOptions fetchOptions = Mockito.mock(FetchOptions.class); + + // 0) The CN from storage is -1, till and since are -1, and sha doesn't exist in the hash. It's going to return a segment change with updates. + SegmentChange segmentChange = localhostSplitChangeFetcher.fetch("segment_test",-1L, fetchOptions); + + Assert.assertEquals(1, segmentChange.added.size()); + Assert.assertTrue(segmentChange.added.contains("user-1")); + Assert.assertEquals(0, segmentChange.removed.size()); + Assert.assertEquals(-1, segmentChange.till); + Assert.assertEquals(-1, segmentChange.since); + + test = TEST_1.getBytes(); + com.google.common.io.Files.write(test, file); + + // 1) The CN from storage is -1, till and since are -1, and sha is different than before. It's going to return a segment change with updates. 
+ segmentChange = localhostSplitChangeFetcher.fetch("segment_test",-1L, fetchOptions); + Assert.assertEquals(1, segmentChange.added.size()); + Assert.assertTrue(segmentChange.added.contains("user-1")); + Assert.assertEquals(1, segmentChange.removed.size()); + Assert.assertTrue(segmentChange.removed.contains("user-2")); + Assert.assertEquals(-1, segmentChange.till); + Assert.assertEquals(-1, segmentChange.since); + + test = TEST_2.getBytes(); + com.google.common.io.Files.write(test, file); + + // 2) The CN from storage is -1, till is 2323, and since is -1, and sha is the same as before. It's going to return a segment change with the same data. + segmentChange = localhostSplitChangeFetcher.fetch("segment_test",-1L, fetchOptions); + Assert.assertEquals(1, segmentChange.added.size()); + Assert.assertTrue(segmentChange.added.contains("user-1")); + Assert.assertEquals(1, segmentChange.removed.size()); + Assert.assertTrue(segmentChange.removed.contains("user-2")); + Assert.assertEquals(-1, segmentChange.till); + Assert.assertEquals(-1, segmentChange.since); + + test = TEST_3.getBytes(); + com.google.common.io.Files.write(test, file); + + // 3) The CN from storage is -1, till is 2323, and since is -1, sha is different than before. It's going to return a segment change with updates. + segmentChange = localhostSplitChangeFetcher.fetch("segment_test",-1L, fetchOptions); + Assert.assertEquals(2, segmentChange.added.size()); + Assert.assertTrue(segmentChange.added.contains("user-1")); + Assert.assertEquals(1, segmentChange.removed.size()); + Assert.assertTrue(segmentChange.removed.contains("user-2")); + Assert.assertEquals(2323, segmentChange.till); + Assert.assertEquals(-1, segmentChange.since); + + test = TEST_4.getBytes(); + com.google.common.io.Files.write(test, file); + + // 4) The CN from storage is 2323, till is 445345, and since is -1, and sha is the same as before. It's going to return a segment change with same data. 
+ segmentChange = localhostSplitChangeFetcher.fetch("segment_test",2323, fetchOptions); + Assert.assertEquals(2, segmentChange.added.size()); + Assert.assertTrue(segmentChange.added.contains("user-1")); + Assert.assertTrue(segmentChange.added.contains("user-3")); + Assert.assertEquals(1, segmentChange.removed.size()); + Assert.assertTrue(segmentChange.removed.contains("user-2")); + Assert.assertEquals(2323, segmentChange.till); + Assert.assertEquals(2323, segmentChange.since); + + test = TEST_5.getBytes(); + com.google.common.io.Files.write(test, file); + + // 5) The CN from storage is 2323, till and since are -1, and sha is different than before. It's going to return a segment change with updates. + segmentChange = localhostSplitChangeFetcher.fetch("segment_test",2323, fetchOptions); + Assert.assertEquals(1, segmentChange.added.size()); + Assert.assertTrue(segmentChange.added.contains("user-1")); + Assert.assertFalse(segmentChange.added.contains("user-3")); + Assert.assertEquals(2, segmentChange.removed.size()); + Assert.assertTrue(segmentChange.removed.contains("user-2")); + Assert.assertTrue(segmentChange.removed.contains("user-3")); + Assert.assertEquals(2323, segmentChange.till); + Assert.assertEquals(2323, segmentChange.since); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/client/utils/LocalhostUtils.java b/client/src/test/java/io/split/client/utils/LocalhostUtils.java new file mode 100644 index 000000000..de507bd4a --- /dev/null +++ b/client/src/test/java/io/split/client/utils/LocalhostUtils.java @@ -0,0 +1,46 @@ +package io.split.client.utils; + +import io.split.client.LocalhostSplit; +import io.split.client.SplitAndKey; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.io.StringWriter; +import java.util.Map; + +public class LocalhostUtils { + + public static void writeFile(File f, StringWriter content) throws IOException { + BufferedWriter writer = 
new BufferedWriter(new FileWriter(f)); + writer.write(content.toString()); + writer.flush(); + writer.close(); + } + + public static void writeFile(File f, Map map) throws IOException { + BufferedWriter writer = new BufferedWriter(new FileWriter(f)); + + for (Map.Entry entry : map.entrySet()) { + String line = toString(entry); + writer.write(line); + } + + writer.flush(); + writer.close(); + } + + private static String toString(Map.Entry entry) { + StringBuilder bldr = new StringBuilder(); + bldr.append(entry.getKey().split()); + bldr.append(' '); + bldr.append(entry.getValue().treatment); + if (entry.getKey().key() != null) { + bldr.append(' '); + bldr.append(entry.getKey().key()); + } + bldr.append('\n'); + return bldr.toString(); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/common/ConsumerSyncManagerTest.java b/client/src/test/java/io/split/engine/common/ConsumerSyncManagerTest.java new file mode 100644 index 000000000..d01184f80 --- /dev/null +++ b/client/src/test/java/io/split/engine/common/ConsumerSyncManagerTest.java @@ -0,0 +1,18 @@ +package io.split.engine.common; + +import org.junit.Test; +import org.mockito.Mockito; + +import java.io.IOException; + +public class ConsumerSyncManagerTest { + @Test + public void testStartAndShutdown() throws IOException { + Synchronizer redisSynchronizer = Mockito.mock(ConsumerSynchronizer.class); + ConsumerSyncManager imp = new ConsumerSyncManager(redisSynchronizer); + imp.start(); + Mockito.verify(redisSynchronizer, Mockito.times(1)).startPeriodicDataRecording(); + imp.shutdown(); + Mockito.verify(redisSynchronizer, Mockito.times(1)).stopPeriodicDataRecording(); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/common/ConsumerSynchronizerTest.java b/client/src/test/java/io/split/engine/common/ConsumerSynchronizerTest.java new file mode 100644 index 000000000..3c488fab0 --- /dev/null +++ 
b/client/src/test/java/io/split/engine/common/ConsumerSynchronizerTest.java @@ -0,0 +1,30 @@ +package io.split.engine.common; + +import io.split.client.impressions.ImpressionsManager; +import io.split.client.impressions.UniqueKeysTracker; +import io.split.telemetry.synchronizer.TelemetrySyncTask; +import org.junit.Test; +import org.mockito.Mockito; + +public class ConsumerSynchronizerTest { + + @Test + public void testDataRecording() { + ImpressionsManager impressionsManager = Mockito.mock(ImpressionsManager.class); + UniqueKeysTracker uniqueKeysTracker = Mockito.mock(UniqueKeysTracker.class); + TelemetrySyncTask telemetrySyncTask = Mockito.mock(TelemetrySyncTask.class); + SplitTasks splitTasks = SplitTasks.build(null, null, impressionsManager, null, telemetrySyncTask, uniqueKeysTracker); + Synchronizer imp = new ConsumerSynchronizer(splitTasks); + imp.startPeriodicDataRecording(); + + Mockito.verify(impressionsManager, Mockito.times(1)).start(); + Mockito.verify(uniqueKeysTracker, Mockito.times(1)).start(); + Mockito.verify(telemetrySyncTask, Mockito.times(1)).startScheduledTask(); + + imp.stopPeriodicDataRecording(); + + Mockito.verify(impressionsManager, Mockito.times(1)).close(); + Mockito.verify(uniqueKeysTracker, Mockito.times(1)).stop(); + Mockito.verify(telemetrySyncTask, Mockito.times(1)).stopScheduledTask(); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/common/FetcherOptionsTest.java b/client/src/test/java/io/split/engine/common/FetcherOptionsTest.java index 25b2ea0fd..19a1d45ec 100644 --- a/client/src/test/java/io/split/engine/common/FetcherOptionsTest.java +++ b/client/src/test/java/io/split/engine/common/FetcherOptionsTest.java @@ -2,48 +2,20 @@ import org.junit.Test; -import java.util.HashMap; -import java.util.Map; -import java.util.function.Function; - import static org.junit.Assert.assertEquals; public class FetcherOptionsTest { @Test public void optionsPropagatedOk() { - final boolean[] called = {false}; - 
Function, Void> func = new Function, Void>() { - @Override - public Void apply(Map unused) { - called[0] = true; - return null; - } - }; - FetchOptions options = new FetchOptions.Builder() .cacheControlHeaders(true) - .fastlyDebugHeader(true) - .responseHeadersCallback(func) .targetChangeNumber(123) + .flagSetsFilter("set1,set2") .build(); assertEquals(options.cacheControlHeadersEnabled(), true); - assertEquals(options.fastlyDebugHeaderEnabled(), true); assertEquals(options.targetCN(), 123); - options.handleResponseHeaders(new HashMap<>()); - assertEquals(called[0], true); - } - - @Test - public void nullHandlerDoesNotExplode() { - - FetchOptions options = new FetchOptions.Builder() - .cacheControlHeaders(true) - .fastlyDebugHeader(true) - .responseHeadersCallback(null) - .build(); - - options.handleResponseHeaders(new HashMap<>()); + assertEquals("set1,set2", options.flagSetsFilter()); } } diff --git a/client/src/test/java/io/split/engine/common/LocalhostSynchronizerTest.java b/client/src/test/java/io/split/engine/common/LocalhostSynchronizerTest.java new file mode 100644 index 000000000..04163aedd --- /dev/null +++ b/client/src/test/java/io/split/engine/common/LocalhostSynchronizerTest.java @@ -0,0 +1,105 @@ +package io.split.engine.common; + +import io.split.client.LocalhostSegmentChangeFetcher; +import io.split.client.JsonLocalhostSplitChangeFetcher; +import io.split.client.interceptors.FlagSetsFilter; +import io.split.client.interceptors.FlagSetsFilterImpl; +import io.split.client.utils.FileInputStreamProvider; +import io.split.client.utils.InputStreamProvider; +import io.split.engine.experiments.*; +import io.split.engine.segments.SegmentChangeFetcher; +import io.split.engine.segments.SegmentSynchronizationTaskImp; +import io.split.storages.*; +import io.split.storages.memory.InMemoryCacheImp; +import io.split.storages.memory.RuleBasedSegmentCacheInMemoryImp; +import io.split.storages.memory.SegmentCacheInMemoryImpl; +import 
io.split.telemetry.storage.NoopTelemetryStorage; +import io.split.telemetry.storage.TelemetryStorage; +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mockito; + +import java.util.HashSet; + +public class LocalhostSynchronizerTest { + + private static final TelemetryStorage TELEMETRY_STORAGE_NOOP = Mockito.mock(NoopTelemetryStorage.class); + private static final FlagSetsFilter FLAG_SETS_FILTER = new FlagSetsFilterImpl(new HashSet<>()); + + @Test + public void testSyncAll(){ + SplitCache splitCacheProducer = new InMemoryCacheImp(FLAG_SETS_FILTER); + + InputStreamProvider inputStreamProvider = new FileInputStreamProvider("src/test/resources/split_init.json"); + SplitChangeFetcher splitChangeFetcher = new JsonLocalhostSplitChangeFetcher(inputStreamProvider); + SplitParser splitParser = new SplitParser(); + RuleBasedSegmentCache ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + + SplitFetcher splitFetcher = new SplitFetcherImp(splitChangeFetcher, splitParser, splitCacheProducer, TELEMETRY_STORAGE_NOOP, FLAG_SETS_FILTER, + ruleBasedSegmentParser, ruleBasedSegmentCache); + + SplitSynchronizationTask splitSynchronizationTask = new SplitSynchronizationTask(splitFetcher, splitCacheProducer, 1000L, null); + + SegmentChangeFetcher segmentChangeFetcher = new LocalhostSegmentChangeFetcher("src/test/resources/"); + SegmentCacheProducer segmentCacheProducer = new SegmentCacheInMemoryImpl(); + + SegmentSynchronizationTaskImp segmentSynchronizationTaskImp = new SegmentSynchronizationTaskImp(segmentChangeFetcher, 1000, 1, segmentCacheProducer, + TELEMETRY_STORAGE_NOOP, splitCacheProducer, null, ruleBasedSegmentCache); + SplitTasks splitTasks = SplitTasks.build(splitSynchronizationTask, segmentSynchronizationTaskImp, null, null, null, null); + + LocalhostSynchronizer localhostSynchronizer = new LocalhostSynchronizer(splitTasks, splitFetcher, false); + + 
Assert.assertTrue(localhostSynchronizer.syncAll()); + } + + @Test + public void testPeriodicFetching() throws InterruptedException { + SplitCache splitCacheProducer = new InMemoryCacheImp(FLAG_SETS_FILTER); + + SplitChangeFetcher splitChangeFetcher = Mockito.mock(JsonLocalhostSplitChangeFetcher.class); + SplitParser splitParser = new SplitParser(); + RuleBasedSegmentCache ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + + SplitFetcher splitFetcher = new SplitFetcherImp(splitChangeFetcher, splitParser, splitCacheProducer, TELEMETRY_STORAGE_NOOP, FLAG_SETS_FILTER, + ruleBasedSegmentParser, ruleBasedSegmentCache); + + SplitSynchronizationTask splitSynchronizationTask = new SplitSynchronizationTask(splitFetcher, splitCacheProducer, 1000L, null); + FetchOptions fetchOptions = new FetchOptions.Builder().build(); + + SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(LocalhostSegmentChangeFetcher.class); + SegmentCacheProducer segmentCacheProducer = new SegmentCacheInMemoryImpl(); + + SegmentSynchronizationTaskImp segmentSynchronizationTaskImp = new SegmentSynchronizationTaskImp(segmentChangeFetcher, 1000, 1, segmentCacheProducer, + TELEMETRY_STORAGE_NOOP, splitCacheProducer, null, ruleBasedSegmentCache); + + SplitTasks splitTasks = SplitTasks.build(splitSynchronizationTask, segmentSynchronizationTaskImp, null, null, null, null); + LocalhostSynchronizer localhostSynchronizer = new LocalhostSynchronizer(splitTasks, splitFetcher, true); + + localhostSynchronizer.startPeriodicFetching(); + + Thread.sleep(2000); + + Mockito.verify(splitChangeFetcher, Mockito.times(1)).fetch(-1, -1, fetchOptions); + } + + @Test + public void testRefreshSplits() { + SplitCacheProducer splitCacheProducer = new InMemoryCacheImp(FLAG_SETS_FILTER); + SplitChangeFetcher splitChangeFetcher = Mockito.mock(SplitChangeFetcher.class); + SplitParser splitParser = new SplitParser(); + 
RuleBasedSegmentCacheProducer ruleBasedSegmentCacheProducer = new RuleBasedSegmentCacheInMemoryImp(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + + SplitFetcher splitFetcher = new SplitFetcherImp(splitChangeFetcher, splitParser, splitCacheProducer, TELEMETRY_STORAGE_NOOP, FLAG_SETS_FILTER, + ruleBasedSegmentParser, ruleBasedSegmentCacheProducer); + SplitSynchronizationTask splitSynchronizationTask = new SplitSynchronizationTask(splitFetcher, splitCacheProducer, 1000L, null); + SplitTasks splitTasks = SplitTasks.build(splitSynchronizationTask, null, null, null, null, null); + LocalhostSynchronizer localhostSynchronizer = new LocalhostSynchronizer(splitTasks, splitFetcher, false); + + localhostSynchronizer.refreshSplits(null, null); + + Mockito.verify(splitChangeFetcher, Mockito.times(1)).fetch(Mockito.anyLong(), Mockito.anyLong(), Mockito.anyObject()); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/common/PushManagerTest.java b/client/src/test/java/io/split/engine/common/PushManagerTest.java index d93ccbae5..33ce13416 100644 --- a/client/src/test/java/io/split/engine/common/PushManagerTest.java +++ b/client/src/test/java/io/split/engine/common/PushManagerTest.java @@ -7,7 +7,7 @@ import io.split.engine.sse.client.SSEClient; import io.split.engine.sse.dtos.AuthenticationResponse; import io.split.engine.sse.workers.SegmentsWorkerImp; -import io.split.engine.sse.workers.SplitsWorker; +import io.split.engine.sse.workers.FeatureFlagsWorker; import io.split.telemetry.storage.InMemoryTelemetryStorage; import io.split.telemetry.storage.TelemetryStorage; import org.junit.Assert; @@ -15,8 +15,6 @@ import org.junit.Test; import org.mockito.Mockito; -import java.util.concurrent.LinkedBlockingQueue; - public class PushManagerTest { private AuthApiClient _authApiClient; private EventSourceClient _eventSourceClient; @@ -24,9 +22,13 @@ public class PushManagerTest { private PushManager _pushManager; private 
PushStatusTracker _pushStatusTracker; private TelemetryStorage _telemetryStorage; + private FeatureFlagsWorker _featureFlagsWorker; + private SegmentsWorkerImp _segmentsWorkerImp; @Before public void setUp() { + _featureFlagsWorker = Mockito.mock(FeatureFlagsWorker.class); + _segmentsWorkerImp = Mockito.mock(SegmentsWorkerImp.class); _authApiClient = Mockito.mock(AuthApiClient.class); _eventSourceClient = Mockito.mock(EventSourceClient.class); _backoff = Mockito.mock(Backoff.class); @@ -34,9 +36,11 @@ public void setUp() { _telemetryStorage = new InMemoryTelemetryStorage(); _pushManager = new PushManagerImp(_authApiClient, _eventSourceClient, - Mockito.mock(SplitsWorker.class), - Mockito.mock(SegmentsWorkerImp.class), - _pushStatusTracker, _telemetryStorage); + _featureFlagsWorker, + _segmentsWorkerImp, + _pushStatusTracker, + _telemetryStorage, + null); } @Test @@ -107,4 +111,60 @@ public void startWithPushDisabledAndRetryTrueShouldConnect() throws InterruptedE Thread.sleep(1500); Mockito.verify(_pushStatusTracker, Mockito.times(1)).handleSseStatus(SSEClient.StatusMessage.RETRYABLE_ERROR); } -} + + + @Test + public void startAndStop() throws InterruptedException { + AuthenticationResponse response = new AuthenticationResponse(true, "token-test", "channels-test", 1, false); + + Mockito.when(_authApiClient.Authenticate()) + .thenReturn(response); + + Mockito.when(_eventSourceClient.start(response.getChannels(), response.getToken())) + .thenReturn(true); + + _pushManager.start(); + + Mockito.verify(_authApiClient, Mockito.times(1)).Authenticate(); + Mockito.verify(_eventSourceClient, Mockito.times(1)).start(response.getChannels(), response.getToken()); + + Thread.sleep(1500); + + Mockito.verify(_pushStatusTracker, Mockito.times(0)).handleSseStatus(SSEClient.StatusMessage.RETRYABLE_ERROR); + Mockito.verify(_pushStatusTracker, Mockito.times(0)).forcePushDisable(); + Assert.assertEquals(1, _telemetryStorage.popStreamingEvents().size()); + + _pushManager.stop(); + + 
Mockito.verify(_eventSourceClient, Mockito.times(1)).stop(); + Mockito.verify(_featureFlagsWorker, Mockito.times(1)).stop(); + Mockito.verify(_segmentsWorkerImp, Mockito.times(1)).stop(); + } + + @Test + public void validateStartWorkers() { + _pushManager.startWorkers(); + Mockito.verify(_featureFlagsWorker, Mockito.times(1)).start(); + Mockito.verify(_segmentsWorkerImp, Mockito.times(1)).start(); + } + + @Test + public void validateScheduleConnectionReset() throws InterruptedException { + AuthenticationResponse response = new AuthenticationResponse(false, "token-test", "channels-test", 3, false); + + Mockito.when(_authApiClient.Authenticate()) + .thenReturn(response); + + Mockito.when(_eventSourceClient.start(response.getChannels(), response.getToken())) + .thenReturn(true); + + _pushManager.start(); + + _pushManager.scheduleConnectionReset(); + Thread.sleep(1000); + + Mockito.verify(_eventSourceClient, Mockito.times(3)).stop(); + Mockito.verify(_featureFlagsWorker, Mockito.times(3)).stop(); + Mockito.verify(_segmentsWorkerImp, Mockito.times(3)).stop(); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/common/SyncManagerTest.java b/client/src/test/java/io/split/engine/common/SyncManagerTest.java index 466fe38a8..f1c1699cd 100644 --- a/client/src/test/java/io/split/engine/common/SyncManagerTest.java +++ b/client/src/test/java/io/split/engine/common/SyncManagerTest.java @@ -1,51 +1,83 @@ package io.split.engine.common; import io.split.client.SplitClientConfig; +import io.split.client.events.EventsTask; +import io.split.client.impressions.ImpressionsManagerImpl; +import io.split.client.impressions.UniqueKeysTracker; import io.split.engine.SDKReadinessGates; +import io.split.engine.experiments.SplitSynchronizationTask; +import io.split.engine.segments.SegmentSynchronizationTaskImp; import io.split.telemetry.storage.InMemoryTelemetryStorage; import io.split.telemetry.storage.TelemetryStorage; +import 
io.split.telemetry.synchronizer.TelemetrySyncTask; import io.split.telemetry.synchronizer.TelemetrySynchronizer; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; +import java.io.IOException; import java.util.concurrent.LinkedBlockingQueue; public class SyncManagerTest { - private static final int BACKOFF_BASE = 1; private Synchronizer _synchronizer; private PushManager _pushManager; private SDKReadinessGates _gates; + private ImpressionsManagerImpl _impressionsManager; + private TelemetrySynchronizer _telemetrySynchronizer; + private TelemetrySyncTask _telemetrySyncTask; + private EventsTask _eventsTask; + private SplitClientConfig _config; + private SegmentSynchronizationTaskImp _segmentSynchronizationTaskImp; + private SplitSynchronizationTask _splitSynchronizationTask; + private UniqueKeysTracker _uniqueKeysTracker; @Before public void setUp() { _synchronizer = Mockito.mock(Synchronizer.class); _pushManager = Mockito.mock(PushManager.class); _gates = Mockito.mock(SDKReadinessGates.class); + _impressionsManager = Mockito.mock(ImpressionsManagerImpl.class); + _telemetrySynchronizer = Mockito.mock(TelemetrySynchronizer.class); + _telemetrySyncTask = Mockito.mock(TelemetrySyncTask.class); + _eventsTask = Mockito.mock(EventsTask.class); + _config = Mockito.mock(SplitClientConfig.class); + _segmentSynchronizationTaskImp = Mockito.mock(SegmentSynchronizationTaskImp.class); + _splitSynchronizationTask = Mockito.mock(SplitSynchronizationTask.class); + _uniqueKeysTracker = Mockito.mock(UniqueKeysTracker.class); } @Test - public void startWithStreamingFalseShouldStartPolling() throws InterruptedException { + public void startWithStreamingFalseShouldStartPolling() throws InterruptedException, IOException { TelemetryStorage telemetryStorage = Mockito.mock(TelemetryStorage.class); _gates.sdkInternalReady(); - TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetrySynchronizer.class); - SplitClientConfig config = 
Mockito.mock(SplitClientConfig.class); + SplitTasks splitTasks = SplitTasks.build(_splitSynchronizationTask, _segmentSynchronizationTaskImp, + _impressionsManager, _eventsTask, _telemetrySyncTask, _uniqueKeysTracker); + Mockito.when(_synchronizer.syncAll()).thenReturn(true); - SyncManagerImp syncManager = new SyncManagerImp(false, _synchronizer, _pushManager, new LinkedBlockingQueue<>(), BACKOFF_BASE, _gates, telemetryStorage, telemetrySynchronizer, config); + SplitAPI splitAPI = Mockito.mock(SplitAPI.class); + SyncManagerImp syncManager = new SyncManagerImp( splitTasks, false, _synchronizer, _pushManager, new LinkedBlockingQueue<>(), + _gates, telemetryStorage, _telemetrySynchronizer, _config, splitAPI); syncManager.start(); Thread.sleep(1000); Mockito.verify(_synchronizer, Mockito.times(1)).startPeriodicFetching(); Mockito.verify(_synchronizer, Mockito.times(1)).syncAll(); Mockito.verify(_pushManager, Mockito.times(0)).start(); + + syncManager.shutdown(); + Mockito.verify(_pushManager, Mockito.times(0)).stop(); + Mockito.verify(_synchronizer, Mockito.times(1)).stopPeriodicDataRecording(); } @Test public void startWithStreamingTrueShouldStartSyncAll() throws InterruptedException { TelemetryStorage telemetryStorage = Mockito.mock(TelemetryStorage.class); - TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetrySynchronizer.class); - SplitClientConfig config = Mockito.mock(SplitClientConfig.class); + SplitTasks splitTasks = SplitTasks.build(_splitSynchronizationTask, _segmentSynchronizationTaskImp, + _impressionsManager, _eventsTask, _telemetrySyncTask, _uniqueKeysTracker); + Mockito.when(_synchronizer.syncAll()).thenReturn(true); - SyncManager sm = new SyncManagerImp(true, _synchronizer, _pushManager, new LinkedBlockingQueue<>(), BACKOFF_BASE, _gates, telemetryStorage, telemetrySynchronizer, config); + SplitAPI splitAPI = Mockito.mock(SplitAPI.class); + SyncManager sm = new SyncManagerImp(splitTasks, true, _synchronizer, _pushManager, new 
LinkedBlockingQueue<>(), + _gates, telemetryStorage, _telemetrySynchronizer, _config, splitAPI); sm.start(); Thread.sleep(1000); Mockito.verify(_synchronizer, Mockito.times(0)).startPeriodicFetching(); @@ -58,10 +90,12 @@ public void startWithStreamingTrueShouldStartSyncAll() throws InterruptedExcepti public void onStreamingAvailable() throws InterruptedException { TelemetryStorage telemetryStorage = Mockito.mock(TelemetryStorage.class); LinkedBlockingQueue messages = new LinkedBlockingQueue<>(); - TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetrySynchronizer.class); - SplitClientConfig config = Mockito.mock(SplitClientConfig.class); + SplitTasks splitTasks = SplitTasks.build(_splitSynchronizationTask, _segmentSynchronizationTaskImp, + _impressionsManager, _eventsTask, _telemetrySyncTask, _uniqueKeysTracker); + SplitAPI splitAPI = Mockito.mock(SplitAPI.class); - SyncManagerImp syncManager = new SyncManagerImp(true, _synchronizer, _pushManager, messages, BACKOFF_BASE, _gates, telemetryStorage, telemetrySynchronizer, config); + SyncManagerImp syncManager = new SyncManagerImp(splitTasks, true, _synchronizer, _pushManager, messages, + _gates, telemetryStorage, _telemetrySynchronizer, _config, splitAPI); Thread t = new Thread(syncManager::incomingPushStatusHandler); t.start(); messages.offer(PushManager.Status.STREAMING_READY); @@ -77,9 +111,12 @@ public void onStreamingAvailable() throws InterruptedException { public void onStreamingDisabled() throws InterruptedException { TelemetryStorage telemetryStorage = new InMemoryTelemetryStorage(); LinkedBlockingQueue messsages = new LinkedBlockingQueue<>(); - TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetrySynchronizer.class); - SplitClientConfig config = Mockito.mock(SplitClientConfig.class); - SyncManagerImp syncManager = new SyncManagerImp(true, _synchronizer, _pushManager, messsages, BACKOFF_BASE, _gates, telemetryStorage, telemetrySynchronizer, config); + SplitTasks splitTasks = 
SplitTasks.build(_splitSynchronizationTask, _segmentSynchronizationTaskImp, + _impressionsManager, _eventsTask, _telemetrySyncTask, _uniqueKeysTracker); + SplitAPI splitAPI = Mockito.mock(SplitAPI.class); + + SyncManagerImp syncManager = new SyncManagerImp(splitTasks, true, _synchronizer, _pushManager, messsages, + _gates, telemetryStorage, _telemetrySynchronizer, _config, splitAPI); Thread t = new Thread(syncManager::incomingPushStatusHandler); t.start(); messsages.offer(PushManager.Status.STREAMING_DOWN); @@ -94,9 +131,11 @@ public void onStreamingDisabled() throws InterruptedException { public void onStreamingShutdown() throws InterruptedException { TelemetryStorage telemetryStorage = new InMemoryTelemetryStorage(); LinkedBlockingQueue messsages = new LinkedBlockingQueue<>(); - TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetrySynchronizer.class); - SplitClientConfig config = Mockito.mock(SplitClientConfig.class); - SyncManagerImp syncManager = new SyncManagerImp(true, _synchronizer, _pushManager, messsages, BACKOFF_BASE, _gates, telemetryStorage, telemetrySynchronizer, config); + SplitTasks splitTasks = SplitTasks.build(_splitSynchronizationTask, _segmentSynchronizationTaskImp, + _impressionsManager, _eventsTask, _telemetrySyncTask, _uniqueKeysTracker); + SplitAPI splitAPI = Mockito.mock(SplitAPI.class); + SyncManagerImp syncManager = new SyncManagerImp(splitTasks, true, _synchronizer, _pushManager, messsages, + _gates, telemetryStorage, _telemetrySynchronizer, _config, splitAPI); Thread t = new Thread(syncManager::incomingPushStatusHandler); t.start(); messsages.offer(PushManager.Status.STREAMING_OFF); @@ -106,12 +145,15 @@ public void onStreamingShutdown() throws InterruptedException { } @Test - public void onConnected() throws InterruptedException { + public void onConnected() throws InterruptedException, IOException { TelemetryStorage telemetryStorage = new InMemoryTelemetryStorage(); LinkedBlockingQueue messsages = new 
LinkedBlockingQueue<>(); - TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetrySynchronizer.class); - SplitClientConfig config = Mockito.mock(SplitClientConfig.class); - SyncManagerImp syncManager = new SyncManagerImp(true, _synchronizer, _pushManager, messsages, BACKOFF_BASE, _gates, telemetryStorage, telemetrySynchronizer, config); + SplitTasks splitTasks = SplitTasks.build(_splitSynchronizationTask, _segmentSynchronizationTaskImp, + _impressionsManager, _eventsTask, _telemetrySyncTask, _uniqueKeysTracker); + SplitAPI splitAPI = Mockito.mock(SplitAPI.class); + + SyncManagerImp syncManager = new SyncManagerImp(splitTasks, true, _synchronizer, _pushManager, messsages, + _gates, telemetryStorage, _telemetrySynchronizer, _config, splitAPI); Thread t = new Thread(syncManager::incomingPushStatusHandler); t.start(); messsages.offer(PushManager.Status.STREAMING_READY); @@ -119,52 +161,73 @@ public void onConnected() throws InterruptedException { Mockito.verify(_synchronizer, Mockito.times(1)).stopPeriodicFetching(); Mockito.verify(_synchronizer, Mockito.times(1)).syncAll(); t.interrupt(); + + syncManager.shutdown(); + Mockito.verify(_synchronizer, Mockito.times(1)).stopPeriodicDataRecording(); } @Test - public void onDisconnect() throws InterruptedException { + public void onDisconnect() throws InterruptedException, IOException { TelemetryStorage telemetryStorage = new InMemoryTelemetryStorage(); LinkedBlockingQueue messsages = new LinkedBlockingQueue<>(); - TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetrySynchronizer.class); - SplitClientConfig config = Mockito.mock(SplitClientConfig.class); - SyncManagerImp syncManager = new SyncManagerImp(true, _synchronizer, _pushManager, messsages, BACKOFF_BASE, _gates, telemetryStorage, telemetrySynchronizer, config); + SplitTasks splitTasks = SplitTasks.build(_splitSynchronizationTask, _segmentSynchronizationTaskImp, + _impressionsManager, _eventsTask, _telemetrySyncTask, _uniqueKeysTracker); + 
SplitAPI splitAPI = Mockito.mock(SplitAPI.class); + + SyncManagerImp syncManager = new SyncManagerImp(splitTasks, true, _synchronizer, _pushManager, messsages, + _gates, telemetryStorage, _telemetrySynchronizer, _config, splitAPI); Thread t = new Thread(syncManager::incomingPushStatusHandler); t.start(); messsages.offer(PushManager.Status.STREAMING_OFF); Thread.sleep(500); Mockito.verify(_synchronizer, Mockito.times(1)).startPeriodicFetching(); t.interrupt(); + + syncManager.shutdown(); + Mockito.verify(_synchronizer, Mockito.times(1)).stopPeriodicDataRecording(); } @Test - public void onDisconnectAndReconnect() throws InterruptedException { // Check with mauro. reconnect should call pushManager.start again, right? + public void onDisconnectAndReconnect() throws InterruptedException, IOException { // Check with mauro. reconnect should call pushManager.start again, right? TelemetryStorage telemetryStorage = new InMemoryTelemetryStorage(); LinkedBlockingQueue messsages = new LinkedBlockingQueue<>(); - TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetrySynchronizer.class); - SplitClientConfig config = Mockito.mock(SplitClientConfig.class); + SplitTasks splitTasks = SplitTasks.build(_splitSynchronizationTask, _segmentSynchronizationTaskImp, + _impressionsManager, _eventsTask, _telemetrySyncTask, _uniqueKeysTracker); + SplitAPI splitAPI = Mockito.mock(SplitAPI.class); + Mockito.when(_synchronizer.syncAll()).thenReturn(true); - SyncManagerImp syncManager = new SyncManagerImp(true, _synchronizer, _pushManager, messsages, BACKOFF_BASE, _gates, telemetryStorage, telemetrySynchronizer, config); + SyncManagerImp syncManager = new SyncManagerImp(splitTasks, true, _synchronizer, _pushManager, messsages, + _gates, telemetryStorage, _telemetrySynchronizer, _config, splitAPI); syncManager.start(); messsages.offer(PushManager.Status.STREAMING_BACKOFF); Thread.sleep(1200); Mockito.verify(_synchronizer, Mockito.times(1)).startPeriodicFetching(); 
Mockito.verify(_synchronizer, Mockito.times(1)).syncAll(); Mockito.verify(_pushManager, Mockito.times(2)).start(); + + syncManager.shutdown(); + Mockito.verify(_synchronizer, Mockito.times(1)).stopPeriodicDataRecording(); } @Test - public void syncAllRetryThenShouldStartPolling() throws InterruptedException { + public void syncAllRetryThenShouldStartPolling() throws InterruptedException, IOException { TelemetryStorage telemetryStorage = new InMemoryTelemetryStorage(); - TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetrySynchronizer.class); - SplitClientConfig config = Mockito.mock(SplitClientConfig.class); + SplitTasks splitTasks = SplitTasks.build(_splitSynchronizationTask, _segmentSynchronizationTaskImp, + _impressionsManager, _eventsTask, _telemetrySyncTask, _uniqueKeysTracker); + SplitAPI splitAPI = Mockito.mock(SplitAPI.class); + Mockito.when(_synchronizer.syncAll()).thenReturn(false).thenReturn(true); - SyncManagerImp syncManager = new SyncManagerImp(false, _synchronizer, _pushManager, new LinkedBlockingQueue<>(), BACKOFF_BASE, _gates, telemetryStorage, telemetrySynchronizer, config); + SyncManagerImp syncManager = new SyncManagerImp(splitTasks, false, _synchronizer, _pushManager, new LinkedBlockingQueue<>(), + _gates, telemetryStorage, _telemetrySynchronizer, _config, splitAPI); syncManager.start(); Thread.sleep(2000); Mockito.verify(_synchronizer, Mockito.times(1)).startPeriodicFetching(); Mockito.verify(_synchronizer, Mockito.times(2)).syncAll(); Mockito.verify(_pushManager, Mockito.times(0)).start(); Mockito.verify(_gates, Mockito.times(1)).sdkInternalReady(); - Mockito.verify(telemetrySynchronizer, Mockito.times(1)).synchronizeConfig(Mockito.anyObject(), Mockito.anyLong(), Mockito.anyObject(), Mockito.anyObject()); + Mockito.verify(_telemetrySynchronizer, Mockito.times(1)).synchronizeConfig(Mockito.anyObject(), Mockito.anyLong(), Mockito.anyObject(), Mockito.anyObject()); + + syncManager.shutdown(); + Mockito.verify(_synchronizer, 
Mockito.times(1)).stopPeriodicDataRecording(); } -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/common/SynchronizerTest.java b/client/src/test/java/io/split/engine/common/SynchronizerTest.java index 91bafca7d..0ce439c7f 100644 --- a/client/src/test/java/io/split/engine/common/SynchronizerTest.java +++ b/client/src/test/java/io/split/engine/common/SynchronizerTest.java @@ -1,27 +1,34 @@ package io.split.engine.common; +import io.split.client.events.EventsTask; +import io.split.client.impressions.ImpressionsManager; +import io.split.client.impressions.UniqueKeysTracker; +import io.split.client.interceptors.FlagSetsFilter; +import io.split.client.interceptors.FlagSetsFilterImpl; import io.split.engine.segments.SegmentChangeFetcher; import io.split.engine.segments.SegmentSynchronizationTaskImp; import io.split.storages.*; import io.split.storages.memory.InMemoryCacheImp; -import io.split.engine.SDKReadinessGates; import io.split.engine.experiments.FetchResult; import io.split.engine.experiments.SplitFetcherImp; import io.split.engine.experiments.SplitSynchronizationTask; import io.split.engine.segments.SegmentFetcher; import io.split.engine.segments.SegmentSynchronizationTask; -import io.split.storages.pluggable.adapters.UserCustomSplitAdapterConsumer; import io.split.telemetry.storage.TelemetryRuntimeProducer; +import io.split.telemetry.synchronizer.TelemetrySyncTask; import org.junit.Assert; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; +import org.mockito.internal.matchers.Any; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.HashSet; import java.util.List; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -29,29 +36,41 @@ import static org.mockito.Mockito.when; public class SynchronizerTest { + private static final 
FlagSetsFilter FLAG_SETS_FILTER = new FlagSetsFilterImpl(new HashSet<>()); private SplitSynchronizationTask _refreshableSplitFetcherTask; private SegmentSynchronizationTask _segmentFetcher; private SplitFetcherImp _splitFetcher; private SplitCacheProducer _splitCacheProducer; + private RuleBasedSegmentCacheProducer _ruleBasedSegmentCacheProducer; private Synchronizer _synchronizer; private SegmentCacheProducer _segmentCacheProducer; - private SDKReadinessGates _gates; + private SplitTasks _splitTasks; + private TelemetrySyncTask _telemetrySyncTask; + private ImpressionsManager _impressionsManager; + private EventsTask _eventsTask; + private UniqueKeysTracker _uniqueKeysTracker; @Before public void beforeMethod() { _refreshableSplitFetcherTask = Mockito.mock(SplitSynchronizationTask.class); _segmentFetcher = Mockito.mock(SegmentSynchronizationTask.class); _splitFetcher = Mockito.mock(SplitFetcherImp.class); + _ruleBasedSegmentCacheProducer = Mockito.mock(RuleBasedSegmentCacheProducer.class); _splitCacheProducer = Mockito.mock(SplitCacheProducer.class); _segmentCacheProducer = Mockito.mock(SegmentCache.class); - _gates = Mockito.mock(SDKReadinessGates.class); + _telemetrySyncTask = Mockito.mock(TelemetrySyncTask.class); + _impressionsManager = Mockito.mock(ImpressionsManager.class); + _eventsTask = Mockito.mock(EventsTask.class); + _uniqueKeysTracker = Mockito.mock(UniqueKeysTracker.class); - _synchronizer = new SynchronizerImp(_refreshableSplitFetcherTask, _splitFetcher, _segmentFetcher, _splitCacheProducer, _segmentCacheProducer, 50, 10, 5, false, _gates); + _splitTasks = SplitTasks.build(_refreshableSplitFetcherTask, _segmentFetcher, _impressionsManager, _eventsTask, _telemetrySyncTask, _uniqueKeysTracker); + + _synchronizer = new SynchronizerImp(_splitTasks, _splitFetcher, _splitCacheProducer, _segmentCacheProducer, _ruleBasedSegmentCacheProducer, 50, 10, 5, new HashSet<>()); } @Test public void syncAll() throws InterruptedException { - 
Mockito.when(_splitFetcher.forceRefresh(Mockito.anyObject())).thenReturn(new FetchResult(true, new HashSet<>())); + Mockito.when(_splitFetcher.forceRefresh(Mockito.anyObject())).thenReturn(new FetchResult(true, false, new HashSet<>())); Mockito.when(_segmentFetcher.fetchAllSynchronous()).thenReturn(true); _synchronizer.syncAll(); @@ -63,15 +82,15 @@ public void syncAll() throws InterruptedException { @Test public void testSyncAllSegments() throws InterruptedException, NoSuchFieldException, IllegalAccessException { SegmentSynchronizationTask segmentSynchronizationTask = new SegmentSynchronizationTaskImp(Mockito.mock(SegmentChangeFetcher.class), - 20L, 1, new SDKReadinessGates(), _segmentCacheProducer, Mockito.mock(TelemetryRuntimeProducer.class), - Mockito.mock(SplitCacheConsumer.class)); + 20L, 1, _segmentCacheProducer, Mockito.mock(TelemetryRuntimeProducer.class), + Mockito.mock(SplitCacheConsumer.class), null, Mockito.mock(RuleBasedSegmentCache.class)); Field synchronizerSegmentFetcher = SynchronizerImp.class.getDeclaredField("_segmentSynchronizationTaskImp"); synchronizerSegmentFetcher.setAccessible(true); Field modifiersField = Field.class.getDeclaredField("modifiers"); modifiersField.setAccessible(true); modifiersField.setInt(synchronizerSegmentFetcher, synchronizerSegmentFetcher.getModifiers() & ~Modifier.FINAL); synchronizerSegmentFetcher.set(_synchronizer, segmentSynchronizationTask); - Mockito.when(_splitFetcher.forceRefresh(Mockito.anyObject())).thenReturn(new FetchResult(true, Stream.of("Segment1", "Segment2").collect(Collectors.toSet()))); + Mockito.when(_splitFetcher.forceRefresh(Mockito.anyObject())).thenReturn(new FetchResult(true, false, Stream.of("Segment1", "Segment2").collect(Collectors.toSet()))); Mockito.when(_segmentFetcher.fetchAllSynchronous()).thenReturn(true); _synchronizer.syncAll(); @@ -85,8 +104,8 @@ public void testSyncAllSegments() throws InterruptedException, NoSuchFieldExcept public void startPeriodicFetching() { 
_synchronizer.startPeriodicFetching(); - Mockito.verify(_refreshableSplitFetcherTask, Mockito.times(1)).startPeriodicFetching(); - Mockito.verify(_segmentFetcher, Mockito.times(1)).startPeriodicFetching(); + Mockito.verify(_refreshableSplitFetcherTask, Mockito.times(1)).start(); + Mockito.verify(_segmentFetcher, Mockito.times(1)).start(); } @Test @@ -100,8 +119,8 @@ public void stopPeriodicFetching() { @Test public void streamingRetryOnSplit() { when(_splitCacheProducer.getChangeNumber()).thenReturn(0l).thenReturn(0l).thenReturn(1l); - when(_splitFetcher.forceRefresh(Mockito.anyObject())).thenReturn(new FetchResult(true, new HashSet<>())); - _synchronizer.refreshSplits(1l); + when(_splitFetcher.forceRefresh(Mockito.anyObject())).thenReturn(new FetchResult(true, false, new HashSet<>())); + _synchronizer.refreshSplits(1L, 0L); Mockito.verify(_splitCacheProducer, Mockito.times(3)).getChangeNumber(); } @@ -117,19 +136,33 @@ public void streamingRetryOnSegment() { } @Test - public void testCDNBypassIsRequestedAfterNFailures() throws NoSuchFieldException, IllegalAccessException { + public void streamingRetryOnSplitAndSegment() { + when(_splitCacheProducer.getChangeNumber()).thenReturn(0l).thenReturn(0l).thenReturn(1l); + Set segments = new HashSet<>(); + segments.add("segment1"); + segments.add("segment2"); + when(_splitFetcher.forceRefresh(Mockito.anyObject())).thenReturn(new FetchResult(true, false, segments)); + SegmentFetcher fetcher = Mockito.mock(SegmentFetcher.class); + when(_segmentCacheProducer.getChangeNumber(Mockito.anyString())).thenReturn(0l).thenReturn(0l).thenReturn(1l); + when(_segmentFetcher.getFetcher(Mockito.anyString())).thenReturn(fetcher); + _synchronizer.refreshSplits(1L, 0L); + + Mockito.verify(_splitCacheProducer, Mockito.times(3)).getChangeNumber(); + Mockito.verify(_segmentFetcher, Mockito.times(2)).getFetcher(Mockito.anyString()); + } - SplitCache cache = new InMemoryCacheImp(); - Synchronizer imp = new 
SynchronizerImp(_refreshableSplitFetcherTask, + @Test + public void testCDNBypassIsRequestedAfterNFailures() { + SplitCache cache = new InMemoryCacheImp(FLAG_SETS_FILTER); + Synchronizer imp = new SynchronizerImp(_splitTasks, _splitFetcher, - _segmentFetcher, cache, _segmentCacheProducer, + _ruleBasedSegmentCacheProducer, 50, 3, 1, - true, - Mockito.mock(SDKReadinessGates.class)); + new HashSet<>()); ArgumentCaptor optionsCaptor = ArgumentCaptor.forClass(FetchOptions.class); AtomicInteger calls = new AtomicInteger(); @@ -138,10 +171,10 @@ public void testCDNBypassIsRequestedAfterNFailures() throws NoSuchFieldException switch (calls.get()) { case 4: cache.setChangeNumber(123); } - return new FetchResult(true, new HashSet<>()); + return new FetchResult(true, false, new HashSet<>()); }).when(_splitFetcher).forceRefresh(optionsCaptor.capture()); - imp.refreshSplits(123); + imp.refreshSplits(123L, 0L); List options = optionsCaptor.getAllValues(); Assert.assertEquals(options.size(), 4); @@ -153,18 +186,16 @@ public void testCDNBypassIsRequestedAfterNFailures() throws NoSuchFieldException @Test public void testCDNBypassRequestLimitAndBackoff() throws NoSuchFieldException, IllegalAccessException { - - SplitCache cache = new InMemoryCacheImp(); - Synchronizer imp = new SynchronizerImp(_refreshableSplitFetcherTask, + SplitCache cache = new InMemoryCacheImp(FLAG_SETS_FILTER); + Synchronizer imp = new SynchronizerImp(_splitTasks, _splitFetcher, - _segmentFetcher, cache, _segmentCacheProducer, + _ruleBasedSegmentCacheProducer, 50, 3, 1, - true, - Mockito.mock(SDKReadinessGates.class)); + new HashSet<>()); ArgumentCaptor optionsCaptor = ArgumentCaptor.forClass(FetchOptions.class); AtomicInteger calls = new AtomicInteger(); @@ -185,7 +216,7 @@ public void testCDNBypassRequestLimitAndBackoff() throws NoSuchFieldException, I backoffBase.set(imp, 1); // 1ms long before = System.currentTimeMillis(); - imp.refreshSplits(1); + imp.refreshSplits(1L, 0L); long after = 
System.currentTimeMillis(); List options = optionsCaptor.getAllValues(); @@ -211,18 +242,16 @@ public void testCDNBypassRequestLimitAndBackoff() throws NoSuchFieldException, I @Test public void testCDNBypassRequestLimitAndForSegmentsBackoff() throws NoSuchFieldException, IllegalAccessException { - - SplitCache cache = new InMemoryCacheImp(); - Synchronizer imp = new SynchronizerImp(_refreshableSplitFetcherTask, + SplitCache cache = new InMemoryCacheImp(FLAG_SETS_FILTER); + Synchronizer imp = new SynchronizerImp(_splitTasks, _splitFetcher, - _segmentFetcher, cache, _segmentCacheProducer, + _ruleBasedSegmentCacheProducer, 50, 3, 1, - true, - Mockito.mock(SDKReadinessGates.class)); + new HashSet<>()); SegmentFetcher fetcher = Mockito.mock(SegmentFetcher.class); when(_segmentFetcher.getFetcher("someSegment")).thenReturn(fetcher); @@ -246,7 +275,7 @@ public void testCDNBypassRequestLimitAndForSegmentsBackoff() throws NoSuchFieldE backoffBase.set(imp, 1); // 1ms long before = System.currentTimeMillis(); - imp.refreshSegment("someSegment",1); + imp.refreshSegment("someSegment",1L); long after = System.currentTimeMillis(); List options = optionsCaptor.getAllValues(); @@ -269,4 +298,45 @@ public void testCDNBypassRequestLimitAndForSegmentsBackoff() throws NoSuchFieldE long minDiffExpected = 1 + 2 + 4 + 8 + 16 + 32 + 64 + 128 + 256; Assert.assertTrue((after - before) > minDiffExpected); } -} + + @Test + public void testDataRecording(){ + SplitCache cache = new InMemoryCacheImp(FLAG_SETS_FILTER); + Synchronizer imp = new SynchronizerImp(_splitTasks, + _splitFetcher, + cache, + _segmentCacheProducer, + _ruleBasedSegmentCacheProducer, + 50, + 3, + 1, + new HashSet<>()); + imp.startPeriodicDataRecording(); + + Mockito.verify(_eventsTask, Mockito.times(1)).start(); + Mockito.verify(_impressionsManager, Mockito.times(1)).start(); + Mockito.verify(_uniqueKeysTracker, Mockito.times(1)).start(); + Mockito.verify(_telemetrySyncTask, Mockito.times(1)).startScheduledTask(); + + 
imp.stopPeriodicDataRecording(); + + Mockito.verify(_eventsTask, Mockito.times(1)).close(); + Mockito.verify(_impressionsManager, Mockito.times(1)).close(); + Mockito.verify(_uniqueKeysTracker, Mockito.times(1)).stop(); + Mockito.verify(_telemetrySyncTask, Mockito.times(1)).stopScheduledTask(); + } + + @Test + public void skipSyncWhenChangeNumbersAreZero() { + _synchronizer.refreshSplits(0L, 0L); + Mockito.verify(_splitFetcher, Mockito.times(0)).forceRefresh(Mockito.anyObject()); + } + + @Test + public void testSyncRuleBasedSegment() { + when(_ruleBasedSegmentCacheProducer.getChangeNumber()).thenReturn(-1l).thenReturn(-1l).thenReturn(123l); + when(_splitFetcher.forceRefresh(Mockito.anyObject())).thenReturn(new FetchResult(true, false, new HashSet<>())); + _synchronizer.refreshSplits(0L, 123L); + Mockito.verify(_splitFetcher, Mockito.times(2)).forceRefresh(Mockito.anyObject()); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/evaluator/EvaluatorIntegrationTest.java b/client/src/test/java/io/split/engine/evaluator/EvaluatorIntegrationTest.java index 5e7a998d4..5b0a024a6 100644 --- a/client/src/test/java/io/split/engine/evaluator/EvaluatorIntegrationTest.java +++ b/client/src/test/java/io/split/engine/evaluator/EvaluatorIntegrationTest.java @@ -2,23 +2,32 @@ import com.google.common.collect.Lists; import io.split.client.dtos.ConditionType; +import io.split.client.dtos.FallbackTreatmentCalculatorImp; import io.split.client.dtos.MatcherCombiner; import io.split.client.dtos.Partition; +import io.split.client.interceptors.FlagSetsFilter; +import io.split.client.interceptors.FlagSetsFilterImpl; import io.split.engine.experiments.ParsedCondition; +import io.split.engine.experiments.ParsedRuleBasedSegment; import io.split.engine.experiments.ParsedSplit; import io.split.engine.matchers.AttributeMatcher; import io.split.engine.matchers.CombiningMatcher; +import io.split.engine.matchers.PrerequisitesMatcher; +import 
io.split.engine.matchers.RuleBasedSegmentMatcher; import io.split.engine.matchers.strings.EndsWithAnyOfMatcher; import io.split.engine.matchers.strings.WhitelistMatcher; +import io.split.storages.RuleBasedSegmentCache; import io.split.storages.SegmentCache; import io.split.storages.SplitCache; import io.split.storages.memory.InMemoryCacheImp; +import io.split.storages.memory.RuleBasedSegmentCacheInMemoryImp; import io.split.storages.memory.SegmentCacheInMemoryImpl; import org.junit.Assert; import org.junit.Test; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -30,6 +39,7 @@ public class EvaluatorIntegrationTest { private static final String TEST_LABEL_VALUE_WHITELIST = "test label whitelist"; private static final String TEST_LABEL_VALUE_ROLL_OUT = "test label roll out"; private static final String ON_TREATMENT = "on"; + private static final String OFF_TREATMENT = "off"; @Test public void evaluateFeatureWithWhitelistShouldReturnOn() { @@ -149,34 +159,62 @@ public void evaluateFeaturesSplitsNull() { Map result = evaluator.evaluateFeatures("mauro@test.io", null, null, null); } + @Test + public void evaluateFeatureWithRuleBasedSegmentMatcher() { + Evaluator evaluator = buildEvaluatorAndLoadCache(false, 100); + + EvaluatorImp.TreatmentLabelAndChangeNumber result = evaluator.evaluateFeature("mauro@test.io", null, "split_5", null); + Assert.assertEquals(ON_TREATMENT, result.treatment); + + result = evaluator.evaluateFeature("admin", null, "split_5", null); + Assert.assertEquals(OFF_TREATMENT, result.treatment); + } + private Evaluator buildEvaluatorAndLoadCache(boolean killed, int trafficAllocation) { - SplitCache splitCache = new InMemoryCacheImp(); + FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(new HashSet<>()); + SplitCache splitCache = new InMemoryCacheImp(flagSetsFilter); SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); - Evaluator 
evaluator = new EvaluatorImp(splitCache, segmentCache); + RuleBasedSegmentCache ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); + FallbackTreatmentCalculatorImp fallbackTreatmentCalculatorImp = new FallbackTreatmentCalculatorImp(null); + Evaluator evaluator = new EvaluatorImp(splitCache, segmentCache, ruleBasedSegmentCache, fallbackTreatmentCalculatorImp); Partition partition = new Partition(); partition.treatment = ON_TREATMENT; partition.size = 100; + Partition partitionOff = new Partition(); + partitionOff.treatment = OFF_TREATMENT; + partitionOff.size = 100; + List partitions = Lists.newArrayList(partition); AttributeMatcher whiteListMatcher = AttributeMatcher.vanilla(new WhitelistMatcher(Lists.newArrayList("test_1", "admin"))); AttributeMatcher endsWithMatcher = AttributeMatcher.vanilla(new EndsWithAnyOfMatcher(Lists.newArrayList("@test.io", "@mail.io"))); + AttributeMatcher ruleBasedSegmentMatcher = AttributeMatcher.vanilla(new RuleBasedSegmentMatcher("sample_rule_based_segment")); CombiningMatcher whitelistCombiningMatcher = new CombiningMatcher(MatcherCombiner.AND, Lists.newArrayList(whiteListMatcher)); CombiningMatcher endsWithCombiningMatcher = new CombiningMatcher(MatcherCombiner.AND, Lists.newArrayList(endsWithMatcher)); + CombiningMatcher ruleBasedSegmentCombinerMatcher = new CombiningMatcher(MatcherCombiner.AND, Lists.newArrayList(ruleBasedSegmentMatcher)); ParsedCondition whitelistCondition = new ParsedCondition(ConditionType.WHITELIST, whitelistCombiningMatcher, partitions, TEST_LABEL_VALUE_WHITELIST); ParsedCondition rollOutCondition = new ParsedCondition(ConditionType.ROLLOUT, endsWithCombiningMatcher, partitions, TEST_LABEL_VALUE_ROLL_OUT); + ParsedCondition ruleBasedSegmentCondition = new ParsedCondition(ConditionType.ROLLOUT, ruleBasedSegmentCombinerMatcher, Lists.newArrayList(partitionOff), TEST_LABEL_VALUE_ROLL_OUT); List conditions = Lists.newArrayList(whitelistCondition, rollOutCondition); + List conditionsForRBS = 
Lists.newArrayList(ruleBasedSegmentCondition, rollOutCondition); + + ParsedSplit parsedSplit1 = new ParsedSplit("split_1", 0, false, DEFAULT_TREATMENT_VALUE, conditions, TRAFFIC_TYPE_VALUE, 223366551, 100, 0, 2, null, new HashSet<>(), true, new PrerequisitesMatcher(null)); + ParsedSplit parsedSplit2 = new ParsedSplit("split_2", 0, true, DEFAULT_TREATMENT_VALUE, conditions, TRAFFIC_TYPE_VALUE, 223366552, 100, 0, 2, null, new HashSet<>(), true, new PrerequisitesMatcher(null)); + ParsedSplit parsedSplit3 = new ParsedSplit("split_3", 0, false, DEFAULT_TREATMENT_VALUE, conditions, TRAFFIC_TYPE_VALUE, 223366554, 100, 0, 2, null, new HashSet<>(), true, new PrerequisitesMatcher(null)); + ParsedSplit parsedSplit4 = new ParsedSplit("split_test", 0, killed, DEFAULT_TREATMENT_VALUE, conditions, TRAFFIC_TYPE_VALUE, 223366555, trafficAllocation, 0, 2, null, new HashSet<>(), true, new PrerequisitesMatcher(null)); + ParsedSplit parsedSplit5 = new ParsedSplit("split_5", 0, false, DEFAULT_TREATMENT_VALUE, conditionsForRBS, TRAFFIC_TYPE_VALUE, 223366554, 100, 0, 2, null, new HashSet<>(), true, new PrerequisitesMatcher(null)); + splitCache.putMany(Stream.of(parsedSplit1, parsedSplit2, parsedSplit3, parsedSplit4, parsedSplit5).collect(Collectors.toList())); - ParsedSplit parsedSplit1 = new ParsedSplit("split_1", 0, false, DEFAULT_TREATMENT_VALUE, conditions, TRAFFIC_TYPE_VALUE, 223366551, 100, 0, 2, null); - ParsedSplit parsedSplit2 = new ParsedSplit("split_2", 0, true, DEFAULT_TREATMENT_VALUE, conditions, TRAFFIC_TYPE_VALUE, 223366552, 100, 0, 2, null); - ParsedSplit parsedSplit3 = new ParsedSplit("split_3", 0, false, DEFAULT_TREATMENT_VALUE, conditions, TRAFFIC_TYPE_VALUE, 223366554, 100, 0, 2, null); - ParsedSplit parsedSplit4 = new ParsedSplit("split_test", 0, killed, DEFAULT_TREATMENT_VALUE, conditions, TRAFFIC_TYPE_VALUE, 223366555, trafficAllocation, 0, 2, null); + ParsedRuleBasedSegment parsedRuleBasedSegment = new ParsedRuleBasedSegment("sample_rule_based_segment", + 
Lists.newArrayList(new ParsedCondition(ConditionType.WHITELIST, whitelistCombiningMatcher, null, TEST_LABEL_VALUE_WHITELIST)),"user", + 123, Lists.newArrayList("mauro@test.io","gaston@test.io"), Lists.newArrayList()); + ruleBasedSegmentCache.update(Lists.newArrayList(parsedRuleBasedSegment), null, 123); - splitCache.putMany(Stream.of(parsedSplit1, parsedSplit2, parsedSplit3, parsedSplit4).collect(Collectors.toList())); return evaluator; } -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/evaluator/EvaluatorTest.java b/client/src/test/java/io/split/engine/evaluator/EvaluatorTest.java index f54d1605a..33ebf6d65 100644 --- a/client/src/test/java/io/split/engine/evaluator/EvaluatorTest.java +++ b/client/src/test/java/io/split/engine/evaluator/EvaluatorTest.java @@ -1,18 +1,23 @@ package io.split.engine.evaluator; -import io.split.client.dtos.ConditionType; -import io.split.client.dtos.Partition; +import io.split.client.dtos.*; +import io.split.client.utils.Json; import io.split.engine.experiments.ParsedCondition; import io.split.engine.experiments.ParsedSplit; import io.split.engine.matchers.CombiningMatcher; +import io.split.engine.matchers.PrerequisitesMatcher; +import io.split.storages.RuleBasedSegmentCacheConsumer; import io.split.storages.SegmentCacheConsumer; import io.split.storages.SplitCacheConsumer; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; @@ -30,6 +35,7 @@ public class EvaluatorTest { private SplitCacheConsumer _splitCacheConsumer; private SegmentCacheConsumer _segmentCacheConsumer; + private RuleBasedSegmentCacheConsumer _ruleBasedSegmentCacheConsumer; private Evaluator _evaluator; private CombiningMatcher _matcher; private Map _configurations; @@ -41,7 +47,8 @@ public class EvaluatorTest { public void before() { 
_splitCacheConsumer = Mockito.mock(SplitCacheConsumer.class); _segmentCacheConsumer = Mockito.mock(SegmentCacheConsumer.class); - _evaluator = new EvaluatorImp(_splitCacheConsumer, _segmentCacheConsumer); + _ruleBasedSegmentCacheConsumer = Mockito.mock(RuleBasedSegmentCacheConsumer.class); + _evaluator = new EvaluatorImp(_splitCacheConsumer, _segmentCacheConsumer, _ruleBasedSegmentCacheConsumer, new FallbackTreatmentCalculatorImp(null)); _matcher = Mockito.mock(CombiningMatcher.class); _evaluationContext = Mockito.mock(EvaluationContext.class); @@ -62,7 +69,7 @@ public void evaluateWhenSplitNameDoesNotExistReturnControl() { @Test public void evaluateWhenSplitIsKilledReturnDefaultTreatment() { - ParsedSplit split = ParsedSplit.createParsedSplitForTests(SPLIT_NAME, 0, true, DEFAULT_TREATMENT_VALUE, _conditions, TRAFFIC_TYPE_VALUE, CHANGE_NUMBER, 2); + ParsedSplit split = ParsedSplit.createParsedSplitForTests(SPLIT_NAME, 0, true, DEFAULT_TREATMENT_VALUE, _conditions, TRAFFIC_TYPE_VALUE, CHANGE_NUMBER, 2, new HashSet<>(), true, new PrerequisitesMatcher(null)); Mockito.when(_splitCacheConsumer.get(SPLIT_NAME)).thenReturn(split); EvaluatorImp.TreatmentLabelAndChangeNumber result = _evaluator.evaluateFeature(MATCHING_KEY, BUCKETING_KEY, SPLIT_NAME, null); @@ -74,7 +81,7 @@ public void evaluateWhenSplitIsKilledReturnDefaultTreatment() { @Test public void evaluateWithoutConditionsReturnDefaultTreatment() { - ParsedSplit split = ParsedSplit.createParsedSplitForTests(SPLIT_NAME, 0, false, DEFAULT_TREATMENT_VALUE, _conditions, TRAFFIC_TYPE_VALUE, CHANGE_NUMBER, 2); + ParsedSplit split = ParsedSplit.createParsedSplitForTests(SPLIT_NAME, 0, false, DEFAULT_TREATMENT_VALUE, _conditions, TRAFFIC_TYPE_VALUE, CHANGE_NUMBER, 2, new HashSet<>(), true, new PrerequisitesMatcher(null)); Mockito.when(_splitCacheConsumer.get(SPLIT_NAME)).thenReturn(split); EvaluatorImp.TreatmentLabelAndChangeNumber result = _evaluator.evaluateFeature(MATCHING_KEY, BUCKETING_KEY, SPLIT_NAME, null); @@ -93,7 
+100,7 @@ public void evaluateWithRollOutConditionBucketIsBiggerTrafficAllocationReturnDef ParsedCondition condition = new ParsedCondition(ConditionType.ROLLOUT, _matcher,_partitions, TEST_LABEL_VALUE); _conditions.add(condition); - ParsedSplit split = new ParsedSplit(SPLIT_NAME, 0, false, DEFAULT_TREATMENT_VALUE, _conditions, TRAFFIC_TYPE_VALUE, CHANGE_NUMBER, 10, 12, 2, _configurations); + ParsedSplit split = new ParsedSplit(SPLIT_NAME, 0, false, DEFAULT_TREATMENT_VALUE, _conditions, TRAFFIC_TYPE_VALUE, CHANGE_NUMBER, 10, 12, 2, _configurations, new HashSet<>(), true, new PrerequisitesMatcher(null)); Mockito.when(_splitCacheConsumer.get(SPLIT_NAME)).thenReturn(split); Mockito.when(condition.matcher().match(MATCHING_KEY, BUCKETING_KEY, null, _evaluationContext)).thenReturn(true); @@ -114,7 +121,7 @@ public void evaluateWithRollOutConditionTrafficAllocationIsBiggerBucketReturnTre ParsedCondition condition = new ParsedCondition(ConditionType.ROLLOUT, _matcher, _partitions, TEST_LABEL_VALUE); _conditions.add(condition); - ParsedSplit split = new ParsedSplit(SPLIT_NAME, 0, false, DEFAULT_TREATMENT_VALUE, _conditions, TRAFFIC_TYPE_VALUE, CHANGE_NUMBER, 60, 18, 2, _configurations); + ParsedSplit split = new ParsedSplit(SPLIT_NAME, 0, false, DEFAULT_TREATMENT_VALUE, _conditions, TRAFFIC_TYPE_VALUE, CHANGE_NUMBER, 60, 18, 2, _configurations, new HashSet<>(), true, new PrerequisitesMatcher(null)); Mockito.when(_splitCacheConsumer.get(SPLIT_NAME)).thenReturn(split); Mockito.when(condition.matcher().match(Mockito.anyString(), Mockito.anyString(), Mockito.anyObject(), Mockito.anyObject())).thenReturn(true); @@ -135,7 +142,7 @@ public void evaluateWithWhitelistConditionReturnTreatment() { ParsedCondition condition = new ParsedCondition(ConditionType.WHITELIST, _matcher, _partitions, "test whitelist label"); _conditions.add(condition); - ParsedSplit split = new ParsedSplit(SPLIT_NAME, 0, false, DEFAULT_TREATMENT_VALUE, _conditions, TRAFFIC_TYPE_VALUE, CHANGE_NUMBER, 60, 18, 2, 
_configurations); + ParsedSplit split = new ParsedSplit(SPLIT_NAME, 0, false, DEFAULT_TREATMENT_VALUE, _conditions, TRAFFIC_TYPE_VALUE, CHANGE_NUMBER, 60, 18, 2, _configurations, new HashSet<>(), true, new PrerequisitesMatcher(null)); Mockito.when(_splitCacheConsumer.get(SPLIT_NAME)).thenReturn(split); Mockito.when(condition.matcher().match(Mockito.anyString(), Mockito.anyString(), Mockito.anyObject(), Mockito.anyObject())).thenReturn(true); @@ -146,4 +153,147 @@ public void evaluateWithWhitelistConditionReturnTreatment() { assertEquals("test whitelist label", result.label); assertEquals(CHANGE_NUMBER, result.changeNumber); } -} + + @Test + public void evaluateWithSets() { + ParsedSplit split = ParsedSplit.createParsedSplitForTests(SPLIT_NAME, 0, false, DEFAULT_TREATMENT_VALUE, _conditions, TRAFFIC_TYPE_VALUE, CHANGE_NUMBER, 2, new HashSet<>(Arrays.asList("set1", "set2")), true, new PrerequisitesMatcher(null)); + List sets = new ArrayList<>(Arrays.asList("set1", "empty_set")); + Map> flagSets = new HashMap<>(); + flagSets.put("set1", new HashSet<>(Arrays.asList(SPLIT_NAME))); + flagSets.put("empty_set", null); + Mockito.when(_splitCacheConsumer.getNamesByFlagSets(sets)).thenReturn(flagSets); + Map parsedSplits = new HashMap<>(); + parsedSplits.put(SPLIT_NAME, split); + Mockito.when(_splitCacheConsumer.fetchMany(Arrays.asList(SPLIT_NAME))).thenReturn(parsedSplits); + + Map result = _evaluator.evaluateFeaturesByFlagSets(MATCHING_KEY, BUCKETING_KEY, sets, null); + + EvaluatorImp.TreatmentLabelAndChangeNumber treatmentLabelAndChangeNumber = result.get(SPLIT_NAME); + + assertEquals(DEFAULT_TREATMENT_VALUE, treatmentLabelAndChangeNumber.treatment); + assertEquals("default rule", treatmentLabelAndChangeNumber.label); + assertEquals(CHANGE_NUMBER, treatmentLabelAndChangeNumber.changeNumber); + } + + @Test + public void evaluateWithSetsNotHaveFlags() { + ParsedSplit split = ParsedSplit.createParsedSplitForTests(SPLIT_NAME, 0, false, DEFAULT_TREATMENT_VALUE, _conditions, 
TRAFFIC_TYPE_VALUE, CHANGE_NUMBER, 2, new HashSet<>(Arrays.asList("set1", "set2")), true, new PrerequisitesMatcher(null)); + List sets = new ArrayList<>(Arrays.asList("set2")); + Map> flagSets = new HashMap<>(); + Mockito.when(_splitCacheConsumer.getNamesByFlagSets(sets)).thenReturn(flagSets); + Map parsedSplits = new HashMap<>(); + Mockito.when(_splitCacheConsumer.fetchMany(Arrays.asList(SPLIT_NAME))).thenReturn(parsedSplits); + + Map result = _evaluator.evaluateFeaturesByFlagSets(MATCHING_KEY, BUCKETING_KEY, sets, null); + Assert.assertTrue(result.isEmpty()); + } + + @Test + public void evaluateWithPrerequisites() { + Partition partition = new Partition(); + partition.treatment = TREATMENT_VALUE; + partition.size = 100; + _partitions.add(partition); + ParsedCondition condition = new ParsedCondition(ConditionType.WHITELIST, _matcher, _partitions, "test whitelist label"); + _conditions.add(condition); + List prerequisites = Arrays.asList(Json.fromJson("{\"n\": \"split1\", \"ts\": [\"" + TREATMENT_VALUE + "\"]}", Prerequisites.class)); + + ParsedSplit split = new ParsedSplit(SPLIT_NAME, 0, false, DEFAULT_TREATMENT_VALUE, _conditions, TRAFFIC_TYPE_VALUE, CHANGE_NUMBER, 60, 18, 2, _configurations, new HashSet<>(), true, new PrerequisitesMatcher(prerequisites)); + ParsedSplit split1 = new ParsedSplit("split1", 0, false, DEFAULT_TREATMENT_VALUE, _conditions, TRAFFIC_TYPE_VALUE, CHANGE_NUMBER, 60, 18, 2, _configurations, new HashSet<>(), true, new PrerequisitesMatcher(null)); + + Mockito.when(_splitCacheConsumer.get(SPLIT_NAME)).thenReturn(split); + Mockito.when(_splitCacheConsumer.get("split1")).thenReturn(split1); + Mockito.when(condition.matcher().match(Mockito.anyString(), Mockito.anyString(), Mockito.anyObject(), Mockito.anyObject())).thenReturn(true); + + EvaluatorImp.TreatmentLabelAndChangeNumber result = _evaluator.evaluateFeature(MATCHING_KEY, BUCKETING_KEY, SPLIT_NAME, null); + assertEquals(TREATMENT_VALUE, result.treatment); + assertEquals("test whitelist 
label", result.label); + assertEquals(CHANGE_NUMBER, result.changeNumber); + + Mockito.when(condition.matcher().match(Mockito.anyString(), Mockito.anyString(), Mockito.anyObject(), Mockito.anyObject())).thenReturn(false); + result = _evaluator.evaluateFeature(MATCHING_KEY, BUCKETING_KEY, SPLIT_NAME, null); + assertEquals(DEFAULT_TREATMENT_VALUE, result.treatment); + assertEquals(Labels.PREREQUISITES_NOT_MET, result.label); + assertEquals(CHANGE_NUMBER, result.changeNumber); + + // if split is killed, label should be killed. + split = new ParsedSplit(SPLIT_NAME, 0, true, DEFAULT_TREATMENT_VALUE, _conditions, TRAFFIC_TYPE_VALUE, CHANGE_NUMBER, 60, 18, 2, _configurations, new HashSet<>(), true, new PrerequisitesMatcher(prerequisites)); + Mockito.when(_splitCacheConsumer.get(SPLIT_NAME)).thenReturn(split); + result = _evaluator.evaluateFeature(MATCHING_KEY, BUCKETING_KEY, SPLIT_NAME, null); + assertEquals(DEFAULT_TREATMENT_VALUE, result.treatment); + assertEquals(Labels.KILLED, result.label); + assertEquals(CHANGE_NUMBER, result.changeNumber); + } + + @Test + public void evaluateFallbackTreatmentWorks() { + Mockito.when(_splitCacheConsumer.get(SPLIT_NAME)).thenReturn(null); + FallbackTreatmentsConfiguration fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration(new FallbackTreatment("on")); + FallbackTreatmentCalculator fallbackTreatmentCalculator = new FallbackTreatmentCalculatorImp(fallbackTreatmentsConfiguration); + _evaluator = new EvaluatorImp(_splitCacheConsumer, _segmentCacheConsumer, _ruleBasedSegmentCacheConsumer, fallbackTreatmentCalculator); + + EvaluatorImp.TreatmentLabelAndChangeNumber result = _evaluator.evaluateFeature(MATCHING_KEY, BUCKETING_KEY, SPLIT_NAME, null); + assertEquals("on", result.treatment); + assertEquals("fallback - definition not found", result.label); + + ParsedSplit split = new ParsedSplit(SPLIT_NAME, 0, false, DEFAULT_TREATMENT_VALUE, _conditions, null, CHANGE_NUMBER, 60, 18, 2, _configurations, new HashSet<>(), false, 
null); + Mockito.when(_splitCacheConsumer.get(SPLIT_NAME)).thenReturn(split); + result = _evaluator.evaluateFeature(MATCHING_KEY, BUCKETING_KEY, SPLIT_NAME, null); + assertEquals("on", result.treatment); + assertEquals("fallback - exception", result.label); + + // using byflag only + Mockito.when(_splitCacheConsumer.get(SPLIT_NAME)).thenReturn(null); + Mockito.when(_splitCacheConsumer.get("another_name")).thenReturn(null); + fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration(new HashMap() {{ put(SPLIT_NAME, new FallbackTreatment("off")); }} ); + fallbackTreatmentCalculator = new FallbackTreatmentCalculatorImp(fallbackTreatmentsConfiguration); + _evaluator = new EvaluatorImp(_splitCacheConsumer, _segmentCacheConsumer, _ruleBasedSegmentCacheConsumer, fallbackTreatmentCalculator); + + result = _evaluator.evaluateFeature(MATCHING_KEY, BUCKETING_KEY, SPLIT_NAME, null); + assertEquals("off", result.treatment); + assertEquals("fallback - definition not found", result.label); + + result = _evaluator.evaluateFeature(MATCHING_KEY, BUCKETING_KEY, "another_name", null); + assertEquals("control", result.treatment); + assertEquals("definition not found", result.label); + + split = new ParsedSplit(SPLIT_NAME, 0, false, DEFAULT_TREATMENT_VALUE, _conditions, null, CHANGE_NUMBER, 60, 18, 2, _configurations, new HashSet<>(), false, null); + Mockito.when(_splitCacheConsumer.get(SPLIT_NAME)).thenReturn(split); + result = _evaluator.evaluateFeature(MATCHING_KEY, BUCKETING_KEY, SPLIT_NAME, null); + assertEquals("off", result.treatment); + assertEquals("fallback - exception", result.label); + + split = new ParsedSplit("another_name", 0, false, DEFAULT_TREATMENT_VALUE, _conditions, null, CHANGE_NUMBER, 60, 18, 2, _configurations, new HashSet<>(), false, null); + Mockito.when(_splitCacheConsumer.get("another_name")).thenReturn(split); + result = _evaluator.evaluateFeature(MATCHING_KEY, BUCKETING_KEY, "another_name", null); + assertEquals("control", result.treatment); + 
assertEquals("exception", result.label); + + // with byflag + Mockito.when(_splitCacheConsumer.get(SPLIT_NAME)).thenReturn(null); + Mockito.when(_splitCacheConsumer.get("another_name")).thenReturn(null); + fallbackTreatmentsConfiguration = new FallbackTreatmentsConfiguration(new FallbackTreatment("on"), new HashMap() {{ put(SPLIT_NAME, new FallbackTreatment("off")); }} ); + fallbackTreatmentCalculator = new FallbackTreatmentCalculatorImp(fallbackTreatmentsConfiguration); + _evaluator = new EvaluatorImp(_splitCacheConsumer, _segmentCacheConsumer, _ruleBasedSegmentCacheConsumer, fallbackTreatmentCalculator); + + result = _evaluator.evaluateFeature(MATCHING_KEY, BUCKETING_KEY, SPLIT_NAME, null); + assertEquals("off", result.treatment); + assertEquals("fallback - definition not found", result.label); + + result = _evaluator.evaluateFeature(MATCHING_KEY, BUCKETING_KEY, "another_name", null); + assertEquals("on", result.treatment); + assertEquals("fallback - definition not found", result.label); + + split = new ParsedSplit(SPLIT_NAME, 0, false, DEFAULT_TREATMENT_VALUE, _conditions, null, CHANGE_NUMBER, 60, 18, 2, _configurations, new HashSet<>(), false, null); + Mockito.when(_splitCacheConsumer.get(SPLIT_NAME)).thenReturn(split); + result = _evaluator.evaluateFeature(MATCHING_KEY, BUCKETING_KEY, SPLIT_NAME, null); + assertEquals("off", result.treatment); + assertEquals("fallback - exception", result.label); + + split = new ParsedSplit("another_name", 0, false, DEFAULT_TREATMENT_VALUE, _conditions, null, CHANGE_NUMBER, 60, 18, 2, _configurations, new HashSet<>(), false, null); + Mockito.when(_splitCacheConsumer.get("another_name")).thenReturn(split); + result = _evaluator.evaluateFeature(MATCHING_KEY, BUCKETING_KEY, "another_name", null); + assertEquals("on", result.treatment); + assertEquals("fallback - exception", result.label); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/experiments/AChangePerCallSplitChangeFetcher.java 
b/client/src/test/java/io/split/engine/experiments/AChangePerCallSplitChangeFetcher.java index 64495e112..0e0f67296 100644 --- a/client/src/test/java/io/split/engine/experiments/AChangePerCallSplitChangeFetcher.java +++ b/client/src/test/java/io/split/engine/experiments/AChangePerCallSplitChangeFetcher.java @@ -32,12 +32,13 @@ public AChangePerCallSplitChangeFetcher(String segmentName) { @Override - public SplitChange fetch(long since, FetchOptions options) { + public SplitChange fetch(long since, long rbSince, FetchOptions options) { long latestChangeNumber = since + 1; Condition condition = null; if (_segmentName != null) { - condition = ConditionsTestUtil.makeUserDefinedSegmentCondition(ConditionType.ROLLOUT, _segmentName, Lists.newArrayList(ConditionsTestUtil.partition("on", 10))); + condition = ConditionsTestUtil.makeUserDefinedSegmentCondition(ConditionType.ROLLOUT, _segmentName, + Lists.newArrayList(ConditionsTestUtil.partition("on", 10))); } else { condition = ConditionsTestUtil.makeAllKeysCondition(Lists.newArrayList(ConditionsTestUtil.partition("on", 10))); } @@ -66,9 +67,9 @@ public SplitChange fetch(long since, FetchOptions options) { SplitChange splitChange = new SplitChange(); - splitChange.splits = Lists.newArrayList(add, remove); - splitChange.since = since; - splitChange.till = latestChangeNumber; + splitChange.featureFlags.d = Lists.newArrayList(add, remove); + splitChange.featureFlags.s = since; + splitChange.featureFlags.t = latestChangeNumber; _lastAdded.set(latestChangeNumber); diff --git a/client/src/test/java/io/split/engine/experiments/ParsedRuleBasedSegmentTest.java b/client/src/test/java/io/split/engine/experiments/ParsedRuleBasedSegmentTest.java new file mode 100644 index 000000000..253636814 --- /dev/null +++ b/client/src/test/java/io/split/engine/experiments/ParsedRuleBasedSegmentTest.java @@ -0,0 +1,88 @@ +package io.split.engine.experiments; + +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import 
io.split.client.dtos.ConditionType; +import io.split.client.dtos.ExcludedSegments; +import io.split.client.dtos.MatcherCombiner; +import io.split.client.dtos.SplitChange; +import io.split.client.utils.Json; +import io.split.client.utils.RuleBasedSegmentsToUpdate; +import io.split.engine.matchers.AttributeMatcher; +import io.split.engine.matchers.CombiningMatcher; +import io.split.engine.matchers.UserDefinedSegmentMatcher; + +import org.junit.Assert; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.List; + +import static io.split.client.utils.RuleBasedSegmentProcessor.processRuleBasedSegmentChanges; + +public class ParsedRuleBasedSegmentTest { + + @Test + public void works() { + List excludedSegments = new ArrayList<>(); + excludedSegments.add(new ExcludedSegments("standard","segment1")); + excludedSegments.add(new ExcludedSegments("standard","segment2")); + + AttributeMatcher segmentMatcher = AttributeMatcher.vanilla(new UserDefinedSegmentMatcher("employees")); + CombiningMatcher segmentCombiningMatcher = new CombiningMatcher(MatcherCombiner.AND, Lists.newArrayList(segmentMatcher)); + ParsedRuleBasedSegment parsedRuleBasedSegment = new ParsedRuleBasedSegment("another_rule_based_segment", + Lists.newArrayList(new ParsedCondition(ConditionType.WHITELIST, segmentCombiningMatcher, null, "label")), "user", + 123, Lists.newArrayList("mauro@test.io", "gaston@test.io"), excludedSegments); + + Assert.assertEquals(Sets.newHashSet("segment2", "segment1", "employees"), parsedRuleBasedSegment.getSegmentsNames()); + Assert.assertEquals("another_rule_based_segment", parsedRuleBasedSegment.ruleBasedSegment()); + Assert.assertEquals(Lists.newArrayList(new ParsedCondition(ConditionType.WHITELIST, segmentCombiningMatcher, null, "label")), + parsedRuleBasedSegment.parsedConditions()); + Assert.assertEquals(123, parsedRuleBasedSegment.changeNumber()); + } + + @Test + public void worksWithoutExcluded() { + RuleBasedSegmentParser parser = new 
RuleBasedSegmentParser(); + String load = "{\"ff\":{\"s\":-1,\"t\":-1,\"d\":[]},\"rbs\":{\"s\":-1,\"t\":1457726098069,\"d\":[{ \"changeNumber\": 123, \"trafficTypeName\": \"user\", \"name\": \"some_name\"," + + "\"status\": \"ACTIVE\",\"conditions\": [{\"contitionType\": \"ROLLOUT\"," + + "\"label\": \"some_label\", \"matcherGroup\": { \"matchers\": [{ \"matcherType\": \"ALL_KEYS\", \"negate\": false}]," + + "\"combiner\": \"AND\"}}]}]}}"; + SplitChange change = Json.fromJson(load, SplitChange.class); + RuleBasedSegmentsToUpdate toUpdate = processRuleBasedSegmentChanges(parser, change.ruleBasedSegments.d); + Assert.assertTrue(toUpdate.getToAdd().get(0).excludedKeys().isEmpty()); + Assert.assertTrue(toUpdate.getToAdd().get(0).excludedSegments().isEmpty()); + + load = "{\"ff\":{\"s\":-1,\"t\":-1,\"d\":[]},\"rbs\":{\"s\":-1,\"t\":1457726098069,\"d\":[{ \"changeNumber\": 123, \"trafficTypeName\": \"user\", \"name\": \"some_name\"," + + "\"status\": \"ACTIVE\",\"excluded\":{\"segments\":[{\"type\": \"standard\",\"name\":\"segment1\"}]},\"conditions\": [{\"contitionType\": \"ROLLOUT\"," + + "\"label\": \"some_label\", \"matcherGroup\": { \"matchers\": [{ \"matcherType\": \"ALL_KEYS\", \"negate\": false}]," + + "\"combiner\": \"AND\"}}]}]}}"; + change = Json.fromJson(load, SplitChange.class); + toUpdate = processRuleBasedSegmentChanges(parser, change.ruleBasedSegments.d); + Assert.assertTrue(toUpdate.getToAdd().get(0).excludedKeys().isEmpty()); + + load = "{\"ff\":{\"s\":-1,\"t\":-1,\"d\":[]},\"rbs\":{\"s\":-1,\"t\":1457726098069,\"d\":[{ \"changeNumber\": 123, \"trafficTypeName\": \"user\", \"name\": \"some_name\"," + + "\"status\": \"ACTIVE\",\"excluded\":{\"segments\":[{\"type\": \"standard\",\"name\":\"segment1\"}], \"keys\":null},\"conditions\": [{\"contitionType\": \"ROLLOUT\"," + + "\"label\": \"some_label\", \"matcherGroup\": { \"matchers\": [{ \"matcherType\": \"ALL_KEYS\", \"negate\": false}]," + + "\"combiner\": \"AND\"}}]}]}}"; + change = Json.fromJson(load, 
SplitChange.class); + toUpdate = processRuleBasedSegmentChanges(parser, change.ruleBasedSegments.d); + Assert.assertTrue(toUpdate.getToAdd().get(0).excludedKeys().isEmpty()); + + load = "{\"ff\":{\"s\":-1,\"t\":-1,\"d\":[]},\"rbs\":{\"s\":-1,\"t\":1457726098069,\"d\":[{ \"changeNumber\": 123, \"trafficTypeName\": \"user\", \"name\": \"some_name\"," + + "\"status\": \"ACTIVE\",\"excluded\":{\"keys\":[\"key1\"]},\"conditions\": [{\"contitionType\": \"ROLLOUT\"," + + "\"label\": \"some_label\", \"matcherGroup\": { \"matchers\": [{ \"matcherType\": \"ALL_KEYS\", \"negate\": false}]," + + "\"combiner\": \"AND\"}}]}]}}"; + change = Json.fromJson(load, SplitChange.class); + toUpdate = processRuleBasedSegmentChanges(parser, change.ruleBasedSegments.d); + Assert.assertTrue(toUpdate.getToAdd().get(0).excludedSegments().isEmpty()); + + load = "{\"ff\":{\"s\":-1,\"t\":-1,\"d\":[]},\"rbs\":{\"s\":-1,\"t\":1457726098069,\"d\":[{ \"changeNumber\": 123, \"trafficTypeName\": \"user\", \"name\": \"some_name\"," + + "\"status\": \"ACTIVE\",\"excluded\":{\"segments\":null, \"keys\":[\"key1\"]},\"conditions\": [{\"contitionType\": \"ROLLOUT\"," + + "\"label\": \"some_label\", \"matcherGroup\": { \"matchers\": [{ \"matcherType\": \"ALL_KEYS\", \"negate\": false}]," + + "\"combiner\": \"AND\"}}]}]}}"; + change = Json.fromJson(load, SplitChange.class); + toUpdate = processRuleBasedSegmentChanges(parser, change.ruleBasedSegments.d); + Assert.assertTrue(toUpdate.getToAdd().get(0).excludedSegments().isEmpty()); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/experiments/RuleBasedSegmentParserTest.java b/client/src/test/java/io/split/engine/experiments/RuleBasedSegmentParserTest.java new file mode 100644 index 000000000..add3eb2a5 --- /dev/null +++ b/client/src/test/java/io/split/engine/experiments/RuleBasedSegmentParserTest.java @@ -0,0 +1,556 @@ +package io.split.engine.experiments; + +import com.google.common.collect.Lists; +import 
io.split.client.dtos.*; +import io.split.client.dtos.Matcher; +import io.split.client.utils.Json; +import io.split.client.utils.RuleBasedSegmentsToUpdate; +import io.split.engine.ConditionsTestUtil; +import io.split.engine.evaluator.Labels; +import io.split.engine.matchers.*; +import io.split.engine.matchers.collections.ContainsAllOfSetMatcher; +import io.split.engine.matchers.collections.ContainsAnyOfSetMatcher; +import io.split.engine.matchers.collections.EqualToSetMatcher; +import io.split.engine.matchers.collections.PartOfSetMatcher; +import io.split.engine.matchers.strings.ContainsAnyOfMatcher; +import io.split.engine.matchers.strings.EndsWithAnyOfMatcher; +import io.split.engine.matchers.strings.StartsWithAnyOfMatcher; +import io.split.engine.segments.SegmentChangeFetcher; +import io.split.grammar.Treatments; +import io.split.storages.SegmentCache; +import io.split.storages.memory.SegmentCacheInMemoryImpl; +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mockito; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static io.split.TestHelper.makeRuleBasedSegment; +import static io.split.client.utils.RuleBasedSegmentProcessor.processRuleBasedSegmentChanges; + +import static org.junit.Assert.assertTrue; + +/** + * Tests for ExperimentParser + * + * @author adil + */ +public class RuleBasedSegmentParserTest { + + public static final String EMPLOYEES = "employees"; + public static final String SALES_PEOPLE = "salespeople"; + public static final int CONDITIONS_UPPER_LIMIT = 50; + + @Test + public void works() { + SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); + segmentCache.updateSegment(EMPLOYEES, Stream.of("adil", "pato", "trevor").collect(Collectors.toList()), new ArrayList<>(), 1L); + segmentCache.updateSegment(SALES_PEOPLE, 
Stream.of("kunal").collect(Collectors.toList()), new ArrayList<>(), 1L); + SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); + SegmentChange segmentChangeEmployee = getSegmentChange(-1L, -1L, EMPLOYEES); + SegmentChange segmentChangeSalesPeople = getSegmentChange(-1L, -1L, SALES_PEOPLE); + Mockito.when(segmentChangeFetcher.fetch(Mockito.anyString(), Mockito.anyLong(), Mockito.any())).thenReturn(segmentChangeEmployee).thenReturn(segmentChangeSalesPeople); + + Matcher employeesMatcher = ConditionsTestUtil.userDefinedSegmentMatcher(EMPLOYEES, false); + Matcher notSalespeople = ConditionsTestUtil.userDefinedSegmentMatcher(SALES_PEOPLE, true); + Condition c = ConditionsTestUtil.and(employeesMatcher, notSalespeople, null); + List conditions = Lists.newArrayList(c); + + RuleBasedSegmentParser parser = new RuleBasedSegmentParser(); + RuleBasedSegment ruleBasedSegment = makeRuleBasedSegment("first-name", conditions, 1); + ParsedRuleBasedSegment actual = parser.parse(ruleBasedSegment); + + AttributeMatcher employeesMatcherLogic = AttributeMatcher.vanilla(new UserDefinedSegmentMatcher(EMPLOYEES)); + AttributeMatcher notSalesPeopleMatcherLogic = new AttributeMatcher(null, new UserDefinedSegmentMatcher(SALES_PEOPLE), true); + CombiningMatcher combiningMatcher = new CombiningMatcher(MatcherCombiner.AND, Lists.newArrayList(employeesMatcherLogic, notSalesPeopleMatcherLogic)); + ParsedCondition parsedCondition = ParsedCondition.createParsedConditionForTests(combiningMatcher, null); + List listOfMatcherAndSplits = Lists.newArrayList(parsedCondition); + + ParsedRuleBasedSegment expected = ParsedRuleBasedSegment.createParsedRuleBasedSegmentForTests ("first-name", listOfMatcherAndSplits, "user", 1, + new ArrayList<>(), new ArrayList<>()); + + Assert.assertEquals(actual, expected); + assertTrue(expected.hashCode() != 0); + assertTrue(expected.equals(expected)); + } + + @Test + public void worksForTwoConditions() { + SegmentCache segmentCache = new 
SegmentCacheInMemoryImpl(); + segmentCache.updateSegment(EMPLOYEES, Stream.of("adil", "pato", "trevor").collect(Collectors.toList()), new ArrayList<>(), 1L); + segmentCache.updateSegment(SALES_PEOPLE, Stream.of("kunal").collect(Collectors.toList()), new ArrayList<>(), 1L); + SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); + SegmentChange segmentChangeEmployee = getSegmentChange(-1L, -1L, EMPLOYEES); + SegmentChange segmentChangeSalesPeople = getSegmentChange(-1L, -1L, SALES_PEOPLE); + Mockito.when(segmentChangeFetcher.fetch(Mockito.anyString(), Mockito.anyLong(), Mockito.any())).thenReturn(segmentChangeEmployee).thenReturn(segmentChangeSalesPeople); + + Matcher employeesMatcher = ConditionsTestUtil.userDefinedSegmentMatcher(EMPLOYEES, false); + + Matcher salespeopleMatcher = ConditionsTestUtil.userDefinedSegmentMatcher(SALES_PEOPLE, false); + + List fullyRollout = Lists.newArrayList(ConditionsTestUtil.partition("on", 100)); + List turnOff = Lists.newArrayList(ConditionsTestUtil.partition(Treatments.CONTROL, 100)); + + Condition c1 = ConditionsTestUtil.and(employeesMatcher, fullyRollout); + Condition c2 = ConditionsTestUtil.and(salespeopleMatcher, turnOff); + + List conditions = Lists.newArrayList(c1, c2); + + RuleBasedSegmentParser parser = new RuleBasedSegmentParser(); + RuleBasedSegment ruleBasedSegment = makeRuleBasedSegment("first-name", conditions, 1); + ParsedRuleBasedSegment actual = parser.parse(ruleBasedSegment); + + ParsedCondition parsedCondition1 = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new UserDefinedSegmentMatcher(EMPLOYEES)), fullyRollout); + ParsedCondition parsedCondition2 = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new UserDefinedSegmentMatcher(EMPLOYEES)), turnOff); + List listOfParsedConditions = Lists.newArrayList(parsedCondition1, parsedCondition2); + + ParsedRuleBasedSegment expected = ParsedRuleBasedSegment.createParsedRuleBasedSegmentForTests 
("first-name", listOfParsedConditions, "user", 1, + new ArrayList<>(), new ArrayList<>()); + + Assert.assertEquals(actual, expected); + } + + @Test + public void successForLongConditions() { + SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); + segmentCache.updateSegment(EMPLOYEES, Stream.of("adil", "pato", "trevor").collect(Collectors.toList()), new ArrayList<>(), 1L); + segmentCache.updateSegment(SALES_PEOPLE, Stream.of("kunal").collect(Collectors.toList()), new ArrayList<>(), 1L); + SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); + SegmentChange segmentChangeEmployee = getSegmentChange(-1L, -1L, EMPLOYEES); + Mockito.when(segmentChangeFetcher.fetch(Mockito.anyString(), Mockito.anyLong(), Mockito.any())).thenReturn(segmentChangeEmployee); + + Matcher employeesMatcher = ConditionsTestUtil.userDefinedSegmentMatcher(EMPLOYEES, false); + + List conditions = Lists.newArrayList(); + List p1 = Lists.newArrayList(ConditionsTestUtil.partition("on", 100)); + for (int i = 0 ; i < CONDITIONS_UPPER_LIMIT+1 ; i++) { + Condition c = ConditionsTestUtil.and(employeesMatcher, p1); + conditions.add(c); + } + + RuleBasedSegmentParser parser = new RuleBasedSegmentParser(); + RuleBasedSegment ruleBasedSegment = makeRuleBasedSegment("first-name", conditions, 1); + + Assert.assertNotNull(parser.parse(ruleBasedSegment)); + } + + @Test + public void worksWithAttributes() { + SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); + segmentCache.updateSegment(EMPLOYEES, Stream.of("adil", "pato", "trevor").collect(Collectors.toList()), new ArrayList<>(), 1L); + segmentCache.updateSegment(SALES_PEOPLE, Stream.of("kunal").collect(Collectors.toList()), new ArrayList<>(), 1L); + SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); + SegmentChange segmentChangeEmployee = getSegmentChange(-1L, -1L, EMPLOYEES); + SegmentChange segmentChangeSalesPeople = getSegmentChange(-1L, -1L, SALES_PEOPLE); + 
Mockito.when(segmentChangeFetcher.fetch(Mockito.anyString(), Mockito.anyLong(), Mockito.any())).thenReturn(segmentChangeEmployee).thenReturn(segmentChangeSalesPeople); + + Matcher employeesMatcher = ConditionsTestUtil.userDefinedSegmentMatcher("user", "name", EMPLOYEES, false); + + Matcher creationDateNotOlderThanAPoint = ConditionsTestUtil.numericMatcher("user", "creation_date", + MatcherType.GREATER_THAN_OR_EQUAL_TO, + DataType.DATETIME, + 1457386741L, + true); + + Condition c = ConditionsTestUtil.and(employeesMatcher, creationDateNotOlderThanAPoint, null); + + List conditions = Lists.newArrayList(c); + + RuleBasedSegmentParser parser = new RuleBasedSegmentParser(); + RuleBasedSegment ruleBasedSegment = makeRuleBasedSegment("first-name", conditions, 1); + ParsedRuleBasedSegment actual = parser.parse(ruleBasedSegment); + + AttributeMatcher employeesMatcherLogic = new AttributeMatcher("name", new UserDefinedSegmentMatcher(EMPLOYEES), false); + AttributeMatcher creationDateNotOlderThanAPointLogic = new AttributeMatcher("creation_date", new GreaterThanOrEqualToMatcher(1457386741L, DataType.DATETIME), true); + CombiningMatcher combiningMatcher = new CombiningMatcher(MatcherCombiner.AND, Lists.newArrayList(employeesMatcherLogic, creationDateNotOlderThanAPointLogic)); + ParsedCondition parsedCondition = ParsedCondition.createParsedConditionForTests(combiningMatcher, null); + List listOfMatcherAndSplits = Lists.newArrayList(parsedCondition); + + ParsedRuleBasedSegment expected = ParsedRuleBasedSegment.createParsedRuleBasedSegmentForTests ("first-name", listOfMatcherAndSplits, "user", 1, + new ArrayList<>(), new ArrayList<>()); + + Assert.assertEquals(actual, expected); + } + + @Test + public void lessThanOrEqualTo() { + SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); + SegmentChange segmentChangeEmployee = getSegmentChange(-1L, -1L, EMPLOYEES); + SegmentChange segmentChangeSalesPeople = getSegmentChange(-1L, -1L, SALES_PEOPLE); + 
Mockito.when(segmentChangeFetcher.fetch(Mockito.anyString(), Mockito.anyLong(), Mockito.any())).thenReturn(segmentChangeEmployee).thenReturn(segmentChangeSalesPeople); + + Matcher ageLessThan10 = ConditionsTestUtil.numericMatcher("user", "age", MatcherType.LESS_THAN_OR_EQUAL_TO, DataType.NUMBER, 10L, false); + Condition c = ConditionsTestUtil.and(ageLessThan10, null); + + List conditions = Lists.newArrayList(c); + + RuleBasedSegmentParser parser = new RuleBasedSegmentParser(); + RuleBasedSegment ruleBasedSegment = makeRuleBasedSegment("first-name", conditions, 1); + ParsedRuleBasedSegment actual = parser.parse(ruleBasedSegment); + + AttributeMatcher ageLessThan10Logic = new AttributeMatcher("age", new LessThanOrEqualToMatcher(10, DataType.NUMBER), false); + CombiningMatcher combiningMatcher = new CombiningMatcher(MatcherCombiner.AND, Lists.newArrayList(ageLessThan10Logic)); + ParsedCondition parsedCondition = ParsedCondition.createParsedConditionForTests(combiningMatcher, null); + List listOfMatcherAndSplits = Lists.newArrayList(parsedCondition); + + ParsedRuleBasedSegment expected = ParsedRuleBasedSegment.createParsedRuleBasedSegmentForTests ("first-name", listOfMatcherAndSplits, "user", 1, + new ArrayList<>(), new ArrayList<>()); + + Assert.assertEquals(actual, expected); + } + + @Test + public void equalTo() { + SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); + SegmentChange segmentChangeEmployee = getSegmentChange(-1L, -1L, EMPLOYEES); + SegmentChange segmentChangeSalesPeople = getSegmentChange(-1L, -1L, SALES_PEOPLE); + Mockito.when(segmentChangeFetcher.fetch(Mockito.anyString(), Mockito.anyLong(), Mockito.any())).thenReturn(segmentChangeEmployee).thenReturn(segmentChangeSalesPeople); + + Matcher ageLessThan10 = ConditionsTestUtil.numericMatcher("user", "age", MatcherType.EQUAL_TO, DataType.NUMBER, 10L, true); + Condition c = ConditionsTestUtil.and(ageLessThan10, null); + List conditions = Lists.newArrayList(c); + + 
RuleBasedSegmentParser parser = new RuleBasedSegmentParser(); + RuleBasedSegment ruleBasedSegment = makeRuleBasedSegment("first-name", conditions, 1); + ParsedRuleBasedSegment actual = parser.parse(ruleBasedSegment); + + AttributeMatcher equalToMatcher = new AttributeMatcher("age", new EqualToMatcher(10, DataType.NUMBER), true); + CombiningMatcher combiningMatcher = new CombiningMatcher(MatcherCombiner.AND, Lists.newArrayList(equalToMatcher)); + ParsedCondition parsedCondition = ParsedCondition.createParsedConditionForTests(combiningMatcher, null); + List listOfMatcherAndSplits = Lists.newArrayList(parsedCondition); + + ParsedRuleBasedSegment expected = ParsedRuleBasedSegment.createParsedRuleBasedSegmentForTests ("first-name", listOfMatcherAndSplits, "user", 1, + new ArrayList<>(), new ArrayList<>()); + + Assert.assertEquals(actual, expected); + } + + @Test + public void equalToNegativeNumber() { + SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); + SegmentChange segmentChangeEmployee = getSegmentChange(-1L, -1L, EMPLOYEES); + SegmentChange segmentChangeSalesPeople = getSegmentChange(-1L, -1L, SALES_PEOPLE); + Mockito.when(segmentChangeFetcher.fetch(Mockito.anyString(), Mockito.anyLong(), Mockito.any())).thenReturn(segmentChangeEmployee).thenReturn(segmentChangeSalesPeople); + + Matcher equalToNegative10 = ConditionsTestUtil.numericMatcher("user", "age", MatcherType.EQUAL_TO, DataType.NUMBER, -10L, false); + Condition c = ConditionsTestUtil.and(equalToNegative10, null); + List conditions = Lists.newArrayList(c); + + RuleBasedSegmentParser parser = new RuleBasedSegmentParser(); + RuleBasedSegment ruleBasedSegment = makeRuleBasedSegment("first-name", conditions, 1); + ParsedRuleBasedSegment actual = parser.parse(ruleBasedSegment); + + AttributeMatcher ageEqualTo10Logic = new AttributeMatcher("age", new EqualToMatcher(-10, DataType.NUMBER), false); + CombiningMatcher combiningMatcher = new CombiningMatcher(MatcherCombiner.AND, 
Lists.newArrayList(ageEqualTo10Logic)); + ParsedCondition parsedCondition = ParsedCondition.createParsedConditionForTests(combiningMatcher, null); + List listOfMatcherAndSplits = Lists.newArrayList(parsedCondition); + + ParsedRuleBasedSegment expected = ParsedRuleBasedSegment.createParsedRuleBasedSegmentForTests ("first-name", listOfMatcherAndSplits, "user", 1, + new ArrayList<>(), new ArrayList<>()); + + Assert.assertEquals(actual, expected); + } + + @Test + public void between() { + SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); + SegmentChange segmentChangeEmployee = getSegmentChange(-1L, -1L, EMPLOYEES); + SegmentChange segmentChangeSalesPeople = getSegmentChange(-1L, -1L, SALES_PEOPLE); + Mockito.when(segmentChangeFetcher.fetch(Mockito.anyString(), Mockito.anyLong(), Mockito.any())).thenReturn(segmentChangeEmployee).thenReturn(segmentChangeSalesPeople); + + Matcher ageBetween10And11 = ConditionsTestUtil.betweenMatcher("user", + "age", + DataType.NUMBER, + 10, + 12, + false); + + Condition c = ConditionsTestUtil.and(ageBetween10And11, null); + List conditions = Lists.newArrayList(c); + + RuleBasedSegmentParser parser = new RuleBasedSegmentParser(); + RuleBasedSegment ruleBasedSegment = makeRuleBasedSegment("first-name", conditions, 1); + ParsedRuleBasedSegment actual = parser.parse(ruleBasedSegment); + + AttributeMatcher ageBetween10And11Logic = new AttributeMatcher("age", new BetweenMatcher(10, 12, DataType.NUMBER), false); + CombiningMatcher combiningMatcher = new CombiningMatcher(MatcherCombiner.AND, Lists.newArrayList(ageBetween10And11Logic)); + ParsedCondition parsedCondition = ParsedCondition.createParsedConditionForTests(combiningMatcher, null); + List listOfMatcherAndSplits = Lists.newArrayList(parsedCondition); + + ParsedRuleBasedSegment expected = ParsedRuleBasedSegment.createParsedRuleBasedSegmentForTests ("first-name", listOfMatcherAndSplits, "user", 1, + new ArrayList<>(), new ArrayList<>()); + + 
Assert.assertEquals(actual, expected); + } + + @Test + public void containsAnyOfSet() { + ArrayList set = Lists.newArrayList("sms", "voice"); + + Condition c = ConditionsTestUtil.containsAnyOfSet("user", + "products", + set, + false, + null + ); + + ContainsAnyOfSetMatcher m = new ContainsAnyOfSetMatcher(set); + setMatcherTest(c, m); + } + + @Test + public void containsAllOfSet() { + ArrayList set = Lists.newArrayList("sms", "voice"); + Condition c = ConditionsTestUtil.containsAllOfSet("user", + "products", + set, + false, + null + ); + + ContainsAllOfSetMatcher m = new ContainsAllOfSetMatcher(set); + setMatcherTest(c, m); + } + + @Test + public void equalToSet() { + ArrayList set = Lists.newArrayList("sms", "voice"); + Condition c = ConditionsTestUtil.equalToSet("user", + "products", + set, + false, + null + ); + + EqualToSetMatcher m = new EqualToSetMatcher(set); + setMatcherTest(c, m); + } + + @Test + public void isPartOfSet() { + ArrayList set = Lists.newArrayList("sms", "voice"); + Condition c = ConditionsTestUtil.isPartOfSet("user", + "products", + set, + false, + null + ); + + PartOfSetMatcher m = new PartOfSetMatcher(set); + setMatcherTest(c, m); + } + + @Test + public void startsWithString() { + ArrayList set = Lists.newArrayList("sms", "voice"); + Condition c = ConditionsTestUtil.startsWithString("user", + "products", + set, + false, + null + ); + + StartsWithAnyOfMatcher m = new StartsWithAnyOfMatcher(set); + setMatcherTest(c, m); + } + + @Test + public void endsWithString() { + ArrayList set = Lists.newArrayList("sms", "voice"); + Condition c = ConditionsTestUtil.endsWithString("user", + "products", + set, + false, + null + ); + + EndsWithAnyOfMatcher m = new EndsWithAnyOfMatcher(set); + setMatcherTest(c, m); + } + + + @Test + public void containsString() { + ArrayList set = Lists.newArrayList("sms", "voice"); + Condition c = ConditionsTestUtil.containsString("user", + "products", + set, + false, + null + ); + + ContainsAnyOfMatcher m = new 
ContainsAnyOfMatcher(set); + setMatcherTest(c, m); + } + + @Test + public void UnsupportedMatcher() { + RuleBasedSegmentParser parser = new RuleBasedSegmentParser(); + String splitWithUndefinedMatcher = "{\"ff\":{\"s\":-1,\"t\":-1,\"d\":[]},\"rbs\":{\"s\":-1,\"t\":1457726098069,\"d\":[{ \"changeNumber\": 123, \"trafficTypeName\": \"user\", \"name\": \"some_name\"," + + "\"status\": \"ACTIVE\",\"conditions\": [{\"contitionType\": \"ROLLOUT\"," + + "\"label\": \"some_label\", \"matcherGroup\": { \"matchers\": [{ \"matcherType\": \"UNKNOWN\", \"negate\": false}]," + + "\"combiner\": \"AND\"}}],\"excluded\":{\"keys\":[],\"segments\":[]}}]}}"; + SplitChange change = Json.fromJson(splitWithUndefinedMatcher, SplitChange.class); + for (RuleBasedSegment ruleBasedSegment : change.ruleBasedSegments.d) { + // should not cause exception + ParsedRuleBasedSegment parsedRuleBasedSegment = parser.parse(ruleBasedSegment); + for (ParsedCondition parsedCondition : parsedRuleBasedSegment.parsedConditions()) { + assertTrue(parsedCondition.label() == Labels.UNSUPPORTED_MATCHER); + for (AttributeMatcher matcher : parsedCondition.matcher().attributeMatchers()) { + // Check the matcher is ALL_KEYS + assertTrue(matcher.matcher().toString().equals(" in segment all")); + } + } + } + } + + @Test + public void EqualToSemverMatcher() throws IOException { + RuleBasedSegmentParser parser = new RuleBasedSegmentParser(); + String load = new String(Files.readAllBytes(Paths.get("src/test/resources/semver/semver-splits.json")), StandardCharsets.UTF_8); + SplitChange change = Json.fromJson(load, SplitChange.class); + for (RuleBasedSegment ruleBasedSegment : change.ruleBasedSegments.d) { + // should not cause exception + ParsedRuleBasedSegment parsedRuleBasedSegment = parser.parse(ruleBasedSegment); + if (ruleBasedSegment.name.equals("rbs_semver_equalto")) { + for (ParsedCondition parsedCondition : parsedRuleBasedSegment.parsedConditions()) { + assertTrue(parsedCondition.label().equals("equal to 
semver")); + for (AttributeMatcher matcher : parsedCondition.matcher().attributeMatchers()) { + // Check the matcher is ALL_KEYS + assertTrue(matcher.matcher().toString().equals(" == semver 1\\.22\\.9")); + return; + } + } + } + } + assertTrue(false); + } + + @Test + public void GreaterThanOrEqualSemverMatcher() throws IOException { + RuleBasedSegmentParser parser = new RuleBasedSegmentParser(); + String load = new String(Files.readAllBytes(Paths.get("src/test/resources/semver/semver-splits.json")), StandardCharsets.UTF_8); + SplitChange change = Json.fromJson(load, SplitChange.class); + for (RuleBasedSegment ruleBasedSegment : change.ruleBasedSegments.d) { + // should not cause exception + ParsedRuleBasedSegment parsedRuleBasedSegment = parser.parse(ruleBasedSegment); + if (ruleBasedSegment.name.equals("rbs_semver_greater_or_equalto")) { + for (ParsedCondition parsedCondition : parsedRuleBasedSegment.parsedConditions()) { + assertTrue(parsedCondition.label().equals("greater than or equal to semver")); + for (AttributeMatcher matcher : parsedCondition.matcher().attributeMatchers()) { + // Check the matcher is ALL_KEYS + assertTrue(matcher.matcher().toString().equals(" >= semver 1\\.22\\.9")); + return; + } + } + } + } + assertTrue(false); + } + + @Test + public void LessThanOrEqualSemverMatcher() throws IOException { + RuleBasedSegmentParser parser = new RuleBasedSegmentParser(); + String load = new String(Files.readAllBytes(Paths.get("src/test/resources/semver/semver-splits.json")), StandardCharsets.UTF_8); + SplitChange change = Json.fromJson(load, SplitChange.class); + for (RuleBasedSegment ruleBasedSegment : change.ruleBasedSegments.d) { + // should not cause exception + ParsedRuleBasedSegment parsedRuleBasedSegment = parser.parse(ruleBasedSegment); + if (ruleBasedSegment.name.equals("rbs_semver_less_or_equalto")) { + for (ParsedCondition parsedCondition : parsedRuleBasedSegment.parsedConditions()) { + assertTrue(parsedCondition.label().equals("less than or 
equal to semver")); + for (AttributeMatcher matcher : parsedCondition.matcher().attributeMatchers()) { + // Check the matcher is ALL_KEYS + assertTrue(matcher.matcher().toString().equals(" <= semver 1\\.22\\.9")); + return; + } + } + } + } + assertTrue(false); + } + + @Test + public void BetweenSemverMatcher() throws IOException { + RuleBasedSegmentParser parser = new RuleBasedSegmentParser(); + String load = new String(Files.readAllBytes(Paths.get("src/test/resources/semver/semver-splits.json")), StandardCharsets.UTF_8); + SplitChange change = Json.fromJson(load, SplitChange.class); + RuleBasedSegmentsToUpdate ruleBasedSegmentsToUpdate = processRuleBasedSegmentChanges(parser, change.ruleBasedSegments.d); + for (ParsedRuleBasedSegment parsedRuleBasedSegment : ruleBasedSegmentsToUpdate.getToAdd()) { + // should not cause exception + if (parsedRuleBasedSegment.ruleBasedSegment().equals("rbs_semver_between")) { + for (ParsedCondition parsedCondition : parsedRuleBasedSegment.parsedConditions()) { + assertTrue(parsedCondition.label().equals("between semver")); + for (AttributeMatcher matcher : parsedCondition.matcher().attributeMatchers()) { + // Check the matcher is ALL_KEYS + assertTrue(matcher.matcher().toString().equals(" between semver 1\\.22\\.9 and 2\\.1\\.0")); + return; + } + } + } + } + assertTrue(false); + } + + @Test + public void InListSemverMatcher() throws IOException { + RuleBasedSegmentParser parser = new RuleBasedSegmentParser(); + String load = new String(Files.readAllBytes(Paths.get("src/test/resources/semver/semver-splits.json")), StandardCharsets.UTF_8); + SplitChange change = Json.fromJson(load, SplitChange.class); + for (RuleBasedSegment ruleBasedSegment : change.ruleBasedSegments.d) { + // should not cause exception + ParsedRuleBasedSegment parsedRuleBasedSegment = parser.parse(ruleBasedSegment); + if (ruleBasedSegment.name.equals("rbs_semver_inlist")) { + for (ParsedCondition parsedCondition : parsedRuleBasedSegment.parsedConditions()) { + 
assertTrue(parsedCondition.label().equals("in list semver")); + for (AttributeMatcher matcher : parsedCondition.matcher().attributeMatchers()) { + // Check the matcher is ALL_KEYS + assertTrue(matcher.matcher().toString().startsWith(" in semver list")); + return; + } + } + } + } + assertTrue(false); + } + + public void setMatcherTest(Condition c, io.split.engine.matchers.Matcher m) { + SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); + SegmentChange segmentChangeEmployee = getSegmentChange(-1L, -1L, EMPLOYEES); + SegmentChange segmentChangeSalesPeople = getSegmentChange(-1L, -1L, SALES_PEOPLE); + Mockito.when(segmentChangeFetcher.fetch(Mockito.anyString(), Mockito.anyLong(), Mockito.any())).thenReturn(segmentChangeEmployee).thenReturn(segmentChangeSalesPeople); + + ArrayList set = Lists.newArrayList("sms", "voice"); + List conditions = Lists.newArrayList(c); + + RuleBasedSegmentParser parser = new RuleBasedSegmentParser(); + RuleBasedSegment ruleBasedSegment = makeRuleBasedSegment("first-name", conditions, 1); + ParsedRuleBasedSegment actual = parser.parse(ruleBasedSegment); + + AttributeMatcher attrMatcher = new AttributeMatcher("products", m, false); + CombiningMatcher combiningMatcher = new CombiningMatcher(MatcherCombiner.AND, Lists.newArrayList(attrMatcher)); + ParsedCondition parsedCondition = ParsedCondition.createParsedConditionForTests(combiningMatcher, null); + List listOfMatcherAndSplits = Lists.newArrayList(parsedCondition); + + ParsedRuleBasedSegment expected = ParsedRuleBasedSegment.createParsedRuleBasedSegmentForTests ("first-name", listOfMatcherAndSplits, "user", 1, + new ArrayList<>(), new ArrayList<>()); + + Assert.assertEquals(actual, expected); + } + + private SegmentChange getSegmentChange(long since, long till, String segmentName){ + SegmentChange segmentChange = new SegmentChange(); + segmentChange.name = segmentName; + segmentChange.since = since; + segmentChange.till = till; + segmentChange.added = new 
ArrayList<>(); + segmentChange.removed = new ArrayList<>(); + return segmentChange; + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/experiments/SplitFetcherImpTest.java b/client/src/test/java/io/split/engine/experiments/SplitFetcherImpTest.java new file mode 100644 index 000000000..78b6eac66 --- /dev/null +++ b/client/src/test/java/io/split/engine/experiments/SplitFetcherImpTest.java @@ -0,0 +1,243 @@ +package io.split.engine.experiments; + +import io.split.SplitMockServer; +import io.split.client.*; +import io.split.client.interceptors.FlagSetsFilter; +import io.split.client.interceptors.FlagSetsFilterImpl; +import io.split.client.interceptors.GzipDecoderResponseInterceptor; +import io.split.client.interceptors.GzipEncoderRequestInterceptor; +import io.split.client.utils.*; +import io.split.engine.common.FetchOptions; +import io.split.service.SplitHttpClient; +import io.split.service.SplitHttpClientImpl; +import io.split.storages.RuleBasedSegmentCache; +import io.split.storages.RuleBasedSegmentCacheProducer; +import io.split.storages.SplitCache; +import io.split.storages.SplitCacheProducer; +import io.split.storages.memory.InMemoryCacheImp; +import io.split.storages.memory.RuleBasedSegmentCacheInMemoryImp; +import io.split.telemetry.storage.InMemoryTelemetryStorage; +import io.split.telemetry.storage.NoopTelemetryStorage; +import io.split.telemetry.storage.TelemetryStorage; +import io.split.telemetry.storage.TelemetryStorageProducer; +import okhttp3.mockwebserver.MockResponse; +import org.apache.hc.client5.http.config.RequestConfig; +import org.apache.hc.client5.http.cookie.StandardCookieSpec; +import org.apache.hc.client5.http.impl.classic.HttpClientBuilder; +import org.apache.hc.client5.http.impl.classic.HttpClients; +import org.apache.hc.core5.util.Timeout; +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.mockito.Mockito; 
+ +import java.io.File; +import java.io.IOException; +import java.net.URI; +import java.util.*; + +public class SplitFetcherImpTest { + + @Rule + public TemporaryFolder folder = new TemporaryFolder(); + + private static final TelemetryStorage TELEMETRY_STORAGE_NOOP = Mockito.mock(NoopTelemetryStorage.class); + private static final String TEST_FLAG_SETS = "{\"ff\":{\"d\":[{\"trafficTypeName\":\"client\",\"name\":\"workm\",\"trafficAllocation\":100,\"trafficAllocationSeed\":147392224,\"seed\":524417105,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"on\",\"changeNumber\":1602796638344,\"algo\":2,\"configurations\":{},\"sets\":[\"set_1\",\"set_2\"],\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"client\",\"attribute\":null},\"matcherType\":\"IN_SEGMENT\",\"negate\":false,\"userDefinedSegmentMatcherData\":{\"segmentName\":\"new_segment\"},\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":0},{\"treatment\":\"free\",\"size\":100},{\"treatment\":\"conta\",\"size\":0}],\"label\":\"in segment new_segment\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"client\",\"attribute\":null},\"matcherType\":\"ALL_KEYS\",\"negate\":false,\"userDefinedSegmentMatcherData\":null,\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":100},{\"treatment\":\"off\",\"size\":0},{\"treatment\":\"free\",\"size\":0},{\"treatment\":\"conta\",\"size\":0}],\"label\":\"default 
rule\"}]},{\"trafficTypeName\":\"client\",\"name\":\"workm_set_3\",\"trafficAllocation\":100,\"trafficAllocationSeed\":147392224,\"seed\":524417105,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"on\",\"changeNumber\":1602796638344,\"algo\":2,\"configurations\":{},\"sets\":[\"set_3\"],\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"client\",\"attribute\":null},\"matcherType\":\"IN_SEGMENT\",\"negate\":false,\"userDefinedSegmentMatcherData\":{\"segmentName\":\"new_segment\"},\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":0},{\"treatment\":\"free\",\"size\":100},{\"treatment\":\"conta\",\"size\":0}],\"label\":\"in segment new_segment\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"client\",\"attribute\":null},\"matcherType\":\"ALL_KEYS\",\"negate\":false,\"userDefinedSegmentMatcherData\":null,\"whitelistMatcherData\":null,\"unaryNumericMatcherData\":null,\"betweenMatcherData\":null,\"booleanMatcherData\":null,\"dependencyMatcherData\":null,\"stringMatcherData\":null}]},\"partitions\":[{\"treatment\":\"on\",\"size\":100},{\"treatment\":\"off\",\"size\":0},{\"treatment\":\"free\",\"size\":0},{\"treatment\":\"conta\",\"size\":0}],\"label\":\"default rule\"}]}],\"s\":-1,\"t\":1602796638344},\"rbs\":{\"d\":[],\"t\":-1,\"s\":-1}}"; + + @Test + public void testFetchingSplitsAndRuleBasedSegments() throws Exception { + MockResponse response = new MockResponse().setBody("{" + + "\"ff\":{" + + "\"t\":1675095324253," + + "\"s\":-1," + + "\"d\": [{" + + "\"changeNumber\": 123," + + "\"trafficTypeName\": \"user\"," + + "\"name\": \"some_name\"," + + "\"trafficAllocation\": 100," + + 
"\"trafficAllocationSeed\": 123456," + + "\"seed\": 321654," + + "\"status\": \"ACTIVE\"," + + "\"killed\": false," + + "\"defaultTreatment\": \"off\"," + + "\"algo\": 2," + + "\"conditions\": [{" + + "\"partitions\": [{\"treatment\": \"on\", \"size\": 50},{\"treatment\": \"off\", \"size\": 50}]," + + "\"contitionType\": \"WHITELIST\"," + + "\"label\": \"some_label\"," + + "\"matcherGroup\": {" + + "\"matchers\": [{\"matcherType\": \"WHITELIST\",\"whitelistMatcherData\": {\"whitelist\": [\"k1\", \"k2\", \"k3\"]},\"negate\": false}]," + + "\"combiner\": \"AND\"}" + + "},{" + + "\"conditionType\": \"ROLLOUT\"," + + "\"matcherGroup\": {\"combiner\": \"AND\"," + + "\"matchers\": [{\"keySelector\": {\"trafficType\": \"user\"},\"matcherType\": \"IN_RULE_BASED_SEGMENT\",\"negate\": false,\"userDefinedSegmentMatcherData\": {\"segmentName\": \"sample_rule_based_segment\"}}]" + + "}," + + "\"partitions\": [{\"treatment\": \"on\",\"size\": 100},{\"treatment\": \"off\",\"size\": 0}]," + + "\"label\": \"in rule based segment sample_rule_based_segment\"" + + "}]," + + "\"sets\": [\"set1\", \"set2\"]}]" + + "}," + + "\"rbs\": {" + + "\"t\": 1585948850111," + + "\"s\": -1," + + "\"d\": [" + + "{" + + "\"changeNumber\": 5," + + "\"name\": \"sample_rule_based_segment\"," + + "\"status\": \"ACTIVE\"," + + "\"trafficTypeName\": \"user\"," + + "\"excluded\":{" + + "\"keys\":[\"mauro@split.io\",\"gaston@split.io\"]," + + "\"segments\":[]" + + "}," + + "\"conditions\": [" + + "{" + + "\"matcherGroup\": {" + + "\"combiner\": \"AND\"," + + "\"matchers\": [" + + "{" + + "\"keySelector\": {" + + "\"trafficType\": \"user\"," + + "\"attribute\": \"email\"" + + "}," + + "\"matcherType\": \"ENDS_WITH\"," + + "\"negate\": false," + + "\"whitelistMatcherData\": {" + + "\"whitelist\": [" + + "\"@split.io\"" + + "]}}]}}]}]}}"); + MockResponse response2 = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1675095324253, \"t\":1685095324253}, 
\"rbs\":{\"d\":[],\"s\":1585948850111,\"t\":1585948850111}}"); + MockResponse response3 = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1685095324253, \"t\":1695095324253}, \"rbs\":{\"d\":[],\"s\":1585948850111,\"t\":1585948850111}}"); + MockResponse response4 = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1695095324253, \"t\":1775095324253}, \"rbs\":{\"d\":[],\"s\":1585948850111,\"t\":1585948850111}}"); + MockResponse response5 = new MockResponse().setBody("{\"ff\":{\"d\": [], \"s\":1775095324253, \"t\":1775095324253}, \"rbs\":{\"d\":[],\"s\":1585948850111,\"t\":1585948850111}}"); + Queue responses = new LinkedList<>(); + responses.add(response); + Queue responses2 = new LinkedList<>(); + responses2.add(response2); + Queue responses3 = new LinkedList<>(); + responses3.add(response3); + Queue responses4 = new LinkedList<>(); + responses4.add(response4); + Queue responses5 = new LinkedList<>(); + responses5.add(response5); + SplitMockServer splitServer = new SplitMockServer(CustomDispatcher2.builder() + .path(CustomDispatcher2.SPLIT_FETCHER_1, responses) + .path(CustomDispatcher2.SPLIT_FETCHER_2, responses2) + .path(CustomDispatcher2.SPLIT_FETCHER_3, responses3) + .path(CustomDispatcher2.SPLIT_FETCHER_4, responses4) + .path(CustomDispatcher2.SPLIT_FETCHER_5, responses5) + .build()); + splitServer.start(); + + SplitClientConfig config = SplitClientConfig.builder() + .setBlockUntilReadyTimeout(10000) + .endpoint(splitServer.getUrl(), splitServer.getUrl()) + .featuresRefreshRate(20) + .segmentsRefreshRate(30) + .streamingEnabled(false) + .build(); + + SplitParser splitParser = new SplitParser(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + RuleBasedSegmentCache ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); + FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(config.getSetsFilter()); + SplitCache splitCache = new InMemoryCacheImp(flagSetsFilter); + RequestDecorator _requestDecorator = new 
RequestDecorator(config.customHeaderDecorator()); + SDKMetadata _sdkMetadata = new SDKMetadata("1.1.1", "ip", "machineName"); + RequestConfig requestConfig = RequestConfig.custom() + .setConnectTimeout(Timeout.ofMilliseconds(config.connectionTimeout())) + .setCookieSpec(StandardCookieSpec.STRICT) + .build(); + TelemetryStorage telemetryStorage = new InMemoryTelemetryStorage(); + TelemetryStorageProducer _telemetryStorageProducer = telemetryStorage; + + HttpClientBuilder httpClientbuilder = HttpClients.custom() + .setDefaultRequestConfig(requestConfig) + .addRequestInterceptorLast(new GzipEncoderRequestInterceptor()) + .addResponseInterceptorLast((new GzipDecoderResponseInterceptor())); + SplitHttpClient _splitHttpClient = SplitHttpClientImpl.create(httpClientbuilder.build(), + _requestDecorator, + "apiToken", + _sdkMetadata); + URI _rootTarget = URI.create(config.endpoint()); + SplitChangeFetcher splitChangeFetcher = HttpSplitChangeFetcher.create(_splitHttpClient, _rootTarget, + _telemetryStorageProducer, config.isSdkEndpointOverridden()); + SplitFetcherImp splitFetcher = new SplitFetcherImp(splitChangeFetcher, splitParser, splitCache, _telemetryStorageProducer, + flagSetsFilter, ruleBasedSegmentParser, ruleBasedSegmentCache); + + splitFetcher.forceRefresh(new FetchOptions.Builder().cacheControlHeaders(false).build()); + splitServer.stop(); + Assert.assertEquals("some_name", splitCache.get("some_name").feature()); + Assert.assertEquals("sample_rule_based_segment", ruleBasedSegmentCache.get("sample_rule_based_segment").ruleBasedSegment()); + } + + @Test + public void testLocalHost() { + FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(new HashSet<>()); + SplitCacheProducer splitCacheProducer = new InMemoryCacheImp(flagSetsFilter); + RuleBasedSegmentCacheProducer ruleBasedSegmentCacheProducer = new RuleBasedSegmentCacheInMemoryImp(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + + InputStreamProvider inputStreamProvider = new 
FileInputStreamProvider("src/test/resources/split_init.json"); + SplitChangeFetcher splitChangeFetcher = new JsonLocalhostSplitChangeFetcher(inputStreamProvider); + SplitParser splitParser = new SplitParser(); + FetchOptions fetchOptions = new FetchOptions.Builder().build(); + SplitFetcher splitFetcher = new SplitFetcherImp(splitChangeFetcher, splitParser, splitCacheProducer, TELEMETRY_STORAGE_NOOP, flagSetsFilter, + ruleBasedSegmentParser, ruleBasedSegmentCacheProducer); + + FetchResult fetchResult = splitFetcher.forceRefresh(fetchOptions); + + Assert.assertEquals(2, fetchResult.getSegments().size()); + } + + @Test + public void testLocalHostFlagSets() throws IOException { + File file = folder.newFile("test_0.json"); + + byte[] test = TEST_FLAG_SETS.getBytes(); + com.google.common.io.Files.write(test, file); + + InputStreamProvider inputStreamProvider = new FileInputStreamProvider(file.getAbsolutePath()); + FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(new HashSet<>(Arrays.asList("set_1"))); + SplitCacheProducer splitCacheProducer = new InMemoryCacheImp(flagSetsFilter); + RuleBasedSegmentCacheProducer ruleBasedSegmentCacheProducer = new RuleBasedSegmentCacheInMemoryImp(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + + SplitChangeFetcher splitChangeFetcher = new JsonLocalhostSplitChangeFetcher(inputStreamProvider); + SplitParser splitParser = new SplitParser(); + FetchOptions fetchOptions = new FetchOptions.Builder().build(); + SplitFetcher splitFetcher = new SplitFetcherImp(splitChangeFetcher, splitParser, splitCacheProducer, TELEMETRY_STORAGE_NOOP, flagSetsFilter, + ruleBasedSegmentParser, ruleBasedSegmentCacheProducer); + + FetchResult fetchResult = splitFetcher.forceRefresh(fetchOptions); + + Assert.assertEquals(1, fetchResult.getSegments().size()); + } + + @Test + public void testLocalHostFlagSetsNotIntersect() throws IOException { + File file = folder.newFile("test_0.json"); + + byte[] test = 
TEST_FLAG_SETS.getBytes(); + com.google.common.io.Files.write(test, file); + + InputStreamProvider inputStreamProvider = new FileInputStreamProvider(file.getAbsolutePath()); + FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(new HashSet<>(Arrays.asList("set_4"))); + SplitCacheProducer splitCacheProducer = new InMemoryCacheImp(flagSetsFilter); + RuleBasedSegmentCacheProducer ruleBasedSegmentCacheProducer = new RuleBasedSegmentCacheInMemoryImp(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + + SplitChangeFetcher splitChangeFetcher = new JsonLocalhostSplitChangeFetcher(inputStreamProvider); + SplitParser splitParser = new SplitParser(); + FetchOptions fetchOptions = new FetchOptions.Builder().build(); + SplitFetcher splitFetcher = new SplitFetcherImp(splitChangeFetcher, splitParser, splitCacheProducer, TELEMETRY_STORAGE_NOOP, flagSetsFilter, + ruleBasedSegmentParser, ruleBasedSegmentCacheProducer); + + FetchResult fetchResult = splitFetcher.forceRefresh(fetchOptions); + + Assert.assertEquals(0, fetchResult.getSegments().size()); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/experiments/SplitFetcherTest.java b/client/src/test/java/io/split/engine/experiments/SplitFetcherTest.java index 368fdf172..a6c2468ab 100644 --- a/client/src/test/java/io/split/engine/experiments/SplitFetcherTest.java +++ b/client/src/test/java/io/split/engine/experiments/SplitFetcherTest.java @@ -1,14 +1,14 @@ package io.split.engine.experiments; import com.google.common.collect.Lists; -import io.split.storages.SegmentCacheConsumer; +import io.split.client.interceptors.FlagSetsFilter; +import io.split.client.interceptors.FlagSetsFilterImpl; +import io.split.storages.*; import io.split.storages.memory.InMemoryCacheImp; -import io.split.storages.SegmentCache; +import io.split.storages.memory.RuleBasedSegmentCacheInMemoryImp; import io.split.storages.memory.SegmentCacheInMemoryImpl; -import io.split.storages.SplitCache; 
import io.split.client.dtos.*; import io.split.engine.ConditionsTestUtil; -import io.split.engine.SDKReadinessGates; import io.split.engine.common.FetchOptions; import io.split.engine.matchers.AllKeysMatcher; import io.split.engine.matchers.CombiningMatcher; @@ -28,6 +28,8 @@ import org.slf4j.LoggerFactory; import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; import java.util.List; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; @@ -38,6 +40,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.mockito.Matchers.*; import static org.mockito.Mockito.mock; @@ -49,29 +52,33 @@ public class SplitFetcherTest { private static final Logger _log = LoggerFactory.getLogger(SplitFetcherTest.class); private static final TelemetryStorage TELEMETRY_STORAGE = Mockito.mock(InMemoryTelemetryStorage.class); + private static final FlagSetsFilter FLAG_SETS_FILTER = new FlagSetsFilterImpl(new HashSet<>()); @Test @Ignore //This test is ignore since is deprecated. We can review this in a future. - public void works_when_we_start_without_any_state() throws InterruptedException { + public void worksWhenWeStartWithoutAnyState() throws InterruptedException { works(0); } @Test @Ignore //This test is ignore since is deprecated. We can review this in a future. 
- public void works_when_we_start_with_any_state() throws InterruptedException { + public void worksWhenWeStartWithAnyState() throws InterruptedException { works(11L); } private void works(long startingChangeNumber) throws InterruptedException { AChangePerCallSplitChangeFetcher splitChangeFetcher = new AChangePerCallSplitChangeFetcher(); - SplitCache cache = new InMemoryCacheImp(startingChangeNumber); - SplitFetcherImp fetcher = new SplitFetcherImp(splitChangeFetcher, new SplitParser(), cache, cache, TELEMETRY_STORAGE); + SplitCache cache = new InMemoryCacheImp(startingChangeNumber, FLAG_SETS_FILTER); + RuleBasedSegmentCacheProducer ruleBasedSegmentCacheProducer = new RuleBasedSegmentCacheInMemoryImp(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + + SplitFetcherImp fetcher = new SplitFetcherImp(splitChangeFetcher, new SplitParser(), cache, TELEMETRY_STORAGE, FLAG_SETS_FILTER, + ruleBasedSegmentParser, ruleBasedSegmentCacheProducer); // execute the fetcher for a little bit. 
executeWaitAndTerminate(fetcher, 1, 3, TimeUnit.SECONDS); assertThat(splitChangeFetcher.lastAdded(), is(greaterThan(startingChangeNumber))); -// assertThat(cache.getChangeNumber(), is(equalTo(splitChangeFetcher.lastAdded()))); // all previous splits have been removed since they are dead for (long i = startingChangeNumber; i < cache.getChangeNumber(); i++) { @@ -81,7 +88,7 @@ private void works(long startingChangeNumber) throws InterruptedException { ParsedCondition expectedParsedCondition = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new AllKeysMatcher()), Lists.newArrayList(ConditionsTestUtil.partition("on", 10))); List expectedListOfMatcherAndSplits = Lists.newArrayList(expectedParsedCondition); - ParsedSplit expected = ParsedSplit.createParsedSplitForTests("" + cache.getChangeNumber(), (int) cache.getChangeNumber(), false, Treatments.OFF, expectedListOfMatcherAndSplits, null, cache.getChangeNumber(), 1); + ParsedSplit expected = ParsedSplit.createParsedSplitForTests("" + cache.getChangeNumber(), (int) cache.getChangeNumber(), false, Treatments.OFF, expectedListOfMatcherAndSplits, null, cache.getChangeNumber(), 1, new HashSet<>(), true, null); ParsedSplit actual = cache.get("" + cache.getChangeNumber()); Thread.sleep(1000); @@ -89,8 +96,7 @@ private void works(long startingChangeNumber) throws InterruptedException { } @Test - public void when_parser_fails_we_remove_the_experiment() throws InterruptedException { - SDKReadinessGates gates = new SDKReadinessGates(); + public void whenParserFailsWeRemoveTheExperiment() throws InterruptedException { Split validSplit = new Split(); validSplit.status = Status.ACTIVE; validSplit.seed = (int) -1; @@ -99,9 +105,14 @@ public void when_parser_fails_we_remove_the_experiment() throws InterruptedExcep validSplit.name = "-1"; SplitChange validReturn = new SplitChange(); - validReturn.splits = Lists.newArrayList(validSplit); - validReturn.since = -1L; - validReturn.till = 0L; + validReturn.featureFlags = new 
ChangeDto<>(); + validReturn.featureFlags.d = Lists.newArrayList(validSplit); + validReturn.featureFlags.s = -1L; + validReturn.featureFlags.t = 0L; + validReturn.ruleBasedSegments = new ChangeDto<>(); + validReturn.ruleBasedSegments.t = -1; + validReturn.ruleBasedSegments.s = -1; + validReturn.ruleBasedSegments.d = new ArrayList<>(); MatcherGroup invalidMatcherGroup = new MatcherGroup(); invalidMatcherGroup.matchers = Lists.newArrayList(); @@ -118,54 +129,128 @@ public void when_parser_fails_we_remove_the_experiment() throws InterruptedExcep invalidSplit.name = "-1"; SplitChange invalidReturn = new SplitChange(); - invalidReturn.splits = Lists.newArrayList(invalidSplit); - invalidReturn.since = 0L; - invalidReturn.till = 1L; + invalidReturn.featureFlags = new ChangeDto<>(); + invalidReturn.featureFlags.d = Lists.newArrayList(invalidSplit); + invalidReturn.featureFlags.s = 0L; + invalidReturn.featureFlags.t = 1L; + invalidReturn.ruleBasedSegments = new ChangeDto<>(); + invalidReturn.ruleBasedSegments.t = -1; + invalidReturn.ruleBasedSegments.s = -1; + invalidReturn.ruleBasedSegments.d = new ArrayList<>(); SplitChange noReturn = new SplitChange(); - noReturn.splits = Lists.newArrayList(); - noReturn.since = 1L; - noReturn.till = 1L; + noReturn.featureFlags = new ChangeDto<>(); + noReturn.featureFlags.d = Lists.newArrayList(); + noReturn.featureFlags.s = 1L; + noReturn.featureFlags.t = 1L; + noReturn.ruleBasedSegments = new ChangeDto<>(); + noReturn.ruleBasedSegments.t = -1; + noReturn.ruleBasedSegments.s = -1; + noReturn.ruleBasedSegments.d = new ArrayList<>(); SplitChangeFetcher splitChangeFetcher = mock(SplitChangeFetcher.class); - when(splitChangeFetcher.fetch(Mockito.eq(-1L), Mockito.any())).thenReturn(validReturn); - when(splitChangeFetcher.fetch(Mockito.eq(0L), Mockito.any())).thenReturn(invalidReturn); - when(splitChangeFetcher.fetch(Mockito.eq(1L), Mockito.any())).thenReturn(noReturn); + when(splitChangeFetcher.fetch(Mockito.eq(-1L), Mockito.eq(-1L), 
Mockito.any())).thenReturn(validReturn); + when(splitChangeFetcher.fetch(Mockito.eq(0L), Mockito.eq(-1L), Mockito.any())).thenReturn(invalidReturn); + when(splitChangeFetcher.fetch(Mockito.eq(1L), Mockito.eq(-1L), Mockito.any())).thenReturn(noReturn); SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); - SplitCache cache = new InMemoryCacheImp(-1); + SplitCache cache = new InMemoryCacheImp(-1, FLAG_SETS_FILTER); + RuleBasedSegmentCache ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); SegmentChangeFetcher segmentChangeFetcher = mock(SegmentChangeFetcher.class); - SegmentSynchronizationTask segmentSynchronizationTask = new SegmentSynchronizationTaskImp(segmentChangeFetcher, 1,10, gates, segmentCache, TELEMETRY_STORAGE, cache); - segmentSynchronizationTask.startPeriodicFetching(); - SplitFetcherImp fetcher = new SplitFetcherImp(splitChangeFetcher, new SplitParser(), cache, cache, TELEMETRY_STORAGE); + SegmentSynchronizationTask segmentSynchronizationTask = new SegmentSynchronizationTaskImp(segmentChangeFetcher, 1,10, segmentCache, TELEMETRY_STORAGE, cache, null, ruleBasedSegmentCache); + segmentSynchronizationTask.start(); + SplitFetcherImp fetcher = new SplitFetcherImp(splitChangeFetcher, new SplitParser(), cache, TELEMETRY_STORAGE, FLAG_SETS_FILTER, + ruleBasedSegmentParser, ruleBasedSegmentCache); + // execute the fetcher for a little bit. 
executeWaitAndTerminate(fetcher, 1, 5, TimeUnit.SECONDS); - assertThat(cache.getChangeNumber(), is(equalTo(1L))); + assertEquals(1L, cache.getChangeNumber()); // verify that the fetcher return null - assertThat(cache.get("-1"), is(nullValue())); + Assert.assertNull(cache.get("-1")); } @Test - public void if_there_is_a_problem_talking_to_split_change_count_down_latch_is_not_decremented() throws Exception { - SDKReadinessGates gates = new SDKReadinessGates(); - SplitCache cache = new InMemoryCacheImp(-1); + public void ifThereIsAProblemTalkingToSplitChangeCountDownLatchIsNotDecremented() throws Exception { + SplitCache cache = new InMemoryCacheImp(-1, FLAG_SETS_FILTER); SplitChangeFetcher splitChangeFetcher = mock(SplitChangeFetcher.class); - when(splitChangeFetcher.fetch(-1L, new FetchOptions.Builder().build())).thenThrow(new RuntimeException()); + when(splitChangeFetcher.fetch(-1L, -1, new FetchOptions.Builder().build())).thenThrow(new RuntimeException()); SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); + RuleBasedSegmentCache ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); SegmentChangeFetcher segmentChangeFetcher = mock(SegmentChangeFetcher.class); - SegmentSynchronizationTask segmentSynchronizationTask = new SegmentSynchronizationTaskImp(segmentChangeFetcher, 1,10, gates, segmentCache, TELEMETRY_STORAGE, cache); - segmentSynchronizationTask.startPeriodicFetching(); - SplitFetcherImp fetcher = new SplitFetcherImp(splitChangeFetcher, new SplitParser(),cache, cache, TELEMETRY_STORAGE); + SegmentSynchronizationTask segmentSynchronizationTask = new SegmentSynchronizationTaskImp(segmentChangeFetcher, 1,10, segmentCache, TELEMETRY_STORAGE, cache, null, ruleBasedSegmentCache); + segmentSynchronizationTask.start(); + SplitFetcherImp fetcher = new SplitFetcherImp(splitChangeFetcher, new SplitParser(), cache, TELEMETRY_STORAGE, FLAG_SETS_FILTER, + 
ruleBasedSegmentParser, ruleBasedSegmentCache); // execute the fetcher for a little bit. executeWaitAndTerminate(fetcher, 1, 5, TimeUnit.SECONDS); - assertThat(cache.getChangeNumber(), is(equalTo(-1L))); + Assert.assertEquals(-1L, cache.getChangeNumber()); + } + + @Test + public void addFeatureFlags() throws InterruptedException { + SplitCache cache = new InMemoryCacheImp(-1, new FlagSetsFilterImpl(new HashSet<>(Arrays.asList("set_1", "set_2")))); + + Split featureFlag1 = new Split(); + featureFlag1.status = Status.ACTIVE; + featureFlag1.seed = (int) -1; + featureFlag1.conditions = Lists.newArrayList(ConditionsTestUtil.makeAllKeysCondition(Lists.newArrayList(ConditionsTestUtil.partition("on", 10)))); + featureFlag1.defaultTreatment = Treatments.OFF; + featureFlag1.name = "feature_flag"; + featureFlag1.sets = new HashSet<>(Arrays.asList("set_1", "set_2")); + featureFlag1.trafficAllocation = 100; + featureFlag1.trafficAllocationSeed = 147392224; + + SplitChange validReturn = new SplitChange(); + validReturn.featureFlags = new ChangeDto<>(); + validReturn.featureFlags.d = Lists.newArrayList(featureFlag1); + validReturn.featureFlags.s = -1L; + validReturn.featureFlags.t = 0L; + validReturn.ruleBasedSegments = new ChangeDto<>(); + validReturn.ruleBasedSegments.t = -1; + validReturn.ruleBasedSegments.s = -1; + validReturn.ruleBasedSegments.d = new ArrayList<>(); + + SplitChangeFetcher splitChangeFetcher = mock(SplitChangeFetcher.class); + when(splitChangeFetcher.fetch(Mockito.eq(-1L), Mockito.eq(-1L), Mockito.any())).thenReturn(validReturn); + + RuleBasedSegmentCache ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(new HashSet<>(Arrays.asList("set_1", "set_2"))); + SplitFetcherImp fetcher = new SplitFetcherImp(splitChangeFetcher, new SplitParser(), cache, TELEMETRY_STORAGE, flagSetsFilter, + ruleBasedSegmentParser, 
ruleBasedSegmentCache); + + executeWaitAndTerminate(fetcher, 1, 5, TimeUnit.SECONDS); + + Assert.assertTrue(cache.getNamesByFlagSets(Arrays.asList("set_1", "set_2")).get("set_1").contains("feature_flag")); + Assert.assertTrue(cache.getNamesByFlagSets(Arrays.asList("set_1", "set_2")).get("set_2").contains("feature_flag")); + + featureFlag1.sets.remove("set_2"); + + validReturn = new SplitChange(); + validReturn.featureFlags = new ChangeDto<>(); + validReturn.featureFlags.d = Lists.newArrayList(featureFlag1); + validReturn.featureFlags.s = 0L; + validReturn.featureFlags.t = 1L; + validReturn.ruleBasedSegments = new ChangeDto<>(); + validReturn.ruleBasedSegments.t = -1; + validReturn.ruleBasedSegments.s = -1; + validReturn.ruleBasedSegments.d = new ArrayList<>(); + + when(splitChangeFetcher.fetch(Mockito.eq(0L), Mockito.eq(-1L), Mockito.any())).thenReturn(validReturn); + + executeWaitAndTerminate(fetcher, 1, 5, TimeUnit.SECONDS); + + Assert.assertTrue(cache.getNamesByFlagSets(Arrays.asList("set_1", "set_2")).get("set_1").contains("feature_flag")); + Assert.assertFalse(cache.getNamesByFlagSets(Arrays.asList("set_1", "set_2")).get("set_2").contains("feature_flag")); } private void executeWaitAndTerminate(Runnable runnable, long frequency, long waitInBetween, TimeUnit unit) throws InterruptedException { @@ -189,20 +274,22 @@ private void executeWaitAndTerminate(Runnable runnable, long frequency, long wai @Test @Ignore //This test is ignore since is deprecated. We can review this in a future. 
- public void works_with_user_defined_segments() throws Exception { + public void worksWithUserDefinedSegments() throws Exception { long startingChangeNumber = -1; String segmentName = "foosegment"; AChangePerCallSplitChangeFetcher experimentChangeFetcher = new AChangePerCallSplitChangeFetcher(segmentName); - SDKReadinessGates gates = new SDKReadinessGates(); - SplitCache cache = new InMemoryCacheImp(startingChangeNumber); + SplitCache cache = new InMemoryCacheImp(startingChangeNumber, FLAG_SETS_FILTER); SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); + RuleBasedSegmentCache ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); SegmentChangeFetcher segmentChangeFetcher = mock(SegmentChangeFetcher.class); SegmentChange segmentChange = getSegmentChange(0L, 0L, segmentName); when(segmentChangeFetcher.fetch(anyString(), anyLong(), any())).thenReturn(segmentChange); - SegmentSynchronizationTask segmentSynchronizationTask = new SegmentSynchronizationTaskImp(segmentChangeFetcher, 1,10, gates, segmentCache, Mockito.mock(TelemetryStorage.class), cache); - segmentSynchronizationTask.startPeriodicFetching(); - SplitFetcherImp fetcher = new SplitFetcherImp(experimentChangeFetcher, new SplitParser(), cache, cache, TELEMETRY_STORAGE); + SegmentSynchronizationTask segmentSynchronizationTask = new SegmentSynchronizationTaskImp(segmentChangeFetcher, 1,10, segmentCache, Mockito.mock(TelemetryStorage.class), cache, null, ruleBasedSegmentCache); + segmentSynchronizationTask.start(); + SplitFetcherImp fetcher = new SplitFetcherImp(experimentChangeFetcher, new SplitParser(), cache, TELEMETRY_STORAGE, FLAG_SETS_FILTER, + ruleBasedSegmentParser, ruleBasedSegmentCache); // execute the fetcher for a little bit. 
executeWaitAndTerminate(fetcher, 1, 5, TimeUnit.SECONDS); @@ -220,30 +307,39 @@ public void works_with_user_defined_segments() throws Exception { @Test public void testBypassCdnClearedAfterFirstHit() { SplitChangeFetcher mockFetcher = Mockito.mock(SplitChangeFetcher.class); - SegmentSynchronizationTask segmentSynchronizationTaskMock = Mockito.mock(SegmentSynchronizationTask.class); - SegmentCacheConsumer segmentCacheMock = Mockito.mock(SegmentCacheConsumer.class); SplitParser mockParser = new SplitParser(); - SDKReadinessGates mockGates = Mockito.mock(SDKReadinessGates.class); - SplitCache mockCache = new InMemoryCacheImp(); - SplitFetcherImp fetcher = new SplitFetcherImp(mockFetcher, mockParser, mockCache, mockCache, Mockito.mock(TelemetryRuntimeProducer.class)); - + SplitCache mockCache = new InMemoryCacheImp(FLAG_SETS_FILTER); + RuleBasedSegmentCacheProducer ruleBasedSegmentCacheProducer = new RuleBasedSegmentCacheInMemoryImp(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + SplitFetcherImp fetcher = new SplitFetcherImp(mockFetcher, mockParser, mockCache, Mockito.mock(TelemetryRuntimeProducer.class), FLAG_SETS_FILTER, + ruleBasedSegmentParser, ruleBasedSegmentCacheProducer); SplitChange response1 = new SplitChange(); - response1.splits = new ArrayList<>(); - response1.since = -1; - response1.till = 1; + response1.featureFlags = new ChangeDto<>(); + response1.featureFlags.d = new ArrayList<>(); + response1.featureFlags.s = -1; + response1.featureFlags.t = 1; + response1.ruleBasedSegments = new ChangeDto<>(); + response1.ruleBasedSegments.t = -1; + response1.ruleBasedSegments.s = -1; + response1.ruleBasedSegments.d = new ArrayList<>(); SplitChange response2 = new SplitChange(); - response2.splits = new ArrayList<>(); - response2.since = 1; - response2.till = 1; - + response2.featureFlags = new ChangeDto<>(); + response2.featureFlags.d = new ArrayList<>(); + response2.featureFlags.s = 1; + response2.featureFlags.t = 1; + 
response2.ruleBasedSegments = new ChangeDto<>(); + response2.ruleBasedSegments.t = -1; + response2.ruleBasedSegments.s = -1; + response2.ruleBasedSegments.d = new ArrayList<>(); ArgumentCaptor optionsCaptor = ArgumentCaptor.forClass(FetchOptions.class); ArgumentCaptor cnCaptor = ArgumentCaptor.forClass(Long.class); - when(mockFetcher.fetch(cnCaptor.capture(), optionsCaptor.capture())).thenReturn(response1, response2); + ArgumentCaptor rbsCnCaptor = ArgumentCaptor.forClass(Long.class); + when(mockFetcher.fetch(cnCaptor.capture(), rbsCnCaptor.capture(), optionsCaptor.capture())).thenReturn(response1, response2); - FetchOptions originalOptions = new FetchOptions.Builder().targetChangeNumber(123).build(); + FetchOptions originalOptions = new FetchOptions.Builder().targetChangeNumber(123).targetChangeNumberRBS(-1).build(); fetcher.forceRefresh(originalOptions); List capturedCNs = cnCaptor.getAllValues(); List capturedOptions = optionsCaptor.getAllValues(); @@ -270,4 +366,4 @@ private SegmentChange getSegmentChange(long since, long till, String segmentName segmentChange.removed = new ArrayList<>(); return segmentChange; } -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/experiments/SplitParserTest.java b/client/src/test/java/io/split/engine/experiments/SplitParserTest.java index 856534a14..d9e945bfa 100644 --- a/client/src/test/java/io/split/engine/experiments/SplitParserTest.java +++ b/client/src/test/java/io/split/engine/experiments/SplitParserTest.java @@ -1,11 +1,17 @@ package io.split.engine.experiments; import com.google.common.collect.Lists; -import io.split.storages.SegmentCache; -import io.split.storages.memory.SegmentCacheInMemoryImpl; -import io.split.client.dtos.*; -import io.split.engine.ConditionsTestUtil; -import io.split.engine.SDKReadinessGates; +import io.split.client.dtos.Condition; +import io.split.client.dtos.DataType; +import io.split.client.dtos.Matcher; +import io.split.client.dtos.MatcherCombiner; +import 
io.split.client.dtos.MatcherType; +import io.split.client.dtos.Partition; +import io.split.client.dtos.SegmentChange; +import io.split.client.dtos.Split; +import io.split.client.dtos.SplitChange; +import io.split.client.dtos.Status; +import io.split.engine.matchers.PrerequisitesMatcher; import io.split.engine.matchers.AttributeMatcher; import io.split.engine.matchers.BetweenMatcher; import io.split.engine.matchers.CombiningMatcher; @@ -13,6 +19,11 @@ import io.split.engine.matchers.GreaterThanOrEqualToMatcher; import io.split.engine.matchers.LessThanOrEqualToMatcher; import io.split.engine.matchers.UserDefinedSegmentMatcher; +import io.split.storages.SegmentCache; +import io.split.storages.memory.SegmentCacheInMemoryImpl; +import io.split.client.utils.Json; +import io.split.engine.evaluator.Labels; +import io.split.engine.ConditionsTestUtil; import io.split.engine.matchers.collections.ContainsAllOfSetMatcher; import io.split.engine.matchers.collections.ContainsAnyOfSetMatcher; import io.split.engine.matchers.collections.EqualToSetMatcher; @@ -21,25 +32,25 @@ import io.split.engine.matchers.strings.EndsWithAnyOfMatcher; import io.split.engine.matchers.strings.StartsWithAnyOfMatcher; import io.split.engine.segments.SegmentChangeFetcher; -import io.split.engine.segments.SegmentSynchronizationTask; -import io.split.engine.segments.SegmentSynchronizationTaskImp; import io.split.grammar.Treatments; -import io.split.telemetry.storage.InMemoryTelemetryStorage; -import io.split.telemetry.storage.TelemetryStorage; import org.junit.Assert; import org.junit.Test; import org.mockito.Mockito; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.hamcrest.Matchers.equalTo; -import static 
org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; /** * Tests for ExperimentParser @@ -51,11 +62,9 @@ public class SplitParserTest { public static final String EMPLOYEES = "employees"; public static final String SALES_PEOPLE = "salespeople"; public static final int CONDITIONS_UPPER_LIMIT = 50; - private static final TelemetryStorage TELEMETRY_STORAGE = Mockito.mock(InMemoryTelemetryStorage.class); @Test public void works() { - SDKReadinessGates gates = new SDKReadinessGates(); SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); segmentCache.updateSegment(EMPLOYEES, Stream.of("adil", "pato", "trevor").collect(Collectors.toList()), new ArrayList<>(), 1L); segmentCache.updateSegment(SALES_PEOPLE, Stream.of("kunal").collect(Collectors.toList()), new ArrayList<>(), 1L); @@ -76,7 +85,7 @@ public void works() { List conditions = Lists.newArrayList(c); - Split split = makeSplit("first.name", 123, conditions, 1); + Split split = makeSplit("first-name", 123, conditions, 1); ParsedSplit actual = parser.parse(split); @@ -86,14 +95,16 @@ public void works() { ParsedCondition parsedCondition = ParsedCondition.createParsedConditionForTests(combiningMatcher, partitions); List listOfMatcherAndSplits = Lists.newArrayList(parsedCondition); - ParsedSplit expected = ParsedSplit.createParsedSplitForTests("first.name", 123, false, Treatments.OFF, listOfMatcherAndSplits, "user", 1, 1); + ParsedSplit expected = ParsedSplit.createParsedSplitForTests("first-name", 123, false, Treatments.OFF, listOfMatcherAndSplits, "user", 1, 1, null, false, new PrerequisitesMatcher(null)); - assertThat(actual, is(equalTo(expected))); + compareParsed(actual, expected); + assertTrue(expected.hashCode() != 0); + assertTrue(expected.equals(expected)); + Assert.assertEquals(expected.toString(), actual.toString()); } @Test public void worksWithConfig() { - SDKReadinessGates gates = new 
SDKReadinessGates(); SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); segmentCache.updateSegment(EMPLOYEES, Stream.of("adil", "pato", "trevor").collect(Collectors.toList()), new ArrayList<>(), 1L); segmentCache.updateSegment(SALES_PEOPLE, Stream.of("kunal").collect(Collectors.toList()), new ArrayList<>(), 1L); @@ -117,7 +128,7 @@ public void worksWithConfig() { Map configurations = new HashMap<>(); configurations.put("on", "{\"size\":15,\"test\":20}"); configurations.put("off", "{\"size\":10}"); - Split split = makeSplit("first.name", 123, conditions, 1, configurations); + Split split = makeSplit("first-name", 123, conditions, 1, configurations); ParsedSplit actual = parser.parse(split); @@ -127,14 +138,29 @@ public void worksWithConfig() { ParsedCondition parsedCondition = ParsedCondition.createParsedConditionForTests(combiningMatcher, partitions); List listOfMatcherAndSplits = Lists.newArrayList(parsedCondition); - ParsedSplit expected = ParsedSplit.createParsedSplitForTests("first.name", 123, false, Treatments.OFF, listOfMatcherAndSplits, "user", 1, 1, configurations); - - assertThat(actual, is(equalTo(expected))); - assertThat(actual.configurations().get("on"), is(equalTo(configurations.get("on")))); + ParsedSplit expected = ParsedSplit.createParsedSplitForTests("first-name", 123, false, Treatments.OFF, + listOfMatcherAndSplits, "user", 1, 1, configurations, new HashSet<>(), false, new PrerequisitesMatcher(null)); + + Assert.assertEquals(actual.parsedConditions(), expected.parsedConditions()); + Assert.assertEquals(actual.feature(), expected.feature()); + Assert.assertEquals(actual.changeNumber(), expected.changeNumber()); + Assert.assertEquals(actual.defaultTreatment(), expected.defaultTreatment()); + Assert.assertEquals(actual.killed(), expected.killed()); + Assert.assertEquals(actual.impressionsDisabled(), expected.impressionsDisabled()); + Assert.assertEquals(null, actual.flagSets()); + Assert.assertEquals(actual.algo(), expected.algo()); + 
Assert.assertEquals(actual.seed(), expected.seed()); + Assert.assertEquals(actual.trafficAllocation(), expected.trafficAllocation()); + Assert.assertEquals(actual.trafficAllocationSeed(), expected.trafficAllocationSeed()); + Assert.assertEquals(actual.getSegmentsNames(), expected.getSegmentsNames()); + Assert.assertEquals(actual.getRuleBasedSegmentsNames(), expected.getRuleBasedSegmentsNames()); + Assert.assertEquals(actual.prerequisitesMatcher().toString(), expected.prerequisitesMatcher().toString()); + + Assert.assertEquals(actual.configurations().get("on"), configurations.get("on")); } @Test - public void works_for_two_conditions() { + public void worksForTwoConditions() { SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); segmentCache.updateSegment(EMPLOYEES, Stream.of("adil", "pato", "trevor").collect(Collectors.toList()), new ArrayList<>(), 1L); segmentCache.updateSegment(SALES_PEOPLE, Stream.of("kunal").collect(Collectors.toList()), new ArrayList<>(), 1L); @@ -158,22 +184,21 @@ public void works_for_two_conditions() { List conditions = Lists.newArrayList(c1, c2); - Split split = makeSplit("first.name", 123, conditions, 1); + Split split = makeSplit("first-name", 123, conditions, 1); ParsedSplit actual = parser.parse(split); ParsedCondition parsedCondition1 = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new UserDefinedSegmentMatcher(EMPLOYEES)), fullyRollout); - ParsedCondition parsedCondition2 = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new UserDefinedSegmentMatcher(EMPLOYEES)), turnOff); + ParsedCondition parsedCondition2 = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new UserDefinedSegmentMatcher(SALES_PEOPLE)), turnOff); List listOfParsedConditions = Lists.newArrayList(parsedCondition1, parsedCondition2); - ParsedSplit expected = ParsedSplit.createParsedSplitForTests("first.name", 123, false, Treatments.OFF, listOfParsedConditions, "user", 1, 1); + ParsedSplit expected = 
ParsedSplit.createParsedSplitForTests("first-name", 123, false, Treatments.OFF, listOfParsedConditions, "user", 1, 1, null, false, new PrerequisitesMatcher(null)); - assertThat(actual, is(equalTo(expected))); + compareParsed(actual, expected); } @Test - public void success_for_long_conditions() { - SDKReadinessGates gates = new SDKReadinessGates(); + public void successForLongConditions() { SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); segmentCache.updateSegment(EMPLOYEES, Stream.of("adil", "pato", "trevor").collect(Collectors.toList()), new ArrayList<>(), 1L); segmentCache.updateSegment(SALES_PEOPLE, Stream.of("kunal").collect(Collectors.toList()), new ArrayList<>(), 1L); @@ -193,15 +218,14 @@ public void success_for_long_conditions() { conditions.add(c); } - Split split = makeSplit("first.name", 123, conditions, 1); + Split split = makeSplit("first-name", 123, conditions, 1); Assert.assertNotNull(parser.parse(split)); } @Test - public void works_with_attributes() { - SDKReadinessGates gates = new SDKReadinessGates(); + public void worksWithAttributes() { SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); segmentCache.updateSegment(EMPLOYEES, Stream.of("adil", "pato", "trevor").collect(Collectors.toList()), new ArrayList<>(), 1L); segmentCache.updateSegment(SALES_PEOPLE, Stream.of("kunal").collect(Collectors.toList()), new ArrayList<>(), 1L); @@ -227,7 +251,7 @@ public void works_with_attributes() { List conditions = Lists.newArrayList(c); - Split split = makeSplit("first.name", 123, conditions, 1); + Split split = makeSplit("first-name", 123, conditions, 1); ParsedSplit actual = parser.parse(split); @@ -237,19 +261,13 @@ public void works_with_attributes() { ParsedCondition parsedCondition = ParsedCondition.createParsedConditionForTests(combiningMatcher, partitions); List listOfMatcherAndSplits = Lists.newArrayList(parsedCondition); - ParsedSplit expected = ParsedSplit.createParsedSplitForTests("first.name", 123, false, Treatments.OFF, 
listOfMatcherAndSplits, "user", 1, 1); + ParsedSplit expected = ParsedSplit.createParsedSplitForTests("first-name", 123, false, Treatments.OFF, listOfMatcherAndSplits, "user", 1, 1, null, false, new PrerequisitesMatcher(null)); - assertThat(actual, is(equalTo(expected))); + compareParsed(actual, expected); } @Test - public void less_than_or_equal_to() { - - -// SegmentSynchronizationTask segmentFetcher = new SegmentSynchronizationTaskImp(fetcherMap); -// SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); - SDKReadinessGates gates = new SDKReadinessGates(); - SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); + public void lessThanOrEqualTo() { SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); SegmentChange segmentChangeEmployee = getSegmentChange(-1L, -1L, EMPLOYEES); SegmentChange segmentChangeSalesPeople = getSegmentChange(-1L, -1L, SALES_PEOPLE); @@ -267,7 +285,7 @@ public void less_than_or_equal_to() { List conditions = Lists.newArrayList(c); - Split split = makeSplit("first.name", 123, conditions, 1); + Split split = makeSplit("first-name", 123, conditions, 1); ParsedSplit actual = parser.parse(split); @@ -276,18 +294,13 @@ public void less_than_or_equal_to() { ParsedCondition parsedCondition = ParsedCondition.createParsedConditionForTests(combiningMatcher, partitions); List listOfMatcherAndSplits = Lists.newArrayList(parsedCondition); - ParsedSplit expected = ParsedSplit.createParsedSplitForTests("first.name", 123, false, Treatments.OFF, listOfMatcherAndSplits, "user", 1, 1); + ParsedSplit expected = ParsedSplit.createParsedSplitForTests("first-name", 123, false, Treatments.OFF, listOfMatcherAndSplits, "user", 1, 1, null, false, new PrerequisitesMatcher(null)); - assertThat(actual, is(equalTo(expected))); + compareParsed(actual, expected); } @Test - public void equal_to() { - -// SegmentSynchronizationTask segmentFetcher = new SegmentSynchronizationTaskImp(fetcherMap); -// SegmentCache segmentCache = new 
SegmentCacheInMemoryImpl(); - SDKReadinessGates gates = new SDKReadinessGates(); - SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); + public void equalTo() { SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); SegmentChange segmentChangeEmployee = getSegmentChange(-1L, -1L, EMPLOYEES); SegmentChange segmentChangeSalesPeople = getSegmentChange(-1L, -1L, SALES_PEOPLE); @@ -304,7 +317,7 @@ public void equal_to() { List conditions = Lists.newArrayList(c); - Split split = makeSplit("first.name", 123, conditions, 1); + Split split = makeSplit("first-name", 123, conditions, 1); ParsedSplit actual = parser.parse(split); @@ -313,18 +326,13 @@ public void equal_to() { ParsedCondition parsedCondition = ParsedCondition.createParsedConditionForTests(combiningMatcher, partitions); List listOfMatcherAndSplits = Lists.newArrayList(parsedCondition); - ParsedSplit expected = ParsedSplit.createParsedSplitForTests("first.name", 123, false, Treatments.OFF, listOfMatcherAndSplits, "user", 1, 1); + ParsedSplit expected = ParsedSplit.createParsedSplitForTests("first-name", 123, false, Treatments.OFF, listOfMatcherAndSplits, "user", 1, 1, null, false, new PrerequisitesMatcher(null)); - assertThat(actual, is(equalTo(expected))); + compareParsed(actual, expected); } @Test - public void equal_to_negative_number() { - -// SegmentSynchronizationTask segmentFetcher = new SegmentSynchronizationTaskImp(fetcherMap); -// SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); - SDKReadinessGates gates = new SDKReadinessGates(); - SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); + public void equalToNegativeNumber() { SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); SegmentChange segmentChangeEmployee = getSegmentChange(-1L, -1L, EMPLOYEES); SegmentChange segmentChangeSalesPeople = getSegmentChange(-1L, -1L, SALES_PEOPLE); @@ -340,7 +348,7 @@ public void equal_to_negative_number() { List conditions = 
Lists.newArrayList(c); - Split split = makeSplit("first.name", 123, conditions, 1); + Split split = makeSplit("first-name", 123, conditions, 1); ParsedSplit actual = parser.parse(split); @@ -349,18 +357,13 @@ public void equal_to_negative_number() { ParsedCondition parsedCondition = ParsedCondition.createParsedConditionForTests(combiningMatcher, partitions); List listOfMatcherAndSplits = Lists.newArrayList(parsedCondition); - ParsedSplit expected = ParsedSplit.createParsedSplitForTests("first.name", 123, false, Treatments.OFF, listOfMatcherAndSplits, "user", 1, 1); + ParsedSplit expected = ParsedSplit.createParsedSplitForTests("first-name", 123, false, Treatments.OFF, listOfMatcherAndSplits, "user", 1, 1, null, false, new PrerequisitesMatcher(null)); - assertThat(actual, is(equalTo(expected))); + compareParsed(actual, expected); } @Test public void between() { - -// SegmentSynchronizationTask segmentFetcher = new SegmentSynchronizationTaskImp(fetcherMap); -// SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); - SDKReadinessGates gates = new SDKReadinessGates(); - SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); SegmentChange segmentChangeEmployee = getSegmentChange(-1L, -1L, EMPLOYEES); SegmentChange segmentChangeSalesPeople = getSegmentChange(-1L, -1L, SALES_PEOPLE); @@ -381,7 +384,7 @@ public void between() { List conditions = Lists.newArrayList(c); - Split split = makeSplit("first.name", 123, conditions, 1); + Split split = makeSplit("first-name", 123, conditions, 1); ParsedSplit actual = parser.parse(split); @@ -390,14 +393,13 @@ public void between() { ParsedCondition parsedCondition = ParsedCondition.createParsedConditionForTests(combiningMatcher, partitions); List listOfMatcherAndSplits = Lists.newArrayList(parsedCondition); - ParsedSplit expected = ParsedSplit.createParsedSplitForTests("first.name", 123, false, Treatments.OFF, listOfMatcherAndSplits, 
"user", 1, 1); + ParsedSplit expected = ParsedSplit.createParsedSplitForTests("first-name", 123, false, Treatments.OFF, listOfMatcherAndSplits, "user", 1, 1, null, false, new PrerequisitesMatcher(null)); - assertThat(actual, is(equalTo(expected))); + compareParsed(actual, expected); } @Test - public void contains_any_of_set() { - + public void containsAnyOfSet() { ArrayList set = Lists.newArrayList("sms", "voice"); List partitions = Lists.newArrayList(ConditionsTestUtil.partition("on", 100)); @@ -411,12 +413,11 @@ public void contains_any_of_set() { ContainsAnyOfSetMatcher m = new ContainsAnyOfSetMatcher(set); - set_matcher_test(c, m); + setMatcherTest(c, m); } @Test - public void contains_all_of_set() { - + public void containsAllOfSet() { ArrayList set = Lists.newArrayList("sms", "voice"); List partitions = Lists.newArrayList(ConditionsTestUtil.partition("on", 100)); @@ -430,12 +431,11 @@ public void contains_all_of_set() { ContainsAllOfSetMatcher m = new ContainsAllOfSetMatcher(set); - set_matcher_test(c, m); + setMatcherTest(c, m); } @Test - public void equal_to_set() { - + public void equalToSet() { ArrayList set = Lists.newArrayList("sms", "voice"); List partitions = Lists.newArrayList(ConditionsTestUtil.partition("on", 100)); @@ -449,12 +449,11 @@ public void equal_to_set() { EqualToSetMatcher m = new EqualToSetMatcher(set); - set_matcher_test(c, m); + setMatcherTest(c, m); } @Test - public void is_part_of_set() { - + public void isPartOfSet() { ArrayList set = Lists.newArrayList("sms", "voice"); List partitions = Lists.newArrayList(ConditionsTestUtil.partition("on", 100)); @@ -468,12 +467,11 @@ public void is_part_of_set() { PartOfSetMatcher m = new PartOfSetMatcher(set); - set_matcher_test(c, m); + setMatcherTest(c, m); } @Test - public void starts_with_string() { - + public void startsWithString() { ArrayList set = Lists.newArrayList("sms", "voice"); List partitions = Lists.newArrayList(ConditionsTestUtil.partition("on", 100)); @@ -487,12 +485,11 @@ 
public void starts_with_string() { StartsWithAnyOfMatcher m = new StartsWithAnyOfMatcher(set); - set_matcher_test(c, m); + setMatcherTest(c, m); } @Test - public void ends_with_string() { - + public void endsWithString() { ArrayList set = Lists.newArrayList("sms", "voice"); List partitions = Lists.newArrayList(ConditionsTestUtil.partition("on", 100)); @@ -506,13 +503,12 @@ public void ends_with_string() { EndsWithAnyOfMatcher m = new EndsWithAnyOfMatcher(set); - set_matcher_test(c, m); + setMatcherTest(c, m); } @Test - public void contains_string() { - + public void containsString() { ArrayList set = Lists.newArrayList("sms", "voice"); List partitions = Lists.newArrayList(ConditionsTestUtil.partition("on", 100)); @@ -526,15 +522,170 @@ public void contains_string() { ContainsAnyOfMatcher m = new ContainsAnyOfMatcher(set); - set_matcher_test(c, m); + setMatcherTest(c, m); } - public void set_matcher_test(Condition c, io.split.engine.matchers.Matcher m) { + @Test + public void UnsupportedMatcher() { + SplitParser parser = new SplitParser(); + String splitWithUndefinedMatcher = "{\"ff\":{\"s\":-1,\"t\": 1457726098069,\"d\": [{ \"changeNumber\": 123, \"trafficTypeName\": \"user\", \"name\": \"some_name\"," + + "\"trafficAllocation\": 100, \"trafficAllocationSeed\": 123456, \"seed\": 321654, \"status\": \"ACTIVE\"," + + "\"killed\": false, \"defaultTreatment\": \"off\", \"algo\": 2,\"conditions\": [{ \"partitions\": [" + + "{\"treatment\": \"on\", \"size\": 50}, {\"treatment\": \"off\", \"size\": 50}], \"contitionType\": \"ROLLOUT\"," + + "\"label\": \"some_label\", \"matcherGroup\": { \"matchers\": [{ \"matcherType\": \"UNKNOWN\", \"negate\": false}]," + + "\"combiner\": \"AND\"}}], \"sets\": [\"set1\"]}]}, \"rbs\":{\"s\":-1,\"t\":-1,\"d\":[]}}"; + SplitChange change = Json.fromJson(splitWithUndefinedMatcher, SplitChange.class); + for (Split split : change.featureFlags.d) { + // should not cause exception + ParsedSplit parsedSplit = parser.parse(split); + for 
(ParsedCondition parsedCondition : parsedSplit.parsedConditions()) { + assertTrue(parsedCondition.label() == Labels.UNSUPPORTED_MATCHER); + for (AttributeMatcher matcher : parsedCondition.matcher().attributeMatchers()) { + // Check the matcher is ALL_KEYS + assertTrue(matcher.matcher().toString().equals(" in segment all")); + } + } + } + } + + @Test + public void EqualToSemverMatcher() throws IOException { + SplitParser parser = new SplitParser(); + String load = new String(Files.readAllBytes(Paths.get("src/test/resources/semver/semver-splits.json")), StandardCharsets.UTF_8); + SplitChange change = Json.fromJson(load, SplitChange.class); + for (Split split : change.featureFlags.d) { + // should not cause exception + ParsedSplit parsedSplit = parser.parse(split); + if (split.name.equals("semver_equalto")) { + for (ParsedCondition parsedCondition : parsedSplit.parsedConditions()) { + assertTrue(parsedCondition.label().equals("equal to semver")); + for (AttributeMatcher matcher : parsedCondition.matcher().attributeMatchers()) { + // Check the matcher is ALL_KEYS + assertTrue(matcher.matcher().toString().equals(" == semver 1\\.22\\.9")); + return; + } + } + } + } + assertTrue(false); + } + + @Test + public void GreaterThanOrEqualSemverMatcher() throws IOException { + SplitParser parser = new SplitParser(); + String load = new String(Files.readAllBytes(Paths.get("src/test/resources/semver/semver-splits.json")), StandardCharsets.UTF_8); + SplitChange change = Json.fromJson(load, SplitChange.class); + for (Split split : change.featureFlags.d) { + // should not cause exception + ParsedSplit parsedSplit = parser.parse(split); + if (split.name.equals("semver_greater_or_equalto")) { + for (ParsedCondition parsedCondition : parsedSplit.parsedConditions()) { + assertTrue(parsedCondition.label().equals("greater than or equal to semver")); + for (AttributeMatcher matcher : parsedCondition.matcher().attributeMatchers()) { + // Check the matcher is ALL_KEYS + 
assertTrue(matcher.matcher().toString().equals(" >= semver 1\\.22\\.9")); + return; + } + } + } + } + assertTrue(false); + } + + @Test + public void LessThanOrEqualSemverMatcher() throws IOException { + SplitParser parser = new SplitParser(); + String load = new String(Files.readAllBytes(Paths.get("src/test/resources/semver/semver-splits.json")), StandardCharsets.UTF_8); + SplitChange change = Json.fromJson(load, SplitChange.class); + for (Split split : change.featureFlags.d) { + // should not cause exception + ParsedSplit parsedSplit = parser.parse(split); + if (split.name.equals("semver_less_or_equalto")) { + for (ParsedCondition parsedCondition : parsedSplit.parsedConditions()) { + assertTrue(parsedCondition.label().equals("less than or equal to semver")); + for (AttributeMatcher matcher : parsedCondition.matcher().attributeMatchers()) { + // Check the matcher is ALL_KEYS + assertTrue(matcher.matcher().toString().equals(" <= semver 1\\.22\\.9")); + return; + } + } + } + } + assertTrue(false); + } + + @Test + public void BetweenSemverMatcher() throws IOException { + SplitParser parser = new SplitParser(); + String load = new String(Files.readAllBytes(Paths.get("src/test/resources/semver/semver-splits.json")), StandardCharsets.UTF_8); + SplitChange change = Json.fromJson(load, SplitChange.class); + for (Split split : change.featureFlags.d) { + // should not cause exception + ParsedSplit parsedSplit = parser.parse(split); + if (split.name.equals("semver_between")) { + for (ParsedCondition parsedCondition : parsedSplit.parsedConditions()) { + assertTrue(parsedCondition.label().equals("between semver")); + for (AttributeMatcher matcher : parsedCondition.matcher().attributeMatchers()) { + // Check the matcher is ALL_KEYS + assertTrue(matcher.matcher().toString().equals(" between semver 1\\.22\\.9 and 2\\.1\\.0")); + return; + } + } + } + } + assertTrue(false); + } + + @Test + public void InListSemverMatcher() throws IOException { + SplitParser parser = new 
SplitParser(); + String load = new String(Files.readAllBytes(Paths.get("src/test/resources/semver/semver-splits.json")), StandardCharsets.UTF_8); + SplitChange change = Json.fromJson(load, SplitChange.class); + for (Split split : change.featureFlags.d) { + // should not cause exception + ParsedSplit parsedSplit = parser.parse(split); + if (split.name.equals("semver_inlist")) { + for (ParsedCondition parsedCondition : parsedSplit.parsedConditions()) { + assertTrue(parsedCondition.label().equals("in list semver")); + for (AttributeMatcher matcher : parsedCondition.matcher().attributeMatchers()) { + // Check the matcher is ALL_KEYS + assertTrue(matcher.matcher().toString().startsWith(" in semver list")); + return; + } + } + } + } + assertTrue(false); + } + + @Test + public void ImpressionToggleParseTest() throws IOException { + SplitParser parser = new SplitParser(); + String load = new String(Files.readAllBytes(Paths.get("src/test/resources/splits_imp_toggle.json")), StandardCharsets.UTF_8); + SplitChange change = Json.fromJson(load, SplitChange.class); + boolean check1 = false, check2 = false, check3 = false; + for (Split split : change.featureFlags.d) { + ParsedSplit parsedSplit = parser.parse(split); + if (split.name.equals("without_impression_toggle")) { + assertFalse(parsedSplit.impressionsDisabled()); + check1 = true; + } + if (split.name.equals("impression_toggle_on")) { + assertFalse(parsedSplit.impressionsDisabled()); + check2 = true; + } + if (split.name.equals("impression_toggle_off")) { + assertTrue(parsedSplit.impressionsDisabled()); + check3 = true; + } + } + assertTrue(check1); + assertTrue(check2); + assertTrue(check3); + } + + public void setMatcherTest(Condition c, io.split.engine.matchers.Matcher m) { -// SegmentSynchronizationTask segmentFetcher = new SegmentSynchronizationTaskImp(fetcherMap); -// SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); - SDKReadinessGates gates = new SDKReadinessGates(); - SegmentCache segmentCache = new 
SegmentCacheInMemoryImpl(); SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); SegmentChange segmentChangeEmployee = getSegmentChange(-1L, -1L, EMPLOYEES); SegmentChange segmentChangeSalesPeople = getSegmentChange(-1L, -1L, SALES_PEOPLE); @@ -558,9 +709,27 @@ public void set_matcher_test(Condition c, io.split.engine.matchers.Matcher m) { ParsedCondition parsedCondition = ParsedCondition.createParsedConditionForTests(combiningMatcher, partitions); List listOfMatcherAndSplits = Lists.newArrayList(parsedCondition); - ParsedSplit expected = ParsedSplit.createParsedSplitForTests("splitName", 123, false, Treatments.OFF, listOfMatcherAndSplits, "user", 1, 1); + ParsedSplit expected = ParsedSplit.createParsedSplitForTests("splitName", 123, false, Treatments.OFF, listOfMatcherAndSplits, "user", 1, 1, null, false, new PrerequisitesMatcher(null)); - assertThat(actual, is(equalTo(expected))); + compareParsed(actual, expected); + } + + private void compareParsed(ParsedSplit actual, ParsedSplit expected) { + Assert.assertEquals(actual.getRuleBasedSegmentsNames(), expected.getRuleBasedSegmentsNames()); + Assert.assertEquals(actual.seed(), expected.seed()); + Assert.assertEquals(actual.algo(), expected.algo()); + Assert.assertEquals(actual.trafficAllocationSeed(), expected.trafficAllocationSeed()); + Assert.assertEquals(actual.flagSets(), expected.flagSets()); + Assert.assertEquals(actual.parsedConditions(), expected.parsedConditions()); + Assert.assertEquals(actual.trafficAllocation(), expected.trafficAllocation()); + Assert.assertEquals(actual.getSegmentsNames(), expected.getSegmentsNames()); + Assert.assertEquals(actual.impressionsDisabled(), expected.impressionsDisabled()); + Assert.assertEquals(actual.killed(), expected.killed()); + Assert.assertEquals(actual.defaultTreatment(), expected.defaultTreatment()); + Assert.assertEquals(actual.changeNumber(), expected.changeNumber()); + Assert.assertEquals(actual.feature(), expected.feature()); + 
Assert.assertEquals(actual.configurations(), expected.configurations()); + Assert.assertEquals(actual.prerequisitesMatcher().toString(), expected.prerequisitesMatcher().toString()); } private Split makeSplit(String name, int seed, List conditions, long changeNumber) { @@ -580,6 +749,7 @@ private Split makeSplit(String name, int seed, List conditions, long split.changeNumber = changeNumber; split.algo = 1; split.configurations = configurations; + split.prerequisites = new ArrayList<>(); return split; } @@ -592,5 +762,4 @@ private SegmentChange getSegmentChange(long since, long till, String segmentName segmentChange.removed = new ArrayList<>(); return segmentChange; } - -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/experiments/SplitSynchronizationTaskTest.java b/client/src/test/java/io/split/engine/experiments/SplitSynchronizationTaskTest.java new file mode 100644 index 000000000..ce04294cb --- /dev/null +++ b/client/src/test/java/io/split/engine/experiments/SplitSynchronizationTaskTest.java @@ -0,0 +1,55 @@ +package io.split.engine.experiments; + +import io.split.client.JsonLocalhostSplitChangeFetcher; +import io.split.client.interceptors.FlagSetsFilter; +import io.split.client.interceptors.FlagSetsFilterImpl; +import io.split.engine.common.FetchOptions; +import io.split.storages.RuleBasedSegmentCacheProducer; +import io.split.storages.SplitCacheProducer; +import io.split.storages.memory.InMemoryCacheImp; +import io.split.storages.memory.RuleBasedSegmentCacheInMemoryImp; +import io.split.telemetry.storage.NoopTelemetryStorage; +import io.split.telemetry.storage.TelemetryStorage; +import org.junit.Test; +import org.mockito.Mockito; + +import java.util.HashSet; + +public class SplitSynchronizationTaskTest { + + private static final TelemetryStorage TELEMETRY_STORAGE_NOOP = Mockito.mock(NoopTelemetryStorage.class); + private static final FlagSetsFilter FLAG_SETS_FILTER = new FlagSetsFilterImpl(new HashSet<>()); + + @Test + public 
void testLocalhost() throws InterruptedException { + SplitCacheProducer splitCacheProducer = new InMemoryCacheImp(FLAG_SETS_FILTER); + + SplitChangeFetcher splitChangeFetcher = Mockito.mock(JsonLocalhostSplitChangeFetcher.class); + SplitParser splitParser = new SplitParser(); + FetchOptions fetchOptions = new FetchOptions.Builder().build(); + RuleBasedSegmentCacheProducer ruleBasedSegmentCacheProducer = new RuleBasedSegmentCacheInMemoryImp(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + SplitFetcher splitFetcher = new SplitFetcherImp(splitChangeFetcher, splitParser, splitCacheProducer, TELEMETRY_STORAGE_NOOP, FLAG_SETS_FILTER, + ruleBasedSegmentParser, ruleBasedSegmentCacheProducer); + + SplitSynchronizationTask splitSynchronizationTask = new SplitSynchronizationTask(splitFetcher, splitCacheProducer, 1000, null); + + splitSynchronizationTask.start(); + + Thread.sleep(2000); + + Mockito.verify(splitChangeFetcher, Mockito.times(1)).fetch(-1, -1, fetchOptions); + } + + @Test + public void testStartAndStop() throws InterruptedException { + SplitCacheProducer splitCacheProducer = new InMemoryCacheImp(FLAG_SETS_FILTER); + SplitFetcherImp splitFetcherImp = Mockito.mock(SplitFetcherImp.class); + SplitSynchronizationTask splitSynchronizationTask = new SplitSynchronizationTask(splitFetcherImp, splitCacheProducer, 1000, null); + splitSynchronizationTask.start(); + + Thread.sleep(2000); + + Mockito.verify(splitFetcherImp, Mockito.times(1)).run(); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/matchers/AttributeMatcherTest.java b/client/src/test/java/io/split/engine/matchers/AttributeMatcherTest.java index 7c723adb6..9f535790d 100644 --- a/client/src/test/java/io/split/engine/matchers/AttributeMatcherTest.java +++ b/client/src/test/java/io/split/engine/matchers/AttributeMatcherTest.java @@ -5,6 +5,7 @@ import com.google.common.collect.Maps; import io.split.client.dtos.DataType; import 
io.split.engine.matchers.strings.WhitelistMatcher; +import org.junit.Assert; import org.junit.Test; import java.util.Calendar; @@ -12,9 +13,6 @@ import java.util.Date; import java.util.Map; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - /** * Tests for AllKeysMatcher */ @@ -23,60 +21,62 @@ public class AttributeMatcherTest { @Test public void works() { AttributeMatcher matcher = new AttributeMatcher("creation_date", new GreaterThanOrEqualToMatcher(100L, DataType.NUMBER), false); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", 99L), null), is(false)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", 100L), null), is(true)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", 101), null), is(true)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", 101.3), null), is(false)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", Calendar.getInstance()), null), is(false)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", new Date()), null), is(false)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("creation_date", 99L), null)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("creation_date", 100L), null)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("creation_date", 101), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("creation_date", 101.3), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("creation_date", Calendar.getInstance()), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("creation_date", new Date()), null)); } @Test - public void works_negation() { + public void worksNegation() { AttributeMatcher matcher = new AttributeMatcher("creation_date", new GreaterThanOrEqualToMatcher(100L, DataType.NUMBER), true); - 
assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", 99L), null), is(true)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", 100L), null), is(false)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", 101), null), is(false)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", 101.3), null), is(true)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", Calendar.getInstance()), null), is(true)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", new Date()), null), is(true)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("creation_date", 99L), null)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("creation_date", 99L), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("creation_date", 100L), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("creation_date", 101), null)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("creation_date", 101.3), null)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("creation_date", Calendar.getInstance()), null)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("creation_date", new Date()), null)); } @Test - public void works_less_than_or_equal_to() { + public void worksLessThanOrEqualTo() { AttributeMatcher matcher = new AttributeMatcher("creation_date", new LessThanOrEqualToMatcher(100L, DataType.NUMBER), false); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", 99L), null), is(true)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", 100L), null), is(true)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", 101), null), is(false)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", 101.3), null), is(false)); - 
assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", Calendar.getInstance()), null), is(false)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", new Date()), null), is(false)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("creation_date", 99L), null)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("creation_date", 100L), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("creation_date", 101), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("creation_date", 101.3), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("creation_date", 101.3), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("creation_date", Calendar.getInstance()), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("creation_date", new Date()), null)); } @Test - public void works_boolean() { + public void worksBoolean() { AttributeMatcher matcher = new AttributeMatcher("value", new BooleanMatcher(true), false); - assertThat(matcher.match("ignore", null, ImmutableMap.of("value", true), null), is(true)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("value", "true"), null), is(true)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("value", "True"), null), is(true)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("value", "TrUe"), null), is(true)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("value", "TRUE"), null), is(true)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("value", false), null), is(false)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("value", "false"), null), is(false)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("value", "False"), null), is(false)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("value", "FALSE"), null), is(false)); - assertThat(matcher.match("ignore", 
null, ImmutableMap.of("value", "faLSE"), null), is(false)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("value", ""), null), is(false)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("value", 0), null), is(false)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("value", 1), null), is(false)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("value", true), null)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("value", "true"), null)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("value", "True"), null)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("value", "TrUe"), null)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("value", "TRUE"), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("value", false), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("value", "false"), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("value", "False"), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("value", "FALSE"), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("value", "faLSE"), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("value", ""), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("value", 0), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("value", 1), null)); } @Test - public void error_conditions() { + public void errorConditions() { AttributeMatcher matcher = new AttributeMatcher("creation_date", new GreaterThanOrEqualToMatcher(100L, DataType.NUMBER), false); - assertThat(matcher.match("ignore", null, null, null), is(false)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("foo", 101), null), is(false)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", "101"), null), is(false)); + 
Assert.assertFalse(matcher.match("ignore", null, null, null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("foo", 101), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("creation_date", "101"), null)); } @Test @@ -85,38 +85,38 @@ public void dates() { Calendar c = Calendar.getInstance(); c.add(Calendar.YEAR, -1); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", c.getTimeInMillis()), null), is(false)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("creation_date", c.getTimeInMillis()), null)); c.add(Calendar.YEAR, 2); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", c.getTimeInMillis()), null), is(true)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("creation_date", c.getTimeInMillis()), null)); } @Test public void between() { AttributeMatcher matcher = new AttributeMatcher("creation_date", new BetweenMatcher(10, 12, DataType.NUMBER), false); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", 9), null), is(false)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", 10), null), is(true)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", 11), null), is(true)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", 12), null), is(true)); - assertThat(matcher.match("ignore", null, ImmutableMap.of("creation_date", 13), null), is(false)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("creation_date", 9), null)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("creation_date", 10), null)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("creation_date", 11), null)); + Assert.assertTrue(matcher.match("ignore", null, ImmutableMap.of("creation_date", 12), null)); + Assert.assertFalse(matcher.match("ignore", null, ImmutableMap.of("creation_date", 13), null)); } @Test - public void 
when_no_attribute_we_use_the_key() { + public void whenNoAttributeWeUseTheKey() { AttributeMatcher matcher = new AttributeMatcher(null, new WhitelistMatcher(Lists.newArrayList("trial")), false); Map nullMap = Maps.newHashMap(); nullMap.put("planType", null); - assertThat(matcher.match("trial", null, ImmutableMap.of("planType", "trial"), null), is(true)); - assertThat(matcher.match("trial", null, ImmutableMap.of("planType", "Trial"), null), is(true)); - assertThat(matcher.match("trial", null, nullMap, null), is(true)); - assertThat(matcher.match("trial", null, ImmutableMap.of("planType", "premium"), null), is(true)); - assertThat(matcher.match("trial", null, ImmutableMap.of("planType", 10), null), is(true)); - assertThat(matcher.match("trial", null, Collections.emptyMap(), null), is(true)); - assertThat(matcher.match("trial", null, null, null), is(true)); - assertThat(matcher.match("premium", null, null, null), is(false)); + Assert.assertTrue(matcher.match("trial", null, ImmutableMap.of("planType", "trial"), null)); + Assert.assertTrue(matcher.match("trial", null, ImmutableMap.of("planType", "Trial"), null)); + Assert.assertTrue(matcher.match("trial", null, nullMap, null)); + Assert.assertTrue(matcher.match("trial", null, ImmutableMap.of("planType", "premium"), null)); + Assert.assertTrue(matcher.match("trial", null, ImmutableMap.of("planType", 10), null)); + Assert.assertTrue(matcher.match("trial", null, Collections.emptyMap(), null)); + Assert.assertTrue(matcher.match("trial", null, null, null)); + Assert.assertFalse(matcher.match("premium", null, null, null)); } -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/matchers/BetweenMatcherTest.java b/client/src/test/java/io/split/engine/matchers/BetweenMatcherTest.java index 410977699..22bc3b449 100644 --- a/client/src/test/java/io/split/engine/matchers/BetweenMatcherTest.java +++ b/client/src/test/java/io/split/engine/matchers/BetweenMatcherTest.java @@ -1,11 +1,9 @@ package 
io.split.engine.matchers; import io.split.client.dtos.DataType; +import org.junit.Assert; import org.junit.Test; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - /** * Tests for BetweenMatcherTest */ @@ -18,18 +16,18 @@ public void works() { BetweenMatcher matcher = new BetweenMatcher(start, end, DataType.NUMBER); - assertThat(matcher.match(null, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); for (int i = start; i <= end; i++) { - assertThat(matcher.match(i, null, null, null), is(true)); + Assert.assertTrue(matcher.match(i, null, null, null)); } - assertThat(matcher.match(new Long(start - 1), null, null, null), is(false)); - assertThat(matcher.match(end + 1, null, null, null), is(false)); + Assert.assertFalse(matcher.match(new Long(start - 1), null, null, null)); + Assert.assertFalse(matcher.match(end + 1, null, null, null)); } @Test - public void works_dates() { + public void worksDates() { long april11_2016_23_59 = 1460419199000L; long april12_2016_midnight_19 = 1460420360000L; long april12_2016_midnight_20 = 1460420421903L; @@ -39,14 +37,10 @@ public void works_dates() { BetweenMatcher matcher = new BetweenMatcher(april12_2016_midnight_19, april12_2016_midnight_20_59, DataType.DATETIME); - assertThat(matcher.match(april11_2016_23_59, null, null, null), is(false)); - assertThat(matcher.match(april12_2016_midnight_19, null, null, null), is(true)); - assertThat(matcher.match(april12_2016_midnight_20, null, null, null), is(true)); - assertThat(matcher.match(april12_2016_midnight_20_59, null, null, null), is(true)); - assertThat(matcher.match(april12_2016_1_20, null, null, null), is(false)); - - + Assert.assertFalse(matcher.match(april11_2016_23_59, null, null, null)); + Assert.assertTrue(matcher.match(april12_2016_midnight_19, null, null, null)); + Assert.assertTrue(matcher.match(april12_2016_midnight_20, null, null, null)); + 
Assert.assertTrue(matcher.match(april12_2016_midnight_20_59, null, null, null)); + Assert.assertFalse(matcher.match(april12_2016_1_20, null, null, null)); } - - -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/matchers/BetweenSemverMatcherTest.java b/client/src/test/java/io/split/engine/matchers/BetweenSemverMatcherTest.java new file mode 100644 index 000000000..41a4f76d4 --- /dev/null +++ b/client/src/test/java/io/split/engine/matchers/BetweenSemverMatcherTest.java @@ -0,0 +1,36 @@ +package io.split.engine.matchers; + +import org.junit.Test; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertFalse; + +/** + * Tests for EqualToSemverMatcher + */ + +public class BetweenSemverMatcherTest { + + @Test + public void works() { + BetweenSemverMatcher betweenSemverMatcher = new BetweenSemverMatcher("2.1.8", "3.0.0"); + + assertTrue( betweenSemverMatcher.match("2.1.8", null, null, null)); + assertTrue( betweenSemverMatcher.match("2.1.9", null, null, null)); + assertFalse( betweenSemverMatcher.match("2.1.8-rc", null, null, null)); + assertTrue( betweenSemverMatcher.match("3.0.0+build", null, null, null)); + assertFalse( betweenSemverMatcher.match("4.5.8", null, null, null)); + assertFalse( betweenSemverMatcher.match("1.0.4", null, null, null)); + assertTrue(betweenSemverMatcher.equals(betweenSemverMatcher)); + assertTrue(betweenSemverMatcher.hashCode() != 0); + } + + @Test + public void testNull() { + BetweenSemverMatcher betweenSemverMatcher = new BetweenSemverMatcher("2.1.8", "3.0.0"); + assertFalse( betweenSemverMatcher.match(null, null, null, null)); + + betweenSemverMatcher = new BetweenSemverMatcher("2.www.8", "3.xx.0"); + assertFalse(betweenSemverMatcher.match("2.www.8", null, null, null)); + } +} diff --git a/client/src/test/java/io/split/engine/matchers/BooleanMatcherTest.java b/client/src/test/java/io/split/engine/matchers/BooleanMatcherTest.java index cb466bc0b..14889af68 100644 --- 
a/client/src/test/java/io/split/engine/matchers/BooleanMatcherTest.java +++ b/client/src/test/java/io/split/engine/matchers/BooleanMatcherTest.java @@ -8,7 +8,7 @@ public class BooleanMatcherTest { @Test - public void works_true() { + public void worksTrue() { BooleanMatcher matcher = new BooleanMatcher(true); assertThat(matcher.match(null, null, null, null), is(false)); assertThat(matcher.match(true, null, null, null), is(true)); @@ -23,7 +23,7 @@ public void works_true() { } @Test - public void works_false() { + public void worksFalse() { BooleanMatcher matcher = new BooleanMatcher(false); assertThat(matcher.match(null, null, null, null), is(false)); assertThat(matcher.match(true, null, null, null), is(false)); diff --git a/client/src/test/java/io/split/engine/matchers/CombiningMatcherTest.java b/client/src/test/java/io/split/engine/matchers/CombiningMatcherTest.java index ee58a18a2..3946aed50 100644 --- a/client/src/test/java/io/split/engine/matchers/CombiningMatcherTest.java +++ b/client/src/test/java/io/split/engine/matchers/CombiningMatcherTest.java @@ -3,13 +3,11 @@ import com.google.common.collect.Lists; import io.split.client.dtos.MatcherCombiner; import io.split.engine.matchers.strings.WhitelistMatcher; +import org.junit.Assert; import org.junit.Test; import java.util.Collections; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - /** * Tests CombiningMatcher * @@ -18,15 +16,14 @@ public class CombiningMatcherTest { @Test - public void works_and() { + public void worksAnd() { AttributeMatcher matcher1 = AttributeMatcher.vanilla(new AllKeysMatcher()); AttributeMatcher matcher2 = AttributeMatcher.vanilla(new WhitelistMatcher(Lists.newArrayList("a", "b"))); CombiningMatcher combiner = new CombiningMatcher(MatcherCombiner.AND, Lists.newArrayList(matcher1, matcher2)); - assertThat(combiner.match("a", null, null, null), is(true)); - assertThat(combiner.match("b", null, Collections.emptyMap(), null), is(true)); - 
assertThat(combiner.match("c", null, null, null), is(false)); + Assert.assertTrue(combiner.match("a", null, null, null)); + Assert.assertTrue(combiner.match("b", null, Collections.emptyMap(), null)); + Assert.assertFalse(combiner.match("c", null, null, null)); } - -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/matchers/EqualToMatcherTest.java b/client/src/test/java/io/split/engine/matchers/EqualToMatcherTest.java index aeb887f9c..f8320e511 100644 --- a/client/src/test/java/io/split/engine/matchers/EqualToMatcherTest.java +++ b/client/src/test/java/io/split/engine/matchers/EqualToMatcherTest.java @@ -1,11 +1,9 @@ package io.split.engine.matchers; import io.split.client.dtos.DataType; +import org.junit.Assert; import org.junit.Test; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - /** * Tests for AllKeysMatcher */ @@ -14,30 +12,30 @@ public class EqualToMatcherTest { @Test public void works() { EqualToMatcher matcher = new EqualToMatcher(10, DataType.NUMBER); - assertThat(matcher.match(null, null, null, null), is(false)); - assertThat(matcher.match(1, null, null, null), is(false)); - assertThat(matcher.match(new Long(-1), null, null, null), is(false)); - assertThat(matcher.match(9, null, null, null), is(false)); - assertThat(matcher.match(new Long(10), null, null, null), is(true)); - assertThat(matcher.match(11, null, null, null), is(false)); - assertThat(matcher.match(100, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); + Assert.assertFalse(matcher.match(1, null, null, null)); + Assert.assertFalse(matcher.match(new Long(-1), null, null, null)); + Assert.assertFalse(matcher.match(9, null, null, null)); + Assert.assertTrue(matcher.match(new Long(10), null, null, null)); + Assert.assertFalse(matcher.match(11, null, null, null)); + Assert.assertFalse(matcher.match(100, null, null, null)); } @Test - public void works_negative() { + public void 
worksNegative() { EqualToMatcher matcher = new EqualToMatcher(-10, DataType.NUMBER); - assertThat(matcher.match(null, null, null, null), is(false)); - assertThat(matcher.match(1, null, null, null), is(false)); - assertThat(matcher.match(new Long(-1), null, null, null), is(false)); - assertThat(matcher.match(9, null, null, null), is(false)); - assertThat(matcher.match(new Long(10), null, null, null), is(false)); - assertThat(matcher.match(11, null, null, null), is(false)); - assertThat(matcher.match(-10, null, null, null), is(true)); - assertThat(matcher.match(-11, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); + Assert.assertFalse(matcher.match(1, null, null, null)); + Assert.assertFalse(matcher.match(new Long(-1), null, null, null)); + Assert.assertFalse(matcher.match(9, null, null, null)); + Assert.assertFalse(matcher.match(new Long(10), null, null, null)); + Assert.assertFalse(matcher.match(11, null, null, null)); + Assert.assertTrue(matcher.match(-10, null, null, null)); + Assert.assertFalse(matcher.match(-11, null, null, null)); } @Test - public void works_dates() { + public void worksDates() { long april11_2016_23_59_59 = 1460419199000L; long april12_2016_midnight_19 = 1460420360000L; long april12_2016_midnight_20 = 1460420421903L; @@ -45,12 +43,10 @@ public void works_dates() { long april13_2016_00_00_00 = 1460505600000L; EqualToMatcher matcher = new EqualToMatcher(april12_2016_midnight_20, DataType.DATETIME); - assertThat(matcher.match(april11_2016_23_59_59, null, null, null), is(false)); - assertThat(matcher.match(april12_2016_midnight_19, null, null, null), is(true)); - assertThat(matcher.match(april12_2016_midnight_20, null, null, null), is(true)); - assertThat(matcher.match(april12_2016_1_20, null, null, null), is(true)); - assertThat(matcher.match(april13_2016_00_00_00, null, null, null), is(false)); - + Assert.assertFalse(matcher.match(april11_2016_23_59_59, null, null, null)); + 
Assert.assertTrue(matcher.match(april12_2016_midnight_19, null, null, null)); + Assert.assertTrue(matcher.match(april12_2016_midnight_20, null, null, null)); + Assert.assertTrue(matcher.match(april12_2016_1_20, null, null, null)); + Assert.assertFalse(matcher.match(april13_2016_00_00_00, null, null, null)); } - -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/matchers/EqualToSemverMatcherTest.java b/client/src/test/java/io/split/engine/matchers/EqualToSemverMatcherTest.java new file mode 100644 index 000000000..a5a41e2bb --- /dev/null +++ b/client/src/test/java/io/split/engine/matchers/EqualToSemverMatcherTest.java @@ -0,0 +1,34 @@ +package io.split.engine.matchers; + +import org.junit.Test; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertFalse; + +/** + * Tests for EqualToSemverMatcher + */ + +public class EqualToSemverMatcherTest { + + @Test + public void works() { + EqualToSemverMatcher equalToSemverMatcher = new EqualToSemverMatcher("2.1.8"); + + assertTrue( equalToSemverMatcher.match("2.1.8", null, null, null)); + assertFalse(equalToSemverMatcher.match("2.1.9", null, null, null)); + assertFalse(equalToSemverMatcher.match("2.1.8-rc", null, null, null)); + assertFalse( equalToSemverMatcher.match("2.1.8+build", null, null, null)); + assertTrue(equalToSemverMatcher.equals(equalToSemverMatcher)); + assertTrue(equalToSemverMatcher.hashCode() != 0); + } + + @Test + public void testNull() { + EqualToSemverMatcher equalToSemverMatcher = new EqualToSemverMatcher("2.1.8"); + assertFalse( equalToSemverMatcher.match(null, null, null, null)); + + equalToSemverMatcher = new EqualToSemverMatcher("2.ee.8"); + assertFalse(equalToSemverMatcher.match("2.ee.8", null, null, null)); + } +} diff --git a/client/src/test/java/io/split/engine/matchers/GreaterThanOrEqualToMatcherTest.java b/client/src/test/java/io/split/engine/matchers/GreaterThanOrEqualToMatcherTest.java index 566f53623..bbe37fdd7 100644 --- 
a/client/src/test/java/io/split/engine/matchers/GreaterThanOrEqualToMatcherTest.java +++ b/client/src/test/java/io/split/engine/matchers/GreaterThanOrEqualToMatcherTest.java @@ -1,11 +1,9 @@ package io.split.engine.matchers; import io.split.client.dtos.DataType; +import org.junit.Assert; import org.junit.Test; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - /** * Tests for AllKeysMatcher */ @@ -14,31 +12,31 @@ public class GreaterThanOrEqualToMatcherTest { @Test public void works() { GreaterThanOrEqualToMatcher matcher = new GreaterThanOrEqualToMatcher(10, DataType.NUMBER); - assertThat(matcher.match(null, null, null, null), is(false)); - assertThat(matcher.match(1, null, null, null), is(false)); - assertThat(matcher.match(new Long(-1), null, null, null), is(false)); - assertThat(matcher.match(9, null, null, null), is(false)); - assertThat(matcher.match(new Long(10), null, null, null), is(true)); - assertThat(matcher.match(11, null, null, null), is(true)); - assertThat(matcher.match(100, null, null, null), is(true)); + Assert.assertFalse(matcher.match(null, null, null, null)); + Assert.assertFalse(matcher.match(1, null, null, null)); + Assert.assertFalse(matcher.match(new Long(-1), null, null, null)); + Assert.assertFalse(matcher.match(9, null, null, null)); + Assert.assertTrue(matcher.match(new Long(10), null, null, null)); + Assert.assertTrue(matcher.match(11, null, null, null)); + Assert.assertTrue(matcher.match(100, null, null, null)); } @Test - public void works_negative() { + public void worksNegative() { GreaterThanOrEqualToMatcher matcher = new GreaterThanOrEqualToMatcher(-10, DataType.NUMBER); - assertThat(matcher.match(null, null, null, null), is(false)); - assertThat(matcher.match(1, null, null, null), is(true)); - assertThat(matcher.match(new Long(-1), null, null, null), is(true)); - assertThat(matcher.match(9, null, null, null), is(true)); - assertThat(matcher.match(new Long(10), null, null, null), is(true)); - 
assertThat(matcher.match(11, null, null, null), is(true)); - assertThat(matcher.match(100, null, null, null), is(true)); - assertThat(matcher.match(-10, null, null, null), is(true)); - assertThat(matcher.match(-11, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); + Assert.assertTrue(matcher.match(1, null, null, null)); + Assert.assertTrue(matcher.match(new Long(-1), null, null, null)); + Assert.assertTrue(matcher.match(9, null, null, null)); + Assert.assertTrue(matcher.match(new Long(10), null, null, null)); + Assert.assertTrue(matcher.match(11, null, null, null)); + Assert.assertTrue(matcher.match(100, null, null, null)); + Assert.assertTrue(matcher.match(-10, null, null, null)); + Assert.assertFalse(matcher.match(-11, null, null, null)); } @Test - public void works_dates() { + public void worksDates() { long april12_2016_midnight_19 = 1460420360000L; long april12_2016_midnight_20 = 1460420421903L; long april12_2016_midnight_20_59 = 1460420459000L; @@ -46,11 +44,11 @@ public void works_dates() { long april12_2016_18_20 = 1460485239000L; GreaterThanOrEqualToMatcher matcher = new GreaterThanOrEqualToMatcher(april12_2016_midnight_20, DataType.DATETIME); - assertThat(matcher.match(april12_2016_midnight_19, null, null, null), is(false)); - assertThat(matcher.match(april12_2016_midnight_20_59, null, null, null), is(true)); - assertThat(matcher.match(april12_2016_midnight_20, null, null, null), is(true)); - assertThat(matcher.match(april12_2016_1_20, null, null, null), is(true)); - assertThat(matcher.match(april12_2016_18_20, null, null, null), is(true)); + Assert.assertFalse(matcher.match(april12_2016_midnight_19, null, null, null)); + Assert.assertTrue(matcher.match(april12_2016_midnight_20_59, null, null, null)); + Assert.assertTrue(matcher.match(april12_2016_midnight_20_59, null, null, null)); + Assert.assertTrue(matcher.match(april12_2016_midnight_20, null, null, null)); + Assert.assertTrue(matcher.match(april12_2016_1_20, 
null, null, null)); + Assert.assertTrue(matcher.match(april12_2016_18_20, null, null, null)); } - -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/matchers/GreaterThanOrEqualToSemverMatcherTest.java b/client/src/test/java/io/split/engine/matchers/GreaterThanOrEqualToSemverMatcherTest.java new file mode 100644 index 000000000..753034c70 --- /dev/null +++ b/client/src/test/java/io/split/engine/matchers/GreaterThanOrEqualToSemverMatcherTest.java @@ -0,0 +1,34 @@ +package io.split.engine.matchers; + +import org.junit.Test; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertFalse; + +/** + * Tests for EqualToSemverMatcher + */ + +public class GreaterThanOrEqualToSemverMatcherTest { + + @Test + public void works() { + GreaterThanOrEqualToSemverMatcher greaterThanOrEqualToSemverMatcher = new GreaterThanOrEqualToSemverMatcher("2.1.8"); + assertTrue( greaterThanOrEqualToSemverMatcher.match("2.1.8", null, null, null)); + assertTrue( greaterThanOrEqualToSemverMatcher.match("2.1.9", null, null, null)); + assertFalse( greaterThanOrEqualToSemverMatcher.match("2.1.8-rc", null, null, null)); + assertFalse( greaterThanOrEqualToSemverMatcher.match("2.0.10", null, null, null)); + assertTrue( greaterThanOrEqualToSemverMatcher.match("2.1.8+build", null, null, null)); + assertTrue(greaterThanOrEqualToSemverMatcher.equals(greaterThanOrEqualToSemverMatcher)); + assertTrue(greaterThanOrEqualToSemverMatcher.hashCode() != 0); + } + + @Test + public void testNull() { + GreaterThanOrEqualToSemverMatcher greaterThanOrEqualToSemverMatcher = new GreaterThanOrEqualToSemverMatcher("2.1.8"); + assertFalse( greaterThanOrEqualToSemverMatcher.match(null, null, null, null)); + + greaterThanOrEqualToSemverMatcher = new GreaterThanOrEqualToSemverMatcher("2.ee.8"); + assertFalse(greaterThanOrEqualToSemverMatcher.match("2.ee.8", null, null, null)); + } +} diff --git 
a/client/src/test/java/io/split/engine/matchers/InListSemverMatcherTest.java b/client/src/test/java/io/split/engine/matchers/InListSemverMatcherTest.java new file mode 100644 index 000000000..c01371251 --- /dev/null +++ b/client/src/test/java/io/split/engine/matchers/InListSemverMatcherTest.java @@ -0,0 +1,41 @@ +package io.split.engine.matchers; + +import com.google.common.collect.Lists; +import org.junit.Test; + +import java.util.List; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +/** + * Tests for EqualToSemverMatcher + */ + +public class InListSemverMatcherTest { + + @Test + public void works() { + List whitelist = Lists.newArrayList("2.1.8", "3.4.0"); + InListSemverMatcher inListSemverMatcher = new InListSemverMatcher(whitelist); + + assertTrue( inListSemverMatcher.match("2.1.8", null, null, null) == true); + assertTrue( inListSemverMatcher.match("2.1.9", null, null, null) == false); + assertTrue( inListSemverMatcher.match("2.1.8-rc", null, null, null) == false); + assertTrue( inListSemverMatcher.match("3.4.0", null, null, null) == true); + assertTrue( inListSemverMatcher.match("3.4.0+build", null, null, null) == false); + assertTrue(inListSemverMatcher.equals(inListSemverMatcher)); + assertTrue(inListSemverMatcher.hashCode() != 0); + } + + @Test + public void testNull() { + List whitelist = Lists.newArrayList("2.1.8", "3.4.0"); + InListSemverMatcher inListSemverMatcher = new InListSemverMatcher(whitelist); + assertFalse( inListSemverMatcher.match(null, null, null, null)); + + whitelist = Lists.newArrayList("2.1.eee", "3.xxx.0"); + inListSemverMatcher = new InListSemverMatcher(whitelist); + assertFalse(inListSemverMatcher.match("2.1.eee", null, null, null)); + } +} diff --git a/client/src/test/java/io/split/engine/matchers/LessThanOrEqualToMatcherTest.java b/client/src/test/java/io/split/engine/matchers/LessThanOrEqualToMatcherTest.java index 1663a6085..47853ed4c 100644 --- 
a/client/src/test/java/io/split/engine/matchers/LessThanOrEqualToMatcherTest.java +++ b/client/src/test/java/io/split/engine/matchers/LessThanOrEqualToMatcherTest.java @@ -1,11 +1,9 @@ package io.split.engine.matchers; import io.split.client.dtos.DataType; +import org.junit.Assert; import org.junit.Test; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - /** * Tests for AllKeysMatcher */ @@ -14,31 +12,31 @@ public class LessThanOrEqualToMatcherTest { @Test public void works() { LessThanOrEqualToMatcher matcher = new LessThanOrEqualToMatcher(10, DataType.NUMBER); - assertThat(matcher.match(null, null, null, null), is(false)); - assertThat(matcher.match(1, null, null, null), is(true)); - assertThat(matcher.match(new Long(-1), null, null, null), is(true)); - assertThat(matcher.match(9, null, null, null), is(true)); - assertThat(matcher.match(new Long(10), null, null, null), is(true)); - assertThat(matcher.match(11, null, null, null), is(false)); - assertThat(matcher.match(100, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); + Assert.assertTrue(matcher.match(1, null, null, null)); + Assert.assertTrue(matcher.match(new Long(-1), null, null, null)); + Assert.assertTrue(matcher.match(9, null, null, null)); + Assert.assertTrue(matcher.match(new Long(10), null, null, null)); + Assert.assertFalse(matcher.match(11, null, null, null)); + Assert.assertFalse(matcher.match(100, null, null, null)); } @Test - public void works_negative() { + public void worksNegative() { LessThanOrEqualToMatcher matcher = new LessThanOrEqualToMatcher(-10, DataType.NUMBER); - assertThat(matcher.match(null, null, null, null), is(false)); - assertThat(matcher.match(1, null, null, null), is(false)); - assertThat(matcher.match(new Long(-1), null, null, null), is(false)); - assertThat(matcher.match(9, null, null, null), is(false)); - assertThat(matcher.match(new Long(10), null, null, null), is(false)); - 
assertThat(matcher.match(11, null, null, null), is(false)); - assertThat(matcher.match(-9, null, null, null), is(false)); - assertThat(matcher.match(-10, null, null, null), is(true)); - assertThat(matcher.match(-11, null, null, null), is(true)); + Assert.assertFalse(matcher.match(null, null, null, null)); + Assert.assertFalse(matcher.match(1, null, null, null)); + Assert.assertFalse(matcher.match(new Long(-1), null, null, null)); + Assert.assertFalse(matcher.match(9, null, null, null)); + Assert.assertFalse(matcher.match(new Long(10), null, null, null)); + Assert.assertFalse(matcher.match(11, null, null, null)); + Assert.assertFalse(matcher.match(-9, null, null, null)); + Assert.assertTrue(matcher.match(-10, null, null, null)); + Assert.assertTrue(matcher.match(-11, null, null, null)); } @Test - public void works_dates() { + public void worksDates() { long april11_2016_23_59 = 1460419199000L; long april12_2016_midnight_19 = 1460420360000L; long april12_2016_midnight_20 = 1460420421903L; @@ -46,12 +44,10 @@ public void works_dates() { long april12_2016_1_20 = 1460424039000L; LessThanOrEqualToMatcher matcher = new LessThanOrEqualToMatcher(april12_2016_midnight_20, DataType.DATETIME); - assertThat(matcher.match(april11_2016_23_59, null, null, null), is(true)); - assertThat(matcher.match(april12_2016_midnight_19, null, null, null), is(true)); - assertThat(matcher.match(april12_2016_midnight_20, null, null, null), is(true)); - assertThat(matcher.match(april12_2016_midnight_20_59, null, null, null), is(true)); - assertThat(matcher.match(april12_2016_1_20, null, null, null), is(false)); + Assert.assertTrue(matcher.match(april11_2016_23_59, null, null, null)); + Assert.assertTrue(matcher.match(april12_2016_midnight_19, null, null, null)); + Assert.assertTrue(matcher.match(april12_2016_midnight_20, null, null, null)); + Assert.assertTrue(matcher.match(april12_2016_midnight_20_59, null, null, null)); + Assert.assertFalse(matcher.match(april12_2016_1_20, null, null, null)); } 
- - -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/matchers/LessThanOrEqualToSemverMatcherTest.java b/client/src/test/java/io/split/engine/matchers/LessThanOrEqualToSemverMatcherTest.java new file mode 100644 index 000000000..349a608ae --- /dev/null +++ b/client/src/test/java/io/split/engine/matchers/LessThanOrEqualToSemverMatcherTest.java @@ -0,0 +1,35 @@ +package io.split.engine.matchers; + +import org.junit.Test; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertFalse; + +/** + * Tests for EqualToSemverMatcher + */ + +public class LessThanOrEqualToSemverMatcherTest { + + @Test + public void works() { + LessThanOrEqualToSemverMatcher lessThanOrEqualToSemverMatcher = new LessThanOrEqualToSemverMatcher("2.1.8"); + + assertTrue( lessThanOrEqualToSemverMatcher.match("2.1.8", null, null, null)); + assertFalse( lessThanOrEqualToSemverMatcher.match("2.1.9", null, null, null)); + assertTrue( lessThanOrEqualToSemverMatcher.match("2.1.8-rc", null, null, null)); + assertTrue( lessThanOrEqualToSemverMatcher.match("2.0.10", null, null, null)); + assertTrue( lessThanOrEqualToSemverMatcher.match("2.1.8+build", null, null, null)); + assertTrue(lessThanOrEqualToSemverMatcher.equals(lessThanOrEqualToSemverMatcher)); + assertTrue(lessThanOrEqualToSemverMatcher.hashCode() != 0); + } + + @Test + public void testNull() { + LessThanOrEqualToSemverMatcher lessThanOrEqualToSemverMatcher = new LessThanOrEqualToSemverMatcher("2.1.8"); + assertFalse( lessThanOrEqualToSemverMatcher.match(null, null, null, null)); + + lessThanOrEqualToSemverMatcher = new LessThanOrEqualToSemverMatcher("2.ee.8"); + assertFalse(lessThanOrEqualToSemverMatcher.match("2.ee.8", null, null, null)); + } +} diff --git a/client/src/test/java/io/split/engine/matchers/NegatableMatcherTest.java b/client/src/test/java/io/split/engine/matchers/NegatableMatcherTest.java index 3aec5ab0d..f80f38739 100644 --- 
a/client/src/test/java/io/split/engine/matchers/NegatableMatcherTest.java +++ b/client/src/test/java/io/split/engine/matchers/NegatableMatcherTest.java @@ -4,8 +4,11 @@ import io.split.engine.evaluator.EvaluationContext; import io.split.engine.evaluator.Evaluator; import io.split.engine.matchers.strings.WhitelistMatcher; +import io.split.storages.RuleBasedSegmentCache; import io.split.storages.SegmentCache; +import io.split.storages.memory.RuleBasedSegmentCacheInMemoryImp; import io.split.storages.memory.SegmentCacheInMemoryImpl; +import org.junit.Assert; import org.junit.Test; import org.mockito.Mockito; @@ -13,9 +16,6 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - /** * Tests for NegatableMatcher. * @@ -24,39 +24,39 @@ public class NegatableMatcherTest { @Test - public void works_all_keys() { + public void worksAllKeys() { AllKeysMatcher delegate = new AllKeysMatcher(); AttributeMatcher.NegatableMatcher matcher = new AttributeMatcher.NegatableMatcher(delegate, true); - test(matcher, "foo", false, Mockito.mock(SegmentCache.class)); + test(matcher, "foo", false, Mockito.mock(SegmentCache.class), Mockito.mock(RuleBasedSegmentCache.class)); } @Test - public void works_segment() { + public void worksSegment() { SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); + RuleBasedSegmentCache ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); segmentCache.updateSegment("foo", Stream.of("a","b").collect(Collectors.toList()), new ArrayList<>(), 1L); UserDefinedSegmentMatcher delegate = new UserDefinedSegmentMatcher("foo"); AttributeMatcher.NegatableMatcher matcher = new AttributeMatcher.NegatableMatcher(delegate, true); - test(matcher, "a", false, segmentCache); - test(matcher, "b", false, segmentCache); - test(matcher, "c", true, segmentCache); + test(matcher, "a", false, segmentCache, ruleBasedSegmentCache); + test(matcher, "b", false, 
segmentCache, ruleBasedSegmentCache); + test(matcher, "c", true, segmentCache, ruleBasedSegmentCache); } @Test - public void works_whitelist() { + public void worksWhitelist() { WhitelistMatcher delegate = new WhitelistMatcher(Lists.newArrayList("a", "b")); AttributeMatcher.NegatableMatcher matcher = new AttributeMatcher.NegatableMatcher(delegate, true); - test(matcher, "a", false, Mockito.mock(SegmentCache.class)); - test(matcher, "b", false, Mockito.mock(SegmentCache.class)); - test(matcher, "c", true, Mockito.mock(SegmentCache.class)); + test(matcher, "a", false, Mockito.mock(SegmentCache.class), Mockito.mock(RuleBasedSegmentCache.class)); + test(matcher, "b", false, Mockito.mock(SegmentCache.class), Mockito.mock(RuleBasedSegmentCache.class)); + test(matcher, "c", true, Mockito.mock(SegmentCache.class), Mockito.mock(RuleBasedSegmentCache.class)); } - private void test(AttributeMatcher.NegatableMatcher negationMatcher, String key, boolean expected, SegmentCache segmentCache) { - assertThat(negationMatcher.match(key, null, null, new EvaluationContext(Mockito.mock(Evaluator.class), segmentCache)), is(expected)); - assertThat(negationMatcher.delegate().match(key, null, null, new EvaluationContext(Mockito.mock(Evaluator.class), segmentCache)), is(!expected)); - + private void test(AttributeMatcher.NegatableMatcher negationMatcher, String key, boolean expected, SegmentCache segmentCache, RuleBasedSegmentCache ruleBasedSegmentCache) { + Assert.assertEquals(expected, negationMatcher.match(key, null, null, new EvaluationContext(Mockito.mock(Evaluator.class), segmentCache, ruleBasedSegmentCache))); + Assert.assertNotEquals(expected, negationMatcher.delegate().match(key, null, null, new EvaluationContext(Mockito.mock(Evaluator.class), segmentCache, ruleBasedSegmentCache))); } diff --git a/client/src/test/java/io/split/engine/matchers/PrerequisitesMatcherTest.java b/client/src/test/java/io/split/engine/matchers/PrerequisitesMatcherTest.java new file mode 100644 index 
000000000..4fe92d045 --- /dev/null +++ b/client/src/test/java/io/split/engine/matchers/PrerequisitesMatcherTest.java @@ -0,0 +1,55 @@ +package io.split.engine.matchers; + +import io.split.client.dtos.Prerequisites; +import io.split.client.utils.Json; +import io.split.engine.evaluator.EvaluationContext; +import io.split.engine.evaluator.Evaluator; +import io.split.engine.evaluator.EvaluatorImp; +import io.split.storages.RuleBasedSegmentCache; +import io.split.storages.SegmentCache; +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mockito; + +import java.util.Arrays; +import java.util.List; + +/** + * Tests for Prerequisites matcher + */ +public class PrerequisitesMatcherTest { + + @Test + public void works() { + Evaluator evaluator = Mockito.mock(Evaluator.class); + EvaluationContext evaluationContext = new EvaluationContext(evaluator, Mockito.mock(SegmentCache.class), Mockito.mock(RuleBasedSegmentCache.class)); + List prerequisites = Arrays.asList(Json.fromJson("{\"n\": \"split1\", \"ts\": [\"on\"]}", Prerequisites.class), Json.fromJson("{\"n\": \"split2\", \"ts\": [\"off\"]}", Prerequisites.class)); + PrerequisitesMatcher matcher = new PrerequisitesMatcher(prerequisites); + Assert.assertEquals("prerequisites: split1 [on], split2 [off]", matcher.toString()); + PrerequisitesMatcher matcher2 = new PrerequisitesMatcher(prerequisites); + Assert.assertTrue(matcher.equals(matcher2)); + Assert.assertTrue(matcher.hashCode() != 0); + + Mockito.when(evaluator.evaluateFeature("user", "user", "split1", null)).thenReturn(new EvaluatorImp.TreatmentLabelAndChangeNumber("on", "")); + Mockito.when(evaluator.evaluateFeature("user", "user", "split2", null)).thenReturn(new EvaluatorImp.TreatmentLabelAndChangeNumber("off", "")); + Assert.assertTrue(matcher.match("user", "user", null, evaluationContext)); + + Mockito.when(evaluator.evaluateFeature("user", "user", "split2", null)).thenReturn(new EvaluatorImp.TreatmentLabelAndChangeNumber("on", "")); + 
Assert.assertFalse(matcher.match("user", "user", null, evaluationContext)); + } + + @Test + public void invalidParams() { + Evaluator evaluator = Mockito.mock(Evaluator.class); + EvaluationContext evaluationContext = new EvaluationContext(evaluator, Mockito.mock(SegmentCache.class), Mockito.mock(RuleBasedSegmentCache.class)); + + List prerequisites = Arrays.asList(Json.fromJson("{\"n\": \"split1\", \"ts\": [\"on\"]}", Prerequisites.class), Json.fromJson("{\"n\": \"split2\", \"ts\": [\"off\"]}", Prerequisites.class)); + PrerequisitesMatcher matcher = new PrerequisitesMatcher(prerequisites); + Mockito.when(evaluator.evaluateFeature("user", "user", "split1", null)).thenReturn(new EvaluatorImp.TreatmentLabelAndChangeNumber("on", "")); + Assert.assertFalse(matcher.match(null, null, null, evaluationContext)); + Assert.assertFalse(matcher.match(123, null, null, evaluationContext)); + + matcher = new PrerequisitesMatcher(null); + Assert.assertFalse(matcher.match(123, null, null, evaluationContext)); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/matchers/RuleBasedSegmentMatcherTest.java b/client/src/test/java/io/split/engine/matchers/RuleBasedSegmentMatcherTest.java new file mode 100644 index 000000000..7d5d0c48b --- /dev/null +++ b/client/src/test/java/io/split/engine/matchers/RuleBasedSegmentMatcherTest.java @@ -0,0 +1,137 @@ +package io.split.engine.matchers; + +import com.google.common.collect.Lists; +import io.split.client.dtos.ConditionType; +import io.split.client.dtos.MatcherCombiner; +import io.split.client.dtos.SplitChange; +import io.split.client.utils.Json; +import io.split.client.utils.RuleBasedSegmentsToUpdate; +import io.split.engine.evaluator.EvaluationContext; +import io.split.engine.evaluator.Evaluator; +import io.split.engine.experiments.ParsedCondition; +import io.split.engine.experiments.ParsedRuleBasedSegment; +import io.split.engine.experiments.RuleBasedSegmentParser; +import 
io.split.engine.matchers.strings.WhitelistMatcher; +import io.split.storages.RuleBasedSegmentCache; +import io.split.storages.SegmentCache; +import io.split.storages.memory.RuleBasedSegmentCacheInMemoryImp; +import io.split.storages.memory.SegmentCacheInMemoryImpl; +import org.junit.Test; +import org.mockito.Mockito; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; + +import static io.split.client.utils.RuleBasedSegmentProcessor.processRuleBasedSegmentChanges; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThat; + +public class RuleBasedSegmentMatcherTest { + @Test + public void works() { + Evaluator evaluator = Mockito.mock(Evaluator.class); + SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); + RuleBasedSegmentCache ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); + EvaluationContext evaluationContext = new EvaluationContext(evaluator, segmentCache, ruleBasedSegmentCache); + AttributeMatcher whiteListMatcher = AttributeMatcher.vanilla(new WhitelistMatcher(Lists.newArrayList("test_1", "admin"))); + CombiningMatcher whitelistCombiningMatcher = new CombiningMatcher(MatcherCombiner.AND, Lists.newArrayList(whiteListMatcher)); + + AttributeMatcher ruleBasedSegmentMatcher = AttributeMatcher.vanilla(new RuleBasedSegmentMatcher("sample_rule_based_segment")); + CombiningMatcher ruleBasedSegmentCombinerMatcher = new CombiningMatcher(MatcherCombiner.AND, Lists.newArrayList(ruleBasedSegmentMatcher)); + ParsedCondition ruleBasedSegmentCondition = new ParsedCondition(ConditionType.ROLLOUT, ruleBasedSegmentCombinerMatcher, null, "test rbs rule"); + ParsedRuleBasedSegment parsedRuleBasedSegment = new ParsedRuleBasedSegment("sample_rule_based_segment", + Lists.newArrayList(new ParsedCondition(ConditionType.WHITELIST, whitelistCombiningMatcher, null, "whitelist 
label")),"user", + 123, Lists.newArrayList("mauro@test.io","gaston@test.io"), Lists.newArrayList()); + ruleBasedSegmentCache.update(Lists.newArrayList(parsedRuleBasedSegment), null, 123); + + RuleBasedSegmentMatcher matcher = new RuleBasedSegmentMatcher("sample_rule_based_segment"); + + assertThat(matcher.match("mauro@test.io", null, null, evaluationContext), is(false)); + assertThat(matcher.match("admin", null, null, evaluationContext), is(true)); + + assertThat(matcher.match("foo", null, null, evaluationContext), is(false)); + assertThat(matcher.match(null, null, null, evaluationContext), is(false)); + } + + @Test + public void usingRbsInConditionTest() throws IOException { + String load = new String(Files.readAllBytes(Paths.get("src/test/resources/rule_base_segments.json")), StandardCharsets.UTF_8); + Evaluator evaluator = Mockito.mock(Evaluator.class); + SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); + RuleBasedSegmentCache ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); + EvaluationContext evaluationContext = new EvaluationContext(evaluator, segmentCache, ruleBasedSegmentCache); + + SplitChange change = Json.fromJson(load, SplitChange.class); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + RuleBasedSegmentsToUpdate ruleBasedSegmentsToUpdate = processRuleBasedSegmentChanges(ruleBasedSegmentParser, + change.ruleBasedSegments.d); + ruleBasedSegmentCache.update(ruleBasedSegmentsToUpdate.getToAdd(), null, 123); + RuleBasedSegmentMatcher matcher = new RuleBasedSegmentMatcher("dependent_rbs"); + HashMap attrib1 = new HashMap() {{ + put("email", "mauro@@split.io"); + }}; + HashMap attrib2 = new HashMap() {{ + put("email", "bilal@@split.io"); + }}; + assertThat(matcher.match("mauro@split.io", null, attrib1, evaluationContext), is(false)); + assertThat(matcher.match("bilal@split.io", null, attrib2, evaluationContext), is(true)); + } + + @Test + public void usingSegmentInExcludedTest() throws IOException { + 
String load = new String(Files.readAllBytes(Paths.get("src/test/resources/rule_base_segments3.json")), StandardCharsets.UTF_8); + Evaluator evaluator = Mockito.mock(Evaluator.class); + SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); + segmentCache.updateSegment("segment1", Arrays.asList("bilal@split.io"), new ArrayList<>(), 123); + RuleBasedSegmentCache ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); + EvaluationContext evaluationContext = new EvaluationContext(evaluator, segmentCache, ruleBasedSegmentCache); + + SplitChange change = Json.fromJson(load, SplitChange.class); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + RuleBasedSegmentsToUpdate ruleBasedSegmentsToUpdate = processRuleBasedSegmentChanges(ruleBasedSegmentParser, + change.ruleBasedSegments.d); + ruleBasedSegmentCache.update(ruleBasedSegmentsToUpdate.getToAdd(), null, 123); + RuleBasedSegmentMatcher matcher = new RuleBasedSegmentMatcher("sample_rule_based_segment"); + HashMap attrib1 = new HashMap() {{ + put("email", "mauro@split.io"); + }}; + HashMap attrib2 = new HashMap() {{ + put("email", "bilal@split.io"); + }}; + HashMap attrib3 = new HashMap() {{ + put("email", "pato@split.io"); + }}; + assertThat(matcher.match("mauro@split.io", null, attrib1, evaluationContext), is(false)); + assertThat(matcher.match("bilal@split.io", null, attrib2, evaluationContext), is(false)); + assertThat(matcher.match("pato@split.io", null, attrib3, evaluationContext), is(true)); + } + + @Test + public void usingRbsInExcludedTest() throws IOException { + String load = new String(Files.readAllBytes(Paths.get("src/test/resources/rule_base_segments2.json")), StandardCharsets.UTF_8); + Evaluator evaluator = Mockito.mock(Evaluator.class); + SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); + RuleBasedSegmentCache ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); + EvaluationContext evaluationContext = new EvaluationContext(evaluator, 
segmentCache, ruleBasedSegmentCache); + + SplitChange change = Json.fromJson(load, SplitChange.class); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + RuleBasedSegmentsToUpdate ruleBasedSegmentsToUpdate = processRuleBasedSegmentChanges(ruleBasedSegmentParser, + change.ruleBasedSegments.d); + ruleBasedSegmentCache.update(ruleBasedSegmentsToUpdate.getToAdd(), null, 123); + RuleBasedSegmentMatcher matcher = new RuleBasedSegmentMatcher("sample_rule_based_segment"); + HashMap attrib1 = new HashMap() {{ + put("email", "mauro@split.io"); + }}; + HashMap attrib2 = new HashMap() {{ + put("email", "bilal@harness.io"); + }}; + assertThat(matcher.match("mauro", null, attrib1, evaluationContext), is(false)); + assertThat(matcher.match("bilal", null, attrib2, evaluationContext), is(true)); + } +} diff --git a/client/src/test/java/io/split/engine/matchers/SemverTest.java b/client/src/test/java/io/split/engine/matchers/SemverTest.java new file mode 100644 index 000000000..40da82643 --- /dev/null +++ b/client/src/test/java/io/split/engine/matchers/SemverTest.java @@ -0,0 +1,112 @@ +package io.split.engine.matchers; + +import org.junit.Test; + +import java.io.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertTrue; + +/** + * Tests for AllKeysMatcher + */ +public class SemverTest { + + @Test + public void testValidVersions() throws IOException { + List> versions = new ArrayList<>(); + BufferedReader br = new BufferedReader(new FileReader("src/test/resources/semver/valid-semantic-versions.csv")); + String line; + boolean firstLine = true; + while ((line = br.readLine()) != null) { + if (firstLine) {firstLine = false; continue; } + String[] values = line.split(","); + versions.add(Arrays.asList(values)); + } + for(List version : versions) { + assertTrue(Semver.build(version.get(0)) != null); + assertTrue(Semver.build(version.get(1)) != 
null); + } + } + + @Test + public void testInvalidVersions() throws IOException { + List> versions = new ArrayList<>(); + BufferedReader br = new BufferedReader(new FileReader("src/test/resources/semver/invalid-semantic-versions.csv")); + String line; + boolean firstLine = true; + while ((line = br.readLine()) != null) { + if (firstLine) {firstLine = false; continue; } + String[] values = line.split(","); + versions.add(Arrays.asList(values)); + } + for(List version : versions) { + assertTrue(Semver.build(version.get(0)) == null); + } + } + + @Test + public void testCompareVersions() throws IOException { + List> versions = new ArrayList<>(); + BufferedReader br = new BufferedReader(new FileReader("src/test/resources/semver/valid-semantic-versions.csv")); + String line; + boolean firstLine = true; + while ((line = br.readLine()) != null) { + if (firstLine) {firstLine = false; continue; } + String[] values = line.split(","); + versions.add(Arrays.asList(values)); + } + for(List version : versions) { + assertTrue(Semver.build(version.get(0)).compare(Semver.build(version.get(1))) == 1); + assertTrue(Semver.build(version.get(1)).compare(Semver.build(version.get(0))) == -1); + } + + versions.clear(); + br = new BufferedReader(new FileReader("src/test/resources/semver/equal-to-semver.csv")); + firstLine = true; + while ((line = br.readLine()) != null) { + if (firstLine) {firstLine = false; continue; } + String[] values = line.split(","); + versions.add(Arrays.asList(values)); + } + for(List version : versions) { + Semver version1 = Semver.build(version.get(0)); + Semver version2 = Semver.build(version.get(1)); + + if (version.get(2).equals("true")) { + assertTrue(version1.version().equals(version2.version())); + } else { + assertTrue(!version1.version().equals(version2.version())); + } + } + + versions.clear(); + br = new BufferedReader(new FileReader("src/test/resources/semver/between-semver.csv")); + firstLine = true; + while ((line = br.readLine()) != null) { + if 
(firstLine) {firstLine = false; continue; } + String[] values = line.split(","); + versions.add(Arrays.asList(values)); + } + for(List version : versions) { + Semver version1 = Semver.build(version.get(0)); + Semver version2 = Semver.build(version.get(1)); + Semver version3 = Semver.build(version.get(2)); + + if (version.get(3).equals("true")) { + assertTrue(version2.compare(version1) >= 0 && version3.compare(version2) >= 0); + } else { + assertTrue(version2.compare(version1) < 0 || version3.compare(version2) < 0); + } + } + + } + @Test + public void testLeadingZeros() { + assertTrue(Semver.build("1.01.2").version().equals("1\\.1\\.2")); + assertTrue(Semver.build("1.01.2-rc.01").version().equals("1\\.1\\.2-rc\\.1")); + } +} diff --git a/client/src/test/java/io/split/engine/matchers/UserDefinedSegmentMatcherTest.java b/client/src/test/java/io/split/engine/matchers/UserDefinedSegmentMatcherTest.java index 0d59e3c54..b957f73d0 100644 --- a/client/src/test/java/io/split/engine/matchers/UserDefinedSegmentMatcherTest.java +++ b/client/src/test/java/io/split/engine/matchers/UserDefinedSegmentMatcherTest.java @@ -3,7 +3,10 @@ import com.google.common.collect.Sets; import io.split.engine.evaluator.EvaluationContext; import io.split.engine.evaluator.Evaluator; +import io.split.storages.RuleBasedSegmentCacheConsumer; +import io.split.storages.RuleBasedSegmentCacheProducer; import io.split.storages.SegmentCache; +import io.split.storages.memory.RuleBasedSegmentCacheInMemoryImp; import io.split.storages.memory.SegmentCacheInMemoryImpl; import org.junit.Test; import org.mockito.Mockito; @@ -27,7 +30,8 @@ public void works() { Set keys = Sets.newHashSet("a", "b"); Evaluator evaluator = Mockito.mock(Evaluator.class); SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); - EvaluationContext evaluationContext = new EvaluationContext(evaluator, segmentCache); + RuleBasedSegmentCacheConsumer ruleBasedSegmentCacheConsumer = new RuleBasedSegmentCacheInMemoryImp(); + 
EvaluationContext evaluationContext = new EvaluationContext(evaluator, segmentCache, ruleBasedSegmentCacheConsumer); segmentCache.updateSegment("foo", Stream.of("a","b").collect(Collectors.toList()), new ArrayList<>(), 1L); UserDefinedSegmentMatcher matcher = new UserDefinedSegmentMatcher("foo"); diff --git a/client/src/test/java/io/split/engine/matchers/collections/ContainsAllOfSetMatcherTest.java b/client/src/test/java/io/split/engine/matchers/collections/ContainsAllOfSetMatcherTest.java index caa7b40da..1c84cf2e6 100644 --- a/client/src/test/java/io/split/engine/matchers/collections/ContainsAllOfSetMatcherTest.java +++ b/client/src/test/java/io/split/engine/matchers/collections/ContainsAllOfSetMatcherTest.java @@ -1,5 +1,6 @@ package io.split.engine.matchers.collections; +import org.junit.Assert; import org.junit.Test; import java.util.ArrayList; @@ -7,77 +8,75 @@ import java.util.List; import java.util.Set; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - /** * Created by adilaijaz on 4/18/17. 
*/ public class ContainsAllOfSetMatcherTest { @Test - public void works_for_sets() { + public void worksForSets() { Set set = new HashSet<>(); set.add("first"); set.add("second"); ContainsAllOfSetMatcher matcher = new ContainsAllOfSetMatcher(set); - assertThat(matcher.match(null, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); Set argument = new HashSet<>(); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("second"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("first"); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); argument.add("third"); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); } @Test - public void works_for_lists() { + public void worksForLists() { List list = new ArrayList<>(); list.add("first"); list.add("second"); ContainsAllOfSetMatcher matcher = new ContainsAllOfSetMatcher(list); - assertThat(matcher.match(null, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); List argument = new ArrayList<>(); - assertThat(matcher.match(argument, null, null, null), is(false)); + + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("second"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("first"); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); argument.add("third"); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); } @Test - public void 
works_for_empty_paramter() { + public void worksForEmptyParamter() { List list = new ArrayList<>(); ContainsAllOfSetMatcher matcher = new ContainsAllOfSetMatcher(list); - assertThat(matcher.match(null, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); List argument = new ArrayList<>(); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("second"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("first"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("third"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); } } diff --git a/client/src/test/java/io/split/engine/matchers/collections/ContainsAnyOfSetMatcherTest.java b/client/src/test/java/io/split/engine/matchers/collections/ContainsAnyOfSetMatcherTest.java index 2b54dcbaf..520959aee 100644 --- a/client/src/test/java/io/split/engine/matchers/collections/ContainsAnyOfSetMatcherTest.java +++ b/client/src/test/java/io/split/engine/matchers/collections/ContainsAnyOfSetMatcherTest.java @@ -1,5 +1,6 @@ package io.split.engine.matchers.collections; +import org.junit.Assert; import org.junit.Test; import java.util.ArrayList; @@ -7,77 +8,74 @@ import java.util.List; import java.util.Set; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - /** * Created by adilaijaz on 4/18/17. 
*/ public class ContainsAnyOfSetMatcherTest { @Test - public void works_for_sets() { + public void worksForSets() { Set set = new HashSet<>(); set.add("first"); set.add("second"); ContainsAnyOfSetMatcher matcher = new ContainsAnyOfSetMatcher(set); - assertThat(matcher.match(null, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); Set argument = new HashSet<>(); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("second"); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); argument.add("first"); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); argument.add("third"); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); } @Test - public void works_for_lists() { + public void worksForLists() { List list = new ArrayList<>(); list.add("first"); list.add("second"); ContainsAnyOfSetMatcher matcher = new ContainsAnyOfSetMatcher(list); - assertThat(matcher.match(null, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); List argument = new ArrayList<>(); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("second"); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); argument.add("first"); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); argument.add("third"); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); } @Test - public void 
works_for_empty_paramter() { + public void worksForEmptyParamter() { List list = new ArrayList<>(); ContainsAnyOfSetMatcher matcher = new ContainsAnyOfSetMatcher(list); - assertThat(matcher.match(null, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); List argument = new ArrayList<>(); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("second"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("first"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("third"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); } -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/matchers/collections/EqualToSetMatcherTest.java b/client/src/test/java/io/split/engine/matchers/collections/EqualToSetMatcherTest.java index 24c783c59..ceb3b4b11 100644 --- a/client/src/test/java/io/split/engine/matchers/collections/EqualToSetMatcherTest.java +++ b/client/src/test/java/io/split/engine/matchers/collections/EqualToSetMatcherTest.java @@ -1,5 +1,6 @@ package io.split.engine.matchers.collections; +import org.junit.Assert; import org.junit.Test; import java.util.ArrayList; @@ -7,100 +8,97 @@ import java.util.List; import java.util.Set; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - /** * Created by adilaijaz on 4/18/17. 
*/ public class EqualToSetMatcherTest { @Test - public void works_for_sets() { + public void worksForSets() { Set set = new HashSet<>(); set.add("first"); set.add("second"); EqualToSetMatcher matcher = new EqualToSetMatcher(set); - assertThat(matcher.match(null, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); Set argument = new HashSet<>(); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("second"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("first"); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); argument.add("third"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); } @Test - public void works_for_sets_same_order() { + public void worksForSetsSameOrder() { Set set = new HashSet<>(); set.add("first"); set.add("second"); EqualToSetMatcher matcher = new EqualToSetMatcher(set); - assertThat(matcher.match(null, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); Set argument = new HashSet<>(); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("first"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("second"); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); argument.add("third"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); } @Test - public void works_for_lists() { + 
public void worksForLists() { List list = new ArrayList<>(); list.add("first"); list.add("second"); EqualToSetMatcher matcher = new EqualToSetMatcher(list); - assertThat(matcher.match(null, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); List argument = new ArrayList<>(); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("second"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("first"); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); argument.add("third"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); } @Test - public void works_for_empty_paramter() { + public void worksForEmptyParamter() { List list = new ArrayList<>(); EqualToSetMatcher matcher = new EqualToSetMatcher(list); - assertThat(matcher.match(null, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); List argument = new ArrayList<>(); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); argument.add("second"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("first"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("third"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); } } diff --git a/client/src/test/java/io/split/engine/matchers/collections/PartOfSetMatcherTest.java 
b/client/src/test/java/io/split/engine/matchers/collections/PartOfSetMatcherTest.java index ff75dc9fb..0a734e884 100644 --- a/client/src/test/java/io/split/engine/matchers/collections/PartOfSetMatcherTest.java +++ b/client/src/test/java/io/split/engine/matchers/collections/PartOfSetMatcherTest.java @@ -1,5 +1,6 @@ package io.split.engine.matchers.collections; +import org.junit.Assert; import org.junit.Test; import java.util.ArrayList; @@ -7,77 +8,74 @@ import java.util.List; import java.util.Set; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - /** * Created by adilaijaz on 4/18/17. */ public class PartOfSetMatcherTest { @Test - public void works_for_sets() { + public void worksForSets() { Set set = new HashSet<>(); set.add("first"); set.add("second"); PartOfSetMatcher matcher = new PartOfSetMatcher(set); - assertThat(matcher.match(null, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); Set argument = new HashSet<>(); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("second"); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); argument.add("first"); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); argument.add("third"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); } @Test - public void works_for_lists() { + public void worksForLists() { List list = new ArrayList<>(); list.add("first"); list.add("second"); PartOfSetMatcher matcher = new PartOfSetMatcher(list); - assertThat(matcher.match(null, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); List argument = new ArrayList<>(); - 
assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("second"); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); argument.add("first"); - assertThat(matcher.match(argument, null, null, null), is(true)); + Assert.assertTrue(matcher.match(argument, null, null, null)); argument.add("third"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); } @Test - public void works_for_empty_paramter() { + public void worksForEmptyParamter() { List list = new ArrayList<>(); PartOfSetMatcher matcher = new PartOfSetMatcher(list); - assertThat(matcher.match(null, null, null, null), is(false)); + Assert.assertFalse(matcher.match(null, null, null, null)); List argument = new ArrayList<>(); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("second"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("first"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); argument.add("third"); - assertThat(matcher.match(argument, null, null, null), is(false)); + Assert.assertFalse(matcher.match(argument, null, null, null)); } } diff --git a/client/src/test/java/io/split/engine/segments/SegmentFetcherImpTest.java b/client/src/test/java/io/split/engine/segments/SegmentFetcherImpTest.java index 17ca1d713..0872fb45b 100644 --- a/client/src/test/java/io/split/engine/segments/SegmentFetcherImpTest.java +++ b/client/src/test/java/io/split/engine/segments/SegmentFetcherImpTest.java @@ -1,11 +1,9 @@ package io.split.engine.segments; -import com.google.common.collect.Sets; import 
io.split.storages.SegmentCache; import io.split.storages.SegmentCacheProducer; import io.split.storages.memory.SegmentCacheInMemoryImpl; import io.split.client.dtos.SegmentChange; -import io.split.engine.SDKReadinessGates; import io.split.telemetry.storage.InMemoryTelemetryStorage; import io.split.telemetry.storage.TelemetryRuntimeProducer; import io.split.telemetry.storage.TelemetryStorage; @@ -19,15 +17,12 @@ import java.util.ArrayList; import java.util.List; -import java.util.Set; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; -import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; import static org.mockito.Mockito.when; /** @@ -41,31 +36,28 @@ public class SegmentFetcherImpTest { private static final TelemetryStorage TELEMETRY_STORAGE = Mockito.mock(InMemoryTelemetryStorage.class); @Test - public void works_when_we_start_without_state() throws InterruptedException { + public void worksWhenWeStartWithoutState() throws InterruptedException { works(-1L); } @Test - public void works_when_we_start_with_state() throws InterruptedException { + public void worksWhenWeStartWithState() throws InterruptedException { works(20L); - } @Test - public void works_when_there_are_no_changes() throws InterruptedException { - long startingChangeNumber = -1L; - SDKReadinessGates gates = new SDKReadinessGates(); + public void worksWhenThereAreNoChanges() throws InterruptedException { SegmentCache segmentCache = new SegmentCacheInMemoryImpl(); SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); SegmentChange segmentChange = getSegmentChange(-1L, 10L); Mockito.when(segmentChangeFetcher.fetch(Mockito.anyString(), Mockito.anyLong(), Mockito.any())).thenReturn(segmentChange); - SegmentFetcherImp fetcher = new SegmentFetcherImp(SEGMENT_NAME, 
segmentChangeFetcher, gates, segmentCache, TELEMETRY_STORAGE); + SegmentFetcherImp fetcher = new SegmentFetcherImp(SEGMENT_NAME, segmentChangeFetcher, segmentCache, TELEMETRY_STORAGE); // execute the fetcher for a little bit. ScheduledExecutorService scheduledExecutorService = Executors.newSingleThreadScheduledExecutor(); - scheduledExecutorService.scheduleWithFixedDelay(fetcher::fetchAll, 0L, 100, TimeUnit.MICROSECONDS); + scheduledExecutorService.scheduleWithFixedDelay(() -> fetcher.fetch(new FetchOptions.Builder().build()), 0L, 100, TimeUnit.MICROSECONDS); Thread.currentThread().sleep(5 * 100); scheduledExecutorService.shutdown(); @@ -80,15 +72,11 @@ public void works_when_there_are_no_changes() throws InterruptedException { Thread.currentThread().interrupt(); } - Set expected = Sets.newHashSet("" + (startingChangeNumber + 1)); - assertNotNull(segmentCache.getChangeNumber(SEGMENT_NAME)); assertEquals(10L, segmentCache.getChangeNumber(SEGMENT_NAME)); - } private void works(long startingChangeNumber) throws InterruptedException { - SDKReadinessGates gates = new SDKReadinessGates(); String segmentName = SEGMENT_NAME; SegmentCacheProducer segmentCacheProducer = Mockito.mock(SegmentCacheProducer.class); Mockito.when(segmentCacheProducer.getChangeNumber(SEGMENT_NAME)).thenReturn(-1L).thenReturn(-1L) @@ -100,11 +88,11 @@ private void works(long startingChangeNumber) throws InterruptedException { Mockito.when(segmentChangeFetcher.fetch(Mockito.eq(SEGMENT_NAME),Mockito.eq( -1L), Mockito.any())).thenReturn(segmentChange); Mockito.when(segmentChangeFetcher.fetch(Mockito.eq(SEGMENT_NAME),Mockito.eq( 0L), Mockito.any())).thenReturn(segmentChange); - SegmentFetcher fetcher = new SegmentFetcherImp(segmentName, segmentChangeFetcher, gates, segmentCacheProducer, Mockito.mock(TelemetryRuntimeProducer.class)); + SegmentFetcher fetcher = new SegmentFetcherImp(segmentName, segmentChangeFetcher, segmentCacheProducer, Mockito.mock(TelemetryRuntimeProducer.class)); // execute the 
fetcher for a little bit. ScheduledExecutorService scheduledExecutorService = Executors.newSingleThreadScheduledExecutor(); - scheduledExecutorService.scheduleWithFixedDelay(fetcher::fetchAll, 0L, Integer.MAX_VALUE, TimeUnit.SECONDS); + scheduledExecutorService.scheduleWithFixedDelay(() -> fetcher.fetch(new FetchOptions.Builder().build()), 0L, Integer.MAX_VALUE, TimeUnit.SECONDS); Thread.currentThread().sleep(5 * 100); scheduledExecutorService.shutdown(); @@ -119,37 +107,27 @@ private void works(long startingChangeNumber) throws InterruptedException { Thread.currentThread().interrupt(); } Mockito.verify(segmentChangeFetcher, Mockito.times(2)).fetch(Mockito.anyString(), Mockito.anyLong(), Mockito.anyObject()); - } @Test(expected = NullPointerException.class) - public void does_not_work_if_segment_change_fetcher_is_null() { - SegmentCacheProducer segmentCacheProducer = Mockito.mock(SegmentCacheProducer.class); - SegmentFetcher fetcher = new SegmentFetcherImp(SEGMENT_NAME, null, new SDKReadinessGates(), segmentCacheProducer, TELEMETRY_STORAGE); - } - - @Test(expected = NullPointerException.class) - public void does_not_work_if_segment_name_is_null() { + public void doesNotWorkIfSegmentChangeFetcherIsNull() { SegmentCacheProducer segmentCacheProducer = Mockito.mock(SegmentCacheProducer.class); - SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); - SegmentFetcher fetcher = new SegmentFetcherImp(null, segmentChangeFetcher, new SDKReadinessGates(), segmentCacheProducer, TELEMETRY_STORAGE); + SegmentFetcher fetcher = new SegmentFetcherImp(SEGMENT_NAME, null, segmentCacheProducer, TELEMETRY_STORAGE); } @Test(expected = NullPointerException.class) - public void does_not_work_if_sdk_readiness_gates_are_null() { + public void doesNotWorkIfSegmentNameIsNull() { SegmentCacheProducer segmentCacheProducer = Mockito.mock(SegmentCacheProducer.class); SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); - 
SegmentFetcher fetcher = new SegmentFetcherImp(SEGMENT_NAME, segmentChangeFetcher, null, segmentCacheProducer, TELEMETRY_STORAGE); + SegmentFetcher fetcher = new SegmentFetcherImp(null, segmentChangeFetcher, segmentCacheProducer, TELEMETRY_STORAGE); } @Test public void testBypassCdnClearedAfterFirstHit() { SegmentChangeFetcher mockFetcher = Mockito.mock(SegmentChangeFetcher.class); - SegmentSynchronizationTask segmentSynchronizationTaskMock = Mockito.mock(SegmentSynchronizationTask.class); SegmentCache segmentCacheMock = new SegmentCacheInMemoryImpl(); - SDKReadinessGates mockGates = Mockito.mock(SDKReadinessGates.class); - SegmentFetcher fetcher = new SegmentFetcherImp("someSegment", mockFetcher, mockGates, segmentCacheMock, Mockito.mock(TelemetryRuntimeProducer.class)); + SegmentFetcher fetcher = new SegmentFetcherImp("someSegment", mockFetcher, segmentCacheMock, Mockito.mock(TelemetryRuntimeProducer.class)); SegmentChange response1 = new SegmentChange(); @@ -164,7 +142,7 @@ public void testBypassCdnClearedAfterFirstHit() { response2.added = new ArrayList<>(); response2.removed = new ArrayList<>(); response2.since = 1; - response1.till = 1; + response2.till = 1; ArgumentCaptor optionsCaptor = ArgumentCaptor.forClass(FetchOptions.class); ArgumentCaptor cnCaptor = ArgumentCaptor.forClass(Long.class); @@ -197,4 +175,4 @@ private SegmentChange getSegmentChange(long since, long till){ segmentChange.removed = new ArrayList<>(); return segmentChange; } -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/segments/SegmentSynchronizationTaskImpTest.java b/client/src/test/java/io/split/engine/segments/SegmentSynchronizationTaskImpTest.java index 0b83ae09b..f6f7f04f4 100644 --- a/client/src/test/java/io/split/engine/segments/SegmentSynchronizationTaskImpTest.java +++ b/client/src/test/java/io/split/engine/segments/SegmentSynchronizationTaskImpTest.java @@ -1,20 +1,36 @@ package io.split.engine.segments; import com.google.common.collect.Maps; 
-import io.split.engine.SDKReadinessGates; -import io.split.storages.SegmentCacheProducer; -import io.split.storages.SplitCacheConsumer; +import io.split.Spec; +import io.split.client.LocalhostSegmentChangeFetcher; +import io.split.client.JsonLocalhostSplitChangeFetcher; +import io.split.client.interceptors.FlagSetsFilter; +import io.split.client.interceptors.FlagSetsFilterImpl; +import io.split.client.utils.InputStreamProvider; +import io.split.client.utils.StaticContentInputStreamProvider; +import io.split.engine.common.FetchOptions; +import io.split.engine.experiments.*; +import io.split.storages.*; +import io.split.storages.memory.InMemoryCacheImp; +import io.split.storages.memory.RuleBasedSegmentCacheInMemoryImp; +import io.split.storages.memory.SegmentCacheInMemoryImpl; import io.split.telemetry.storage.InMemoryTelemetryStorage; +import io.split.telemetry.storage.NoopTelemetryStorage; import io.split.telemetry.storage.TelemetryStorage; import org.junit.Assert; import org.junit.Before; +import org.junit.Ignore; import org.junit.Test; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.InputStream; import java.lang.reflect.Field; import java.lang.reflect.Modifier; +import java.util.HashSet; import java.util.List; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutorService; @@ -22,8 +38,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertEquals; /** * Tests for SegmentSynchronizationTaskImp @@ -33,6 +48,7 @@ public class SegmentSynchronizationTaskImpTest { private static final Logger _log = LoggerFactory.getLogger(SegmentSynchronizationTaskImpTest.class); private static final TelemetryStorage TELEMETRY_STORAGE = 
Mockito.mock(InMemoryTelemetryStorage.class); + private static final TelemetryStorage TELEMETRY_STORAGE_NOOP = Mockito.mock(NoopTelemetryStorage.class); private AtomicReference fetcher1 = null; private AtomicReference fetcher2 = null; @@ -45,12 +61,11 @@ public void beforeMethod() { @Test public void works() { - SDKReadinessGates gates = new SDKReadinessGates(); SegmentCacheProducer segmentCacheProducer = Mockito.mock(SegmentCacheProducer.class); SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); - final SegmentSynchronizationTaskImp fetchers = new SegmentSynchronizationTaskImp(segmentChangeFetcher, 1L, 1, gates, segmentCacheProducer, - TELEMETRY_STORAGE, Mockito.mock(SplitCacheConsumer.class)); + final SegmentSynchronizationTaskImp fetchers = new SegmentSynchronizationTaskImp(segmentChangeFetcher, 1L, 1, segmentCacheProducer, + TELEMETRY_STORAGE, Mockito.mock(SplitCacheConsumer.class), null, Mockito.mock(RuleBasedSegmentCache.class)); // create two tasks that will separately call segment and make sure @@ -82,13 +97,12 @@ public void run() { Thread.currentThread().interrupt(); } - assertThat(fetcher1.get(), is(notNullValue())); - assertThat(fetcher1.get(), is(sameInstance(fetcher2.get()))); + Assert.assertNotNull(fetcher1.get()); + assertEquals(fetcher1.get(), fetcher2.get()); } @Test public void testFetchAllAsynchronousAndGetFalse() throws NoSuchFieldException, IllegalAccessException { - SDKReadinessGates gates = new SDKReadinessGates(); SegmentCacheProducer segmentCacheProducer = Mockito.mock(SegmentCacheProducer.class); ConcurrentMap _segmentFetchers = Maps.newConcurrentMap(); @@ -96,11 +110,9 @@ public void testFetchAllAsynchronousAndGetFalse() throws NoSuchFieldException, I SegmentFetcherImp segmentFetcher = Mockito.mock(SegmentFetcherImp.class); _segmentFetchers.put("SF", segmentFetcher); final SegmentSynchronizationTaskImp fetchers = new SegmentSynchronizationTaskImp(segmentChangeFetcher, 1L, 1, - gates, 
segmentCacheProducer, TELEMETRY_STORAGE, Mockito.mock(SplitCacheConsumer.class)); - Mockito.doNothing().when(segmentFetcher).callLoopRun(Mockito.anyObject()); + segmentCacheProducer, TELEMETRY_STORAGE, Mockito.mock(SplitCacheConsumer.class), null, Mockito.mock(RuleBasedSegmentCache.class)); Mockito.when(segmentFetcher.runWhitCacheHeader()).thenReturn(false); - Mockito.when(segmentFetcher.fetchAndUpdate(Mockito.anyObject())).thenReturn(false); - Mockito.doNothing().when(segmentFetcher).callLoopRun(Mockito.anyObject()); + Mockito.when(segmentFetcher.fetch(Mockito.anyObject())).thenReturn(false); // Before executing, we'll update the map of segmentFecthers via reflection. Field segmentFetchersForced = SegmentSynchronizationTaskImp.class.getDeclaredField("_segmentFetchers"); @@ -116,14 +128,13 @@ public void testFetchAllAsynchronousAndGetFalse() throws NoSuchFieldException, I @Test public void testFetchAllAsynchronousAndGetTrue() throws NoSuchFieldException, IllegalAccessException { - SDKReadinessGates gates = new SDKReadinessGates(); SegmentCacheProducer segmentCacheProducer = Mockito.mock(SegmentCacheProducer.class); ConcurrentMap _segmentFetchers = Maps.newConcurrentMap(); SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(SegmentChangeFetcher.class); SegmentFetcherImp segmentFetcher = Mockito.mock(SegmentFetcherImp.class); - final SegmentSynchronizationTaskImp fetchers = new SegmentSynchronizationTaskImp(segmentChangeFetcher, 1L, 1, gates, segmentCacheProducer, - TELEMETRY_STORAGE, Mockito.mock(SplitCacheConsumer.class)); + final SegmentSynchronizationTaskImp fetchers = new SegmentSynchronizationTaskImp(segmentChangeFetcher, 1L, 1, segmentCacheProducer, + TELEMETRY_STORAGE, Mockito.mock(SplitCacheConsumer.class), null, Mockito.mock(RuleBasedSegmentCache.class)); // Before executing, we'll update the map of segmentFecthers via reflection. 
Field segmentFetchersForced = SegmentSynchronizationTaskImp.class.getDeclaredField("_segmentFetchers"); @@ -132,10 +143,45 @@ public void testFetchAllAsynchronousAndGetTrue() throws NoSuchFieldException, Il modifiersField.setAccessible(true); modifiersField.setInt(segmentFetchersForced, segmentFetchersForced.getModifiers() & ~Modifier.FINAL); segmentFetchersForced.set(fetchers, _segmentFetchers); - Mockito.doNothing().when(segmentFetcher).callLoopRun(Mockito.anyObject()); Mockito.when(segmentFetcher.runWhitCacheHeader()).thenReturn(true); - Mockito.when(segmentFetcher.fetchAndUpdate(Mockito.anyObject())).thenReturn(true); + Mockito.when(segmentFetcher.fetch(Mockito.anyObject())).thenReturn(true); boolean fetch = fetchers.fetchAllSynchronous(); Assert.assertEquals(true, fetch); } -} + + @Test + public void testLocalhostSegmentChangeFetcher() throws InterruptedException, FileNotFoundException { + FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(new HashSet<>()); + SplitCache splitCacheProducer = new InMemoryCacheImp(flagSetsFilter); + + InputStream inputStream = new FileInputStream("src/test/resources/split_init.json"); + InputStreamProvider inputStreamProvider = new StaticContentInputStreamProvider(inputStream); + SplitChangeFetcher splitChangeFetcher = new JsonLocalhostSplitChangeFetcher(inputStreamProvider); + SplitParser splitParser = new SplitParser(); + FetchOptions fetchOptions = new FetchOptions.Builder().build(); + RuleBasedSegmentCache ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + + SplitFetcher splitFetcher = new SplitFetcherImp(splitChangeFetcher, splitParser, splitCacheProducer, TELEMETRY_STORAGE_NOOP, flagSetsFilter, + ruleBasedSegmentParser, ruleBasedSegmentCache); + + SplitSynchronizationTask splitSynchronizationTask = new SplitSynchronizationTask(splitFetcher, splitCacheProducer, 1000, null); + + splitSynchronizationTask.start(); + + 
Thread.sleep(2000); + + SegmentChangeFetcher segmentChangeFetcher = Mockito.mock(LocalhostSegmentChangeFetcher.class); + SegmentCacheProducer segmentCacheProducer = new SegmentCacheInMemoryImpl(); + + SegmentSynchronizationTaskImp segmentSynchronizationTaskImp = new SegmentSynchronizationTaskImp(segmentChangeFetcher, 1000, 1, segmentCacheProducer, + TELEMETRY_STORAGE_NOOP, splitCacheProducer, null, ruleBasedSegmentCache); + + segmentSynchronizationTaskImp.start(); + + Thread.sleep(2000); + + Mockito.verify(segmentChangeFetcher, Mockito.times(1)).fetch("segment_1",-1, fetchOptions); + Mockito.verify(segmentChangeFetcher, Mockito.times(1)).fetch("segment_2",-1, fetchOptions); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/splitter/HashConsistencyTest.java b/client/src/test/java/io/split/engine/splitter/HashConsistencyTest.java index 34b0e557d..cb2200294 100644 --- a/client/src/test/java/io/split/engine/splitter/HashConsistencyTest.java +++ b/client/src/test/java/io/split/engine/splitter/HashConsistencyTest.java @@ -12,7 +12,6 @@ import java.io.FileReader; import java.io.IOException; import java.net.URL; -import java.nio.charset.Charset; public class HashConsistencyTest { @Test @@ -77,7 +76,7 @@ private void validateFileLegacyHash(File file) throws IOException { int expected_hash = Integer.parseInt(parts[2]); int expected_bucket = Integer.parseInt(parts[3]); - int hash = Splitter.legacy_hash(key, seed); + int hash = Splitter.legacyHash(key, seed); int bucket = Splitter.bucket(hash); Assert.assertEquals(expected_hash, hash); @@ -98,7 +97,7 @@ private void validateFileMurmur3Hash(File file) throws IOException { long expected_hash = Long.parseLong(parts[2]); int expected_bucket = Integer.parseInt(parts[3]); - long hash = Splitter.murmur_hash(key, seed); + long hash = Splitter.murmurHash(key, seed); int bucket = Splitter.bucket(hash); Assert.assertEquals(expected_hash, hash); diff --git 
a/client/src/test/java/io/split/engine/sse/AuthApiClientTest.java b/client/src/test/java/io/split/engine/sse/AuthApiClientTest.java index dab743602..b6f05e04c 100644 --- a/client/src/test/java/io/split/engine/sse/AuthApiClientTest.java +++ b/client/src/test/java/io/split/engine/sse/AuthApiClientTest.java @@ -1,7 +1,11 @@ package io.split.engine.sse; import io.split.TestHelper; +import io.split.client.RequestDecorator; +import io.split.client.utils.SDKMetadata; import io.split.engine.sse.dtos.AuthenticationResponse; +import io.split.service.SplitHttpClient; +import io.split.service.SplitHttpClientImpl; import io.split.telemetry.storage.InMemoryTelemetryStorage; import io.split.telemetry.storage.TelemetryStorage; import org.apache.commons.lang3.StringUtils; @@ -14,6 +18,7 @@ import java.io.IOException; import java.lang.reflect.InvocationTargetException; +import java.net.URISyntaxException; public class AuthApiClientTest { private static TelemetryStorage TELEMETRY_STORAGE = Mockito.mock(InMemoryTelemetryStorage.class); @@ -22,11 +27,15 @@ public class AuthApiClientTest { public void setUp() { TELEMETRY_STORAGE = Mockito.mock(InMemoryTelemetryStorage.class); } - @Test - public void authenticateWithPushEnabledShouldReturnSuccess() throws IOException, IllegalAccessException, NoSuchMethodException, InvocationTargetException { - CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("streaming-auth-push-enabled.json", HttpStatus.SC_OK); - AuthApiClient authApiClient = new AuthApiClientImp( "www.split-test.io", httpClientMock, TELEMETRY_STORAGE); + @Test + public void authenticateWithPushEnabledShouldReturnSuccess() throws IOException, IllegalAccessException, + NoSuchMethodException, InvocationTargetException, URISyntaxException { + CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("streaming-auth-push-enabled.json", + HttpStatus.SC_OK); + SplitHttpClient splitHttpClient = SplitHttpClientImpl.create(httpClientMock, new RequestDecorator(null), + 
"qwerty", metadata()); + AuthApiClient authApiClient = new AuthApiClientImp("www.split-test.io", splitHttpClient, TELEMETRY_STORAGE); AuthenticationResponse result = authApiClient.Authenticate(); Assert.assertTrue(result.isPushEnabled()); @@ -36,15 +45,20 @@ public void authenticateWithPushEnabledShouldReturnSuccess() throws IOException, Assert.assertTrue(result.getExpiration() > 0); Mockito.verify(TELEMETRY_STORAGE, Mockito.times(1)).recordTokenRefreshes(); Mockito.verify(TELEMETRY_STORAGE, Mockito.times(1)).recordSyncLatency(Mockito.anyObject(), Mockito.anyLong()); - Mockito.verify(TELEMETRY_STORAGE, Mockito.times(1)).recordSuccessfulSync(Mockito.anyObject(), Mockito.anyLong()); + Mockito.verify(TELEMETRY_STORAGE, Mockito.times(1)).recordSuccessfulSync(Mockito.anyObject(), + Mockito.anyLong()); } @Test - public void authenticateWithPushEnabledWithWrongTokenShouldReturnError() throws IOException, IllegalAccessException, NoSuchMethodException, InvocationTargetException { - CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("streaming-auth-push-enabled-wrong-token.json", HttpStatus.SC_OK); - - AuthApiClient authApiClient = new AuthApiClientImp( "www.split-test.io", httpClientMock, TELEMETRY_STORAGE); + public void authenticateWithPushEnabledWithWrongTokenShouldReturnError() throws IOException, IllegalAccessException, + NoSuchMethodException, InvocationTargetException, URISyntaxException { + CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("streaming-auth-push-enabled-wrong-token.json", + HttpStatus.SC_OK); + SplitHttpClient splitHttpClient = SplitHttpClientImpl.create(httpClientMock, new RequestDecorator(null), + "qwerty", metadata()); + + AuthApiClient authApiClient = new AuthApiClientImp("www.split-test.io", splitHttpClient, TELEMETRY_STORAGE); AuthenticationResponse result = authApiClient.Authenticate(); Assert.assertFalse(result.isPushEnabled()); @@ -55,10 +69,31 @@ public void 
authenticateWithPushEnabledWithWrongTokenShouldReturnError() throws } @Test - public void authenticateWithPushDisabledShouldReturnSuccess() throws IOException, IllegalAccessException, NoSuchMethodException, InvocationTargetException { - CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("streaming-auth-push-disabled.json", HttpStatus.SC_OK); + public void authenticateWithPushDisabledShouldReturnSuccess() throws IOException, IllegalAccessException, + NoSuchMethodException, InvocationTargetException, URISyntaxException { + CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("streaming-auth-push-disabled.json", + HttpStatus.SC_OK); + SplitHttpClient splitHttpClient = SplitHttpClientImpl.create(httpClientMock, new RequestDecorator(null), + "qwerty", metadata()); + + AuthApiClient authApiClient = new AuthApiClientImp("www.split-test.io", splitHttpClient, TELEMETRY_STORAGE); + AuthenticationResponse result = authApiClient.Authenticate(); + + Assert.assertFalse(result.isPushEnabled()); + Assert.assertTrue(StringUtils.isEmpty(result.getChannels())); + Assert.assertFalse(result.isRetry()); + Assert.assertTrue(StringUtils.isEmpty(result.getToken())); + } - AuthApiClient authApiClient = new AuthApiClientImp("www.split-test.io", httpClientMock, TELEMETRY_STORAGE); + @Test + public void authenticateWithPushDisabledWithEmptyTokenShouldReturnSuccess() throws IOException, IllegalAccessException, + NoSuchMethodException, InvocationTargetException, URISyntaxException { + CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("streaming-auth-push-disabled-empty-token.json", + HttpStatus.SC_OK); + SplitHttpClient splitHttpClient = SplitHttpClientImpl.create(httpClientMock, new RequestDecorator(null), + "qwerty", metadata()); + + AuthApiClient authApiClient = new AuthApiClientImp("www.split-test.io", splitHttpClient, TELEMETRY_STORAGE); AuthenticationResponse result = authApiClient.Authenticate(); Assert.assertFalse(result.isPushEnabled()); @@ -68,10 
+103,13 @@ public void authenticateWithPushDisabledShouldReturnSuccess() throws IOException } @Test - public void authenticateServerErrorShouldReturnErrorWithRetry() throws IOException, IllegalAccessException, NoSuchMethodException, InvocationTargetException { + public void authenticateServerErrorShouldReturnErrorWithRetry() throws IOException, IllegalAccessException, + NoSuchMethodException, InvocationTargetException, URISyntaxException { CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("", HttpStatus.SC_INTERNAL_SERVER_ERROR); + SplitHttpClient splitHttpClient = SplitHttpClientImpl.create(httpClientMock, new RequestDecorator(null), + "qwerty", metadata()); - AuthApiClient authApiClient = new AuthApiClientImp("www.split-test.io", httpClientMock, TELEMETRY_STORAGE); + AuthApiClient authApiClient = new AuthApiClientImp("www.split-test.io", splitHttpClient, TELEMETRY_STORAGE); AuthenticationResponse result = authApiClient.Authenticate(); Assert.assertFalse(result.isPushEnabled()); @@ -81,10 +119,13 @@ public void authenticateServerErrorShouldReturnErrorWithRetry() throws IOExcepti } @Test - public void authenticateServerBadRequestShouldReturnErrorWithoutRetry() throws IOException, IllegalAccessException, NoSuchMethodException, InvocationTargetException { + public void authenticateServerBadRequestShouldReturnErrorWithoutRetry() throws IOException, IllegalAccessException, + NoSuchMethodException, InvocationTargetException, URISyntaxException { CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("", HttpStatus.SC_BAD_REQUEST); - AuthApiClient authApiClient = new AuthApiClientImp("www.split-test.io", httpClientMock, TELEMETRY_STORAGE); + SplitHttpClient splitHttpClient = SplitHttpClientImpl.create(httpClientMock, new RequestDecorator(null), + "qwerty", metadata()); + AuthApiClient authApiClient = new AuthApiClientImp("www.split-test.io", splitHttpClient, TELEMETRY_STORAGE); AuthenticationResponse result = authApiClient.Authenticate(); 
Assert.assertFalse(result.isPushEnabled()); @@ -94,10 +135,13 @@ public void authenticateServerBadRequestShouldReturnErrorWithoutRetry() throws I } @Test - public void authenticateServerUnauthorizedShouldReturnErrorWithoutRetry() throws IOException, IllegalAccessException, NoSuchMethodException, InvocationTargetException { + public void authenticateServerUnauthorizedShouldReturnErrorWithoutRetry() throws IOException, + IllegalAccessException, NoSuchMethodException, InvocationTargetException, URISyntaxException { CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("", HttpStatus.SC_UNAUTHORIZED); + SplitHttpClient splitHttpClient = SplitHttpClientImpl.create(httpClientMock, new RequestDecorator(null), + "qwerty", metadata()); - AuthApiClient authApiClient = new AuthApiClientImp("www.split-test.io", httpClientMock, TELEMETRY_STORAGE); + AuthApiClient authApiClient = new AuthApiClientImp("www.split-test.io", splitHttpClient, TELEMETRY_STORAGE); AuthenticationResponse result = authApiClient.Authenticate(); Assert.assertFalse(result.isPushEnabled()); @@ -106,4 +150,9 @@ public void authenticateServerUnauthorizedShouldReturnErrorWithoutRetry() throws Assert.assertFalse(result.isRetry()); Mockito.verify(TELEMETRY_STORAGE, Mockito.times(1)).recordAuthRejections(); } + + private SDKMetadata metadata() { + return new SDKMetadata("java-1.2.3", "1.2.3.4", "someIP"); + } + } diff --git a/client/src/test/java/io/split/engine/sse/CommonChangeNotificationTest.java b/client/src/test/java/io/split/engine/sse/CommonChangeNotificationTest.java new file mode 100644 index 000000000..f536870c1 --- /dev/null +++ b/client/src/test/java/io/split/engine/sse/CommonChangeNotificationTest.java @@ -0,0 +1,46 @@ +package io.split.engine.sse; + +import io.split.client.dtos.RuleBasedSegment; +import io.split.client.dtos.Split; +import io.split.client.dtos.Status; +import io.split.client.utils.Json; +import io.split.engine.sse.dtos.CommonChangeNotification; +import 
io.split.engine.sse.dtos.GenericNotificationData; +import io.split.engine.sse.dtos.IncomingNotification; +import io.split.engine.sse.dtos.RawMessageNotification; +import io.split.engine.sse.enums.CompressType; +import org.junit.Assert; +import org.junit.Test; + +public class CommonChangeNotificationTest { + + @Test + public void testFeatureFlagNotification() { + String notification = "{\"id\":\"vQQ61wzBRO:0:0\",\"clientId\":\"pri:MTUxNzg3MDg1OQ==\",\"timestamp\":1684265694676,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_MjkyNTIzNjczMw==_splits\",\"data\":\"{\\\"type\\\":\\\"SPLIT_UPDATE\\\",\\\"changeNumber\\\":1684265694505,\\\"pcn\\\":0,\\\"c\\\":2,\\\"d\\\":\\\"eJzMk99u2kwQxV8lOtdryQZj8N6hD5QPlThSTVNVEUKDPYZt1jZar1OlyO9emf8lVFWv2ss5zJyd82O8hTWUZSqZvW04opwhUVdsIKBSSKR+10vS1HWW7pIdz2NyBjRwHS8IXEopTLgbQqDYT+ZUm3LxlV4J4mg81LpMyKqygPRc94YeM6eQTtjphp4fegLVXvD6Qdjt9wPXF6gs2bqCxPC/2eRpDIEXpXXblpGuWCDljGptZ4bJ5lxYSJRZBoFkTcWKozpfsoH0goHfCXpB6PfcngDpVQnZEUjKIlOr2uwWqiC3zU5L1aF+3p7LFhUkPv8/mY2nk3gGgZxssmZzb8p6A9n25ktVtA9iGI3ODXunQ3HDp+AVWT6F+rZWlrWq7MN+YkSWWvuTDvkMSnNV7J6oTdl6qKTEvGnmjcCGjL2IYC/ovPYgUKnvvPtbmrmApiVryLM7p2jE++AfH6fTx09/HvuF32LWnNjStM0Xh3c8ukZcsZlEi3h8/zCObsBpJ0acqYLTmFdtqitK1V6NzrfpdPBbLmVx4uK26e27izpDu/r5yf/16AXun2Cr4u6w591xw7+LfDidLj6Mv8TXwP8xbofv/c7UmtHMmx8BAAD//0fclvU=\\\"}\"}"; + RawMessageNotification rawMessageNotification = Json.fromJson(notification, RawMessageNotification.class); + GenericNotificationData genericNotificationData = Json.fromJson(rawMessageNotification.getData(), GenericNotificationData.class); + + CommonChangeNotification featureFlagChangeNotification = new CommonChangeNotification(genericNotificationData, Split.class); + Assert.assertEquals(IncomingNotification.Type.SPLIT_UPDATE, featureFlagChangeNotification.getType()); + Assert.assertEquals(1684265694505L, featureFlagChangeNotification.getChangeNumber()); + Assert.assertEquals(CompressType.ZLIB, featureFlagChangeNotification.getCompressType()); + Assert.assertEquals(0L, 
featureFlagChangeNotification.getPreviousChangeNumber()); + Assert.assertEquals("mauro_java", featureFlagChangeNotification.getDefinition().name); + Assert.assertEquals(-1769377604, featureFlagChangeNotification.getDefinition().seed); + } + + @Test + public void testRuleBasedSegmentNotification() { + String notification = "{\"id\":\"vQQ61wzBRO:0:0\",\"clientId\":\"pri:MTUxNzg3MDg1OQ==\",\"timestamp\":1684265694676,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_MjkyNTIzNjczMw==_splits\",\"data\":\"{\\\"type\\\":\\\"RB_SEGMENT_UPDATE\\\",\\\"changeNumber\\\":1684265694505,\\\"pcn\\\":0,\\\"c\\\":0,\\\"d\\\":\\\"eyJjaGFuZ2VOdW1iZXIiOiA1LCAibmFtZSI6ICJzYW1wbGVfcnVsZV9iYXNlZF9zZWdtZW50IiwgInN0YXR1cyI6ICJBQ1RJVkUiLCAidHJhZmZpY1R5cGVOYW1lIjogInVzZXIiLCAiZXhjbHVkZWQiOiB7ImtleXMiOiBbIm1hdXJvQHNwbGl0LmlvIiwgImdhc3RvbkBzcGxpdC5pbyJdLCAic2VnbWVudHMiOiBbXX0sICJjb25kaXRpb25zIjogW3sibWF0Y2hlckdyb3VwIjogeyJjb21iaW5lciI6ICJBTkQiLCAibWF0Y2hlcnMiOiBbeyJrZXlTZWxlY3RvciI6IHsidHJhZmZpY1R5cGUiOiAidXNlciIsICJhdHRyaWJ1dGUiOiAiZW1haWwifSwgIm1hdGNoZXJUeXBlIjogIkVORFNfV0lUSCIsICJuZWdhdGUiOiBmYWxzZSwgIndoaXRlbGlzdE1hdGNoZXJEYXRhIjogeyJ3aGl0ZWxpc3QiOiBbIkBzcGxpdC5pbyJdfX1dfX1dfQ==\\\"}\"}"; + RawMessageNotification rawMessageNotification = Json.fromJson(notification, RawMessageNotification.class); + GenericNotificationData genericNotificationData = Json.fromJson(rawMessageNotification.getData(), GenericNotificationData.class); + + CommonChangeNotification ruleBasedSegmentCommonChangeNotification = new CommonChangeNotification(genericNotificationData, RuleBasedSegment.class); + Assert.assertEquals(IncomingNotification.Type.RB_SEGMENT_UPDATE, ruleBasedSegmentCommonChangeNotification.getType()); + Assert.assertEquals(1684265694505L, ruleBasedSegmentCommonChangeNotification.getChangeNumber()); + Assert.assertEquals(CompressType.NOT_COMPRESSED, ruleBasedSegmentCommonChangeNotification.getCompressType()); + Assert.assertEquals(0L, ruleBasedSegmentCommonChangeNotification.getPreviousChangeNumber()); 
+ Assert.assertEquals("sample_rule_based_segment", ruleBasedSegmentCommonChangeNotification.getDefinition().name); + Assert.assertEquals(Status.ACTIVE, ruleBasedSegmentCommonChangeNotification.getDefinition().status); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/sse/EventSourceClientTest.java b/client/src/test/java/io/split/engine/sse/EventSourceClientTest.java index 74acf65e3..e3a5050e1 100644 --- a/client/src/test/java/io/split/engine/sse/EventSourceClientTest.java +++ b/client/src/test/java/io/split/engine/sse/EventSourceClientTest.java @@ -1,9 +1,10 @@ package io.split.engine.sse; import io.split.SSEMockServer; +import io.split.client.RequestDecorator; import io.split.engine.sse.client.SSEClient; +import io.split.engine.sse.dtos.CommonChangeNotification; import io.split.engine.sse.dtos.ErrorNotification; -import io.split.engine.sse.dtos.SplitChangeNotification; import io.split.telemetry.storage.InMemoryTelemetryStorage; import io.split.telemetry.storage.TelemetryRuntimeProducer; import org.apache.hc.client5.http.config.RequestConfig; @@ -42,9 +43,9 @@ public void startShouldConnect() throws IOException { TelemetryRuntimeProducer telemetryRuntimeProducer = Mockito.mock(InMemoryTelemetryStorage.class); sseServer.start(); - EventSourceClient eventSourceClient = new EventSourceClientImp("http://localhost:" + sseServer.getPort(), _notificationParser, _notificationProcessor, _pushStatusTracker, buildHttpClient(), telemetryRuntimeProducer); + EventSourceClient eventSourceClient = new EventSourceClientImp("http://localhost:" + sseServer.getPort(), _notificationParser, _notificationProcessor, _pushStatusTracker, buildHttpClient(), telemetryRuntimeProducer, null, new RequestDecorator(null)); - boolean result = eventSourceClient.start("channel-test","token-test"); + boolean result = eventSourceClient.start("channel-test", "token-test"); 
Assert.assertTrue(result); @@ -52,20 +53,20 @@ public void startShouldConnect() throws IOException { } @Test - public void startShouldNotConnect() throws IOException { + public void startShouldReconnect() throws IOException { SSEMockServer.SseEventQueue eventQueue = new SSEMockServer.SseEventQueue(); SSEMockServer sseServer = buildSSEMockServer(eventQueue); TelemetryRuntimeProducer telemetryRuntimeProducer = Mockito.mock(InMemoryTelemetryStorage.class); sseServer.start(); - EventSourceClient eventSourceClient = new EventSourceClientImp("http://fake:" + sseServer.getPort(), _notificationParser, _notificationProcessor, _pushStatusTracker, buildHttpClient(), telemetryRuntimeProducer); + EventSourceClient eventSourceClient = new EventSourceClientImp("http://fake:" + sseServer.getPort(), _notificationParser, _notificationProcessor, _pushStatusTracker, buildHttpClient(), telemetryRuntimeProducer, null, new RequestDecorator(null)); - boolean result = eventSourceClient.start("channel-test","token-test"); + boolean result = eventSourceClient.start("channel-test", "token-test"); Assert.assertFalse(result); Awaitility.await() .atMost(50L, TimeUnit.SECONDS) - .untilAsserted(() -> Mockito.verify(_pushStatusTracker, Mockito.times(1)).handleSseStatus(SSEClient.StatusMessage.NONRETRYABLE_ERROR)); + .untilAsserted(() -> Mockito.verify(_pushStatusTracker, Mockito.times(1)).handleSseStatus(SSEClient.StatusMessage.RETRYABLE_ERROR)); } @Test @@ -74,9 +75,9 @@ public void startAndReceiveNotification() throws IOException { SSEMockServer sseServer = buildSSEMockServer(eventQueue); TelemetryRuntimeProducer telemetryRuntimeProducer = Mockito.mock(InMemoryTelemetryStorage.class); sseServer.start(); - EventSourceClient eventSourceClient = new EventSourceClientImp("http://localhost:" + sseServer.getPort(), _notificationParser, _notificationProcessor, 
_pushStatusTracker, buildHttpClient(), telemetryRuntimeProducer); + EventSourceClient eventSourceClient = new EventSourceClientImp("http://localhost:" + sseServer.getPort(), _notificationParser, _notificationProcessor, _pushStatusTracker, buildHttpClient(), telemetryRuntimeProducer, null, new RequestDecorator(null)); - boolean result = eventSourceClient.start("channel-test","token-test"); + boolean result = eventSourceClient.start("channel-test", "token-test"); Assert.assertTrue(result); @@ -95,7 +96,7 @@ public void startAndReceiveNotification() throws IOException { Awaitility.await() .atMost(50L, TimeUnit.SECONDS) - .untilAsserted(() -> Mockito.verify(_notificationProcessor, Mockito.times(1)).process(Mockito.any(SplitChangeNotification.class))); + .untilAsserted(() -> Mockito.verify(_notificationProcessor, Mockito.times(1)).process(Mockito.any(CommonChangeNotification.class))); OutboundSseEvent sseEventError = new OutboundEvent .Builder() @@ -138,4 +139,4 @@ private static CloseableHttpClient buildHttpClient() { .setDefaultRequestConfig(requestConfig) .build(); } -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/sse/NotificationParserImpTest.java b/client/src/test/java/io/split/engine/sse/NotificationParserImpTest.java new file mode 100644 index 000000000..b8df61320 --- /dev/null +++ b/client/src/test/java/io/split/engine/sse/NotificationParserImpTest.java @@ -0,0 +1,71 @@ +package io.split.engine.sse; + +import io.split.client.dtos.Split; +import io.split.engine.sse.dtos.CommonChangeNotification; +import io.split.engine.sse.enums.CompressType; +import io.split.engine.sse.exceptions.EventParsingException; + +import org.junit.Assert; +import org.junit.Test; + +public class NotificationParserImpTest { + + @Test + public void validateZlibCompressType() throws EventParsingException { + String payload = 
"{\"id\":\"vQQ61wzBRO:0:0\",\"clientId\":\"pri:MTUxNzg3MDg1OQ==\",\"timestamp\":1684265694676,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_MjkyNTIzNjczMw==_splits\",\"data\":\"{\\\"type\\\":\\\"SPLIT_UPDATE\\\",\\\"changeNumber\\\":1684265694505,\\\"pcn\\\":0,\\\"c\\\":2,\\\"d\\\":\\\"eJzMk99u2kwQxV8lOtdryQZj8N6hD5QPlThSTVNVEUKDPYZt1jZar1OlyO9emf8lVFWv2ss5zJyd82O8hTWUZSqZvW04opwhUVdsIKBSSKR+10vS1HWW7pIdz2NyBjRwHS8IXEopTLgbQqDYT+ZUm3LxlV4J4mg81LpMyKqygPRc94YeM6eQTtjphp4fegLVXvD6Qdjt9wPXF6gs2bqCxPC/2eRpDIEXpXXblpGuWCDljGptZ4bJ5lxYSJRZBoFkTcWKozpfsoH0goHfCXpB6PfcngDpVQnZEUjKIlOr2uwWqiC3zU5L1aF+3p7LFhUkPv8/mY2nk3gGgZxssmZzb8p6A9n25ktVtA9iGI3ODXunQ3HDp+AVWT6F+rZWlrWq7MN+YkSWWvuTDvkMSnNV7J6oTdl6qKTEvGnmjcCGjL2IYC/ovPYgUKnvvPtbmrmApiVryLM7p2jE++AfH6fTx09/HvuF32LWnNjStM0Xh3c8ukZcsZlEi3h8/zCObsBpJ0acqYLTmFdtqitK1V6NzrfpdPBbLmVx4uK26e27izpDu/r5yf/16AXun2Cr4u6w591xw7+LfDidLj6Mv8TXwP8xbofv/c7UmtHMmx8BAAD//0fclvU=\\\"}\"}"; + NotificationParserImp notificationParserImp = new NotificationParserImp(); + + CommonChangeNotification incomingNotification = (CommonChangeNotification) notificationParserImp.parseMessage(payload); + Split split = (Split) incomingNotification.getDefinition(); + Assert.assertEquals("mauro_java", split.name); + Assert.assertEquals(1684265694505L, split.changeNumber); + Assert.assertEquals(CompressType.ZLIB, incomingNotification.getCompressType()); + Assert.assertEquals(0, incomingNotification.getPreviousChangeNumber()); + } + + @Test + public void validateGzipCompressType() throws EventParsingException { + String payload = 
"{\"id\":\"vQQ61wzBRO:0:0\",\"clientId\":\"pri:MTUxNzg3MDg1OQ==\",\"timestamp\":1684265694676,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_MjkyNTIzNjczMw==_splits\",\"data\":\"{\\\"type\\\":\\\"SPLIT_UPDATE\\\",\\\"changeNumber\\\":1684265694505,\\\"pcn\\\":0,\\\"c\\\":1,\\\"d\\\":\\\"H4sIAAAAAAAA/8yT327aTBDFXyU612vJxoTgvUMfKB8qcaSapqoihAZ7DNusvWi9TpUiv3tl/pdQVb1qL+cwc3bOj/EGzlKeq3T6tuaYCoZEXbGFgMogkXXDIM0y31v4C/aCgMnrU9/3gl7Pp4yilMMIAuVusqDamvlXeiWIg/FAa5OSU6aEDHz/ip4wZ5Be1AmjoBsFAtVOCO56UXh31/O7ApUjV1eQGPw3HT+NIPCitG7bctIVC2ScU63d1DK5gksHCZPnEEhXVC45rosFW8ig1++GYej3g85tJEB6aSA7Aqkpc7Ws7XahCnLTbLVM7evnzalsUUHi8//j6WgyTqYQKMilK7b31tRryLa3WKiyfRCDeHhq2Dntiys+JS/J8THUt5VyrFXlHnYTQ3LU2h91yGdQVqhy+0RtTeuhUoNZ08wagTVZdxbBndF5vYVApb7z9m9pZgKaFqwhT+6coRHvg398nEweP/157Bd+S1hz6oxtm88O73B0jbhgM47nyej+YRRfgdNODDlXJWcJL9tUF5SqnRqfbtPr4LdcTHnk4rfp3buLOkG7+Pmp++vRM9w/wVblzX7Pm8OGfxf5YDKZfxh9SS6B/2Pc9t/7ja01o5k1PwIAAP//uTipVskEAAA=\\\"}\"}"; + NotificationParserImp notificationParserImp = new NotificationParserImp(); + + CommonChangeNotification incomingNotification = (CommonChangeNotification) notificationParserImp.parseMessage(payload); + Split split = (Split) incomingNotification.getDefinition(); + Assert.assertEquals("mauro_java", split.name); + Assert.assertEquals(1684333081259L, split.changeNumber); + Assert.assertEquals(CompressType.GZIP, incomingNotification.getCompressType()); + Assert.assertEquals(0, incomingNotification.getPreviousChangeNumber()); + } + + @Test + public void validateNotCompressType() throws EventParsingException { + String payload = 
"{\"id\":\"vQQ61wzBRO:0:0\",\"clientId\":\"pri:MTUxNzg3MDg1OQ==\",\"timestamp\":1684265694676,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_MjkyNTIzNjczMw==_splits\",\"data\":\"{\\\"type\\\":\\\"SPLIT_UPDATE\\\",\\\"changeNumber\\\":1684329854385,\\\"pcn\\\":0,\\\"c\\\":0,\\\"d\\\":\\\"eyJ0cmFmZmljVHlwZU5hbWUiOiJ1c2VyIiwiaWQiOiJkNDMxY2RkMC1iMGJlLTExZWEtOGE4MC0xNjYwYWRhOWNlMzkiLCJuYW1lIjoibWF1cm9famF2YSIsInRyYWZmaWNBbGxvY2F0aW9uIjoxMDAsInRyYWZmaWNBbGxvY2F0aW9uU2VlZCI6LTkyMzkxNDkxLCJzZWVkIjotMTc2OTM3NzYwNCwic3RhdHVzIjoiQUNUSVZFIiwia2lsbGVkIjpmYWxzZSwiZGVmYXVsdFRyZWF0bWVudCI6Im9mZiIsImNoYW5nZU51bWJlciI6MTY4NDMyOTg1NDM4NSwiYWxnbyI6MiwiY29uZmlndXJhdGlvbnMiOnt9LCJjb25kaXRpb25zIjpbeyJjb25kaXRpb25UeXBlIjoiV0hJVEVMSVNUIiwibWF0Y2hlckdyb3VwIjp7ImNvbWJpbmVyIjoiQU5EIiwibWF0Y2hlcnMiOlt7Im1hdGNoZXJUeXBlIjoiV0hJVEVMSVNUIiwibmVnYXRlIjpmYWxzZSwid2hpdGVsaXN0TWF0Y2hlckRhdGEiOnsid2hpdGVsaXN0IjpbImFkbWluIiwibWF1cm8iLCJuaWNvIl19fV19LCJwYXJ0aXRpb25zIjpbeyJ0cmVhdG1lbnQiOiJvZmYiLCJzaXplIjoxMDB9XSwibGFiZWwiOiJ3aGl0ZWxpc3RlZCJ9LHsiY29uZGl0aW9uVHlwZSI6IlJPTExPVVQiLCJtYXRjaGVyR3JvdXAiOnsiY29tYmluZXIiOiJBTkQiLCJtYXRjaGVycyI6W3sia2V5U2VsZWN0b3IiOnsidHJhZmZpY1R5cGUiOiJ1c2VyIn0sIm1hdGNoZXJUeXBlIjoiSU5fU0VHTUVOVCIsIm5lZ2F0ZSI6ZmFsc2UsInVzZXJEZWZpbmVkU2VnbWVudE1hdGNoZXJEYXRhIjp7InNlZ21lbnROYW1lIjoibWF1ci0yIn19XX0sInBhcnRpdGlvbnMiOlt7InRyZWF0bWVudCI6Im9uIiwic2l6ZSI6MH0seyJ0cmVhdG1lbnQiOiJvZmYiLCJzaXplIjoxMDB9LHsidHJlYXRtZW50IjoiVjQiLCJzaXplIjowfSx7InRyZWF0bWVudCI6InY1Iiwic2l6ZSI6MH1dLCJsYWJlbCI6ImluIHNlZ21lbnQgbWF1ci0yIn0seyJjb25kaXRpb25UeXBlIjoiUk9MTE9VVCIsIm1hdGNoZXJHcm91cCI6eyJjb21iaW5lciI6IkFORCIsIm1hdGNoZXJzIjpbeyJrZXlTZWxlY3RvciI6eyJ0cmFmZmljVHlwZSI6InVzZXIifSwibWF0Y2hlclR5cGUiOiJBTExfS0VZUyIsIm5lZ2F0ZSI6ZmFsc2V9XX0sInBhcnRpdGlvbnMiOlt7InRyZWF0bWVudCI6Im9uIiwic2l6ZSI6MH0seyJ0cmVhdG1lbnQiOiJvZmYiLCJzaXplIjoxMDB9LHsidHJlYXRtZW50IjoiVjQiLCJzaXplIjowfSx7InRyZWF0bWVudCI6InY1Iiwic2l6ZSI6MH1dLCJsYWJlbCI6ImRlZmF1bHQgcnVsZSJ9XX0=\\\"}\"}"; + NotificationParserImp notificationParserImp = new 
NotificationParserImp(); + + CommonChangeNotification incomingNotification = (CommonChangeNotification) notificationParserImp.parseMessage(payload); + Split split = (Split) incomingNotification.getDefinition(); + Assert.assertEquals("mauro_java", split.name); + Assert.assertEquals(1684329854385L, split.changeNumber); + Assert.assertEquals(CompressType.NOT_COMPRESSED, incomingNotification.getCompressType()); + Assert.assertEquals(0, incomingNotification.getPreviousChangeNumber()); + } + + @Test + public void validateCompressTypeIncorrect() throws EventParsingException { + String payload = "{\"id\":\"vQQ61wzBRO:0:0\",\"clientId\":\"pri:MTUxNzg3MDg1OQ==\",\"timestamp\":1684265694676,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_MjkyNTIzNjczMw==_splits\",\"data\":\"{\\\"type\\\":\\\"SPLIT_UPDATE\\\",\\\"changeNumber\\\":1684265694505,\\\"pcn\\\":0,\\\"c\\\":3,\\\"d\\\":\\\"eJzMk99u2kwQxV8lOtdryQZj8N6hD5QPlThSTVNVEUKDPYZt1jZar1OlyO9emf8lVFWv2ss5zJyd82O8hTWUZSqZvW04opwhUVdsIKBSSKR+10vS1HWW7pIdz2NyBjRwHS8IXEopTLgbQqDYT+ZUm3LxlV4J4mg81LpMyKqygPRc94YeM6eQTtjphp4fegLVXvD6Qdjt9wPXF6gs2bqCxPC/2eRpDIEXpXXblpGuWCDljGptZ4bJ5lxYSJRZBoFkTcWKozpfsoH0goHfCXpB6PfcngDpVQnZEUjKIlOr2uwWqiC3zU5L1aF+3p7LFhUkPv8/mY2nk3gGgZxssmZzb8p6A9n25ktVtA9iGI3ODXunQ3HDp+AVWT6F+rZWlrWq7MN+YkSWWvuTDvkMSnNV7J6oTdl6qKTEvGnmjcCGjL2IYC/ovPYgUKnvvPtbmrmApiVryLM7p2jE++AfH6fTx09/HvuF32LWnNjStM0Xh3c8ukZcsZlEi3h8/zCObsBpJ0acqYLTmFdtqitK1V6NzrfpdPBbLmVx4uK26e27izpDu/r5yf/16AXun2Cr4u6w591xw7+LfDidLj6Mv8TXwP8xbofv/c7UmtHMmx8BAAD//0fclvU=\\\"}\"}"; + NotificationParserImp notificationParserImp = new NotificationParserImp(); + + CommonChangeNotification incomingNotification = (CommonChangeNotification) notificationParserImp.parseMessage(payload); + Assert.assertNull(incomingNotification.getCompressType()); + Assert.assertEquals(0, incomingNotification.getPreviousChangeNumber()); + } + + @Test + public void validateCompressTypeNull() throws EventParsingException { + String payload = 
"{\"id\":\"vQQ61wzBRO:0:0\",\"clientId\":\"pri:MTUxNzg3MDg1OQ==\",\"timestamp\":1684265694676,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_MjkyNTIzNjczMw==_splits\",\"data\":\"{\\\"type\\\":\\\"SPLIT_UPDATE\\\",\\\"changeNumber\\\":1684265694505,\\\"pcn\\\":0,\\\"d\\\":\\\"eJzMk99u2kwQxV8lOtdryQZj8N6hD5QPlThSTVNVEUKDPYZt1jZar1OlyO9emf8lVFWv2ss5zJyd82O8hTWUZSqZvW04opwhUVdsIKBSSKR+10vS1HWW7pIdz2NyBjRwHS8IXEopTLgbQqDYT+ZUm3LxlV4J4mg81LpMyKqygPRc94YeM6eQTtjphp4fegLVXvD6Qdjt9wPXF6gs2bqCxPC/2eRpDIEXpXXblpGuWCDljGptZ4bJ5lxYSJRZBoFkTcWKozpfsoH0goHfCXpB6PfcngDpVQnZEUjKIlOr2uwWqiC3zU5L1aF+3p7LFhUkPv8/mY2nk3gGgZxssmZzb8p6A9n25ktVtA9iGI3ODXunQ3HDp+AVWT6F+rZWlrWq7MN+YkSWWvuTDvkMSnNV7J6oTdl6qKTEvGnmjcCGjL2IYC/ovPYgUKnvvPtbmrmApiVryLM7p2jE++AfH6fTx09/HvuF32LWnNjStM0Xh3c8ukZcsZlEi3h8/zCObsBpJ0acqYLTmFdtqitK1V6NzrfpdPBbLmVx4uK26e27izpDu/r5yf/16AXun2Cr4u6w591xw7+LfDidLj6Mv8TXwP8xbofv/c7UmtHMmx8BAAD//0fclvU=\\\"}\"}"; + NotificationParserImp notificationParserImp = new NotificationParserImp(); + + CommonChangeNotification incomingNotification = (CommonChangeNotification) notificationParserImp.parseMessage(payload); + Assert.assertNull(incomingNotification.getCompressType()); + Assert.assertEquals(0, incomingNotification.getPreviousChangeNumber()); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/sse/NotificationParserTest.java b/client/src/test/java/io/split/engine/sse/NotificationParserTest.java index 1bbf355ae..ad0b4075e 100644 --- a/client/src/test/java/io/split/engine/sse/NotificationParserTest.java +++ b/client/src/test/java/io/split/engine/sse/NotificationParserTest.java @@ -5,7 +5,7 @@ import org.junit.Before; import org.junit.Test; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; public class NotificationParserTest { private NotificationParser notificationParser; @@ -22,7 +22,7 @@ public void parseSplitUpdateShouldReturnParsedEvent() throws EventParsingExcepti IncomingNotification result = 
notificationParser.parseMessage(payload); assertEquals(IncomingNotification.Type.SPLIT_UPDATE, result.getType()); assertEquals("xxxx_xxxx_splits", result.getChannel()); - assertEquals(1592590435115L, ((SplitChangeNotification) result).getChangeNumber()); + assertEquals(1592590435115L, ((CommonChangeNotification) result).getChangeNumber()); } @Test @@ -149,4 +149,4 @@ public void parseControlStreamingDisabledShouldReturnParsedEvent() throws EventP assertEquals("control_pri", result.getChannel()); assertEquals(ControlType.STREAMING_DISABLED, ((ControlNotification)result).getControlType()); } -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/sse/NotificationProcessorTest.java b/client/src/test/java/io/split/engine/sse/NotificationProcessorTest.java index 4bbbaab72..ea75ecb1d 100644 --- a/client/src/test/java/io/split/engine/sse/NotificationProcessorTest.java +++ b/client/src/test/java/io/split/engine/sse/NotificationProcessorTest.java @@ -1,38 +1,59 @@ package io.split.engine.sse; +import io.split.client.dtos.RuleBasedSegment; +import io.split.client.dtos.Split; import io.split.engine.sse.dtos.*; import io.split.engine.sse.workers.SegmentsWorkerImp; -import io.split.engine.sse.workers.SplitsWorker; +import io.split.engine.sse.workers.FeatureFlagsWorker; import io.split.engine.sse.workers.Worker; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; public class NotificationProcessorTest { - private SplitsWorker _splitsWorker; + private FeatureFlagsWorker _featureFlagsWorker; private Worker _segmentWorker; private NotificationProcessor _notificationProcessor; private PushStatusTracker _pushStatusTracker; @Before public void setUp() { - _splitsWorker = Mockito.mock(SplitsWorker.class); + _featureFlagsWorker = Mockito.mock(FeatureFlagsWorker.class); _segmentWorker = Mockito.mock(SegmentsWorkerImp.class); _pushStatusTracker = Mockito.mock(PushStatusTracker.class); - _notificationProcessor = new 
NotificationProcessorImp(_splitsWorker, _segmentWorker, _pushStatusTracker); + _notificationProcessor = new NotificationProcessorImp(_featureFlagsWorker, _segmentWorker, _pushStatusTracker); } @Test public void processSplitUpdateAddToQueueInWorker() { long changeNumber = 1585867723838L; String channel = "splits"; - GenericNotificationData genericNotificationData = new GenericNotificationData(changeNumber, null, null, null, null, null, null, channel); - SplitChangeNotification splitChangeNotification = new SplitChangeNotification(genericNotificationData); + GenericNotificationData genericNotificationData = GenericNotificationData.builder() + .changeNumber(changeNumber) + .channel(channel) + .build(); + CommonChangeNotification splitChangeNotification = new CommonChangeNotification(genericNotificationData, Split.class); _notificationProcessor.process(splitChangeNotification); - Mockito.verify(_splitsWorker, Mockito.times(1)).addToQueue(splitChangeNotification.getChangeNumber()); + Mockito.verify(_featureFlagsWorker, Mockito.times(1)).addToQueue(Mockito.anyObject()); + } + + @Test + public void processRuleBasedSegmentUpdateAddToQueueInWorker() { + long changeNumber = 1585867723838L; + String channel = "splits"; + GenericNotificationData genericNotificationData = GenericNotificationData.builder() + .changeNumber(changeNumber) + .channel(channel) + .type(IncomingNotification.Type.RB_SEGMENT_UPDATE) + .build(); + CommonChangeNotification ruleBasedSegmentChangeNotification = new CommonChangeNotification(genericNotificationData, RuleBasedSegment.class); + + _notificationProcessor.process(ruleBasedSegmentChangeNotification); + + Mockito.verify(_featureFlagsWorker, Mockito.times(1)).addToQueue(Mockito.anyObject()); } @Test @@ -41,13 +62,18 @@ public void processSplitKillAndAddToQueueInWorker() { String defaultTreatment = "off"; String splitName = "test-split"; String channel = "splits"; - GenericNotificationData genericNotificationData = new 
GenericNotificationData(changeNumber, defaultTreatment, splitName, null, null, null, null, channel); + GenericNotificationData genericNotificationData = GenericNotificationData.builder() + .changeNumber(changeNumber) + .defaultTreatment(defaultTreatment) + .featureFlagName(splitName) + .channel(channel) + .build(); SplitKillNotification splitKillNotification = new SplitKillNotification(genericNotificationData); _notificationProcessor.process(splitKillNotification); - Mockito.verify(_splitsWorker, Mockito.times(1)).killSplit(splitKillNotification.getChangeNumber(), splitKillNotification.getSplitName(), splitKillNotification.getDefaultTreatment()); - Mockito.verify(_splitsWorker, Mockito.times(1)).addToQueue(splitKillNotification.getChangeNumber()); + Mockito.verify(_featureFlagsWorker, Mockito.times(1)).kill(splitKillNotification); + Mockito.verify(_featureFlagsWorker, Mockito.times(1)).addToQueue(Mockito.anyObject()); } @Test @@ -55,7 +81,11 @@ public void processSegmentUpdateAddToQueueInWorker() { long changeNumber = 1585867723838L; String segmentName = "segment-test"; String channel = "segments"; - GenericNotificationData genericNotificationData = new GenericNotificationData(changeNumber, null, null, null, null, segmentName, null, channel); + GenericNotificationData genericNotificationData = GenericNotificationData.builder() + .changeNumber(changeNumber) + .segmentName(segmentName) + .channel(channel) + .build(); SegmentChangeNotification segmentChangeNotification = new SegmentChangeNotification(genericNotificationData); _notificationProcessor.process(segmentChangeNotification); @@ -75,11 +105,13 @@ public void processControlNotification() { @Test public void processOccupancyNotification() { - GenericNotificationData genericNotificationData = new GenericNotificationData(null, null, null, null, null, null, null, "control_pri"); + GenericNotificationData genericNotificationData = GenericNotificationData.builder() + .channel("control_pri") + .build(); 
OccupancyNotification occupancyNotification = new OccupancyNotification(genericNotificationData); _notificationProcessor.process(occupancyNotification); Mockito.verify(_pushStatusTracker, Mockito.times(1)).handleIncomingOccupancyEvent(Mockito.any(OccupancyNotification.class)); } -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/sse/PushStatusTrackerTest.java b/client/src/test/java/io/split/engine/sse/PushStatusTrackerTest.java index 8e245f5af..2660f9e1c 100644 --- a/client/src/test/java/io/split/engine/sse/PushStatusTrackerTest.java +++ b/client/src/test/java/io/split/engine/sse/PushStatusTrackerTest.java @@ -10,12 +10,6 @@ import org.mockito.Mockito; import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; public class PushStatusTrackerTest { private static final String CONTROL_PRI = "control_pri"; @@ -28,8 +22,8 @@ public void HandleControlEventStreamingPausedShouldNotifyEvent() { PushStatusTracker pushStatusTracker = new PushStatusTrackerImp(messages, telemetryStorage); pushStatusTracker.handleIncomingControlEvent(controlNotification); - assertThat(messages.size(), is(equalTo(1))); - assertThat(messages.peek(), is(equalTo(PushManager.Status.STREAMING_DOWN))); + Assert.assertEquals(1, messages.size()); + Assert.assertEquals(PushManager.Status.STREAMING_DOWN, messages.peek()); } @Test @@ -40,9 +34,9 @@ public void HandleControlEventStreamingResumedShouldNotifyEvent() throws Interru pushStatusTracker.handleIncomingControlEvent(buildControlNotification(ControlType.STREAMING_PAUSED)); pushStatusTracker.handleIncomingControlEvent(buildControlNotification(ControlType.STREAMING_RESUMED)); - assertThat(messages.size(), is(equalTo(2))); - assertThat(messages.take(), is(equalTo(PushManager.Status.STREAMING_DOWN))); - 
assertThat(messages.take(), is(equalTo(PushManager.Status.STREAMING_READY))); + Assert.assertEquals(2, messages.size()); + Assert.assertEquals(PushManager.Status.STREAMING_DOWN, messages.take()); + Assert.assertEquals(PushManager.Status.STREAMING_READY, messages.take()); } @Test @@ -57,8 +51,8 @@ public void HandleControlEventStreamingResumedShouldNotNotifyEvent() { pushStatusTracker.handleIncomingControlEvent(controlNotification); pushStatusTracker.handleIncomingControlEvent(controlNotification); - assertThat(messages.size(), is(equalTo(1))); - assertThat(messages.peek(), is(equalTo(PushManager.Status.STREAMING_DOWN))); + Assert.assertEquals(1, messages.size()); + Assert.assertEquals(PushManager.Status.STREAMING_DOWN, messages.peek()); Assert.assertEquals(1, telemetryStorage.popStreamingEvents().size()); } @@ -72,8 +66,8 @@ public void HandleControlEventStreamingDisabledShouldNotifyShutdownEvent() { pushStatusTracker.handleIncomingControlEvent(controlNotification); pushStatusTracker.handleIncomingControlEvent(controlNotification); - assertThat(messages.size(), is(equalTo(1))); - assertThat(messages.peek(), is(equalTo(PushManager.Status.STREAMING_OFF))); + Assert.assertEquals(1, messages.size()); + Assert.assertEquals(PushManager.Status.STREAMING_OFF, messages.peek()); } @Test @@ -84,7 +78,7 @@ public void HandleOccupancyEventWithPublishersFirstTimeShouldNotNotifyEvent() { PushStatusTracker pushStatusTracker = new PushStatusTrackerImp(messages, telemetryStorage); pushStatusTracker.handleIncomingOccupancyEvent(occupancyNotification); - assertThat(messages.size(), is(equalTo(0))); + Assert.assertEquals(0, messages.size()); Assert.assertEquals(1, telemetryStorage.popStreamingEvents().size()); } @@ -96,12 +90,12 @@ public void HandleOccupancyEventWithPublishersAndWithStreamingDisabledShouldNoti pushStatusTracker.handleIncomingOccupancyEvent(buildOccupancyNotification(0, null)); pushStatusTracker.handleIncomingOccupancyEvent(buildOccupancyNotification(2, null)); - 
assertThat(messages.size(), is(equalTo(2))); + Assert.assertEquals(2, messages.size()); PushManager.Status m1 = messages.take(); - assertThat(m1, is(equalTo(PushManager.Status.STREAMING_DOWN))); + Assert.assertEquals(PushManager.Status.STREAMING_DOWN, m1); PushManager.Status m2 = messages.take(); - assertThat(m2, is(equalTo(PushManager.Status.STREAMING_READY))); + Assert.assertEquals(PushManager.Status.STREAMING_READY, m2); } @Test @@ -112,12 +106,12 @@ public void HandleOccupancyEventWithDifferentChannelsPublishersShouldNotifyEvent pushStatusTracker.handleIncomingOccupancyEvent(buildOccupancyNotification(0, "control_pri")); pushStatusTracker.handleIncomingOccupancyEvent(buildOccupancyNotification(2, "control_sec")); - assertThat(messages.size(), is(equalTo(2))); + Assert.assertEquals(2, messages.size()); PushManager.Status m1 = messages.take(); - assertThat(m1, is(equalTo(PushManager.Status.STREAMING_DOWN))); + Assert.assertEquals(PushManager.Status.STREAMING_DOWN, m1); PushManager.Status m2 = messages.take(); - assertThat(m2, is(equalTo(PushManager.Status.STREAMING_READY))); + Assert.assertEquals(PushManager.Status.STREAMING_READY, m2); } @Test @@ -151,13 +145,12 @@ public void HandleTwoRetryableErrorInARow() throws InterruptedException { pushStatusTracker.handleSseStatus(SSEClient.StatusMessage.RETRYABLE_ERROR); pushStatusTracker.handleSseStatus(SSEClient.StatusMessage.RETRYABLE_ERROR); - - assertThat(messages.size(), is(equalTo(2))); + Assert.assertEquals(2, messages.size()); PushManager.Status m1 = messages.take(); - assertThat(m1, is(equalTo(PushManager.Status.STREAMING_BACKOFF))); + Assert.assertEquals(PushManager.Status.STREAMING_BACKOFF, m1); PushManager.Status m2 = messages.take(); - assertThat(m2, is(equalTo(PushManager.Status.STREAMING_BACKOFF))); + Assert.assertEquals(PushManager.Status.STREAMING_BACKOFF, m2); } private ControlNotification buildControlNotification(ControlType controlType) { @@ -169,14 +162,11 @@ private OccupancyNotification 
buildOccupancyNotification(int publishers, String } private GenericNotificationData buildGenericData(ControlType controlType, IncomingNotification.Type type, Integer publishers, String channel) { - return new GenericNotificationData( - null, - null, - null, - controlType, - publishers != null ? new OccupancyMetrics(publishers) : null, - null, - type, - channel == null ? "channel-test" : channel); + return GenericNotificationData.builder() + .controlType(controlType) + .metrics(publishers != null ? new OccupancyMetrics(publishers) : null) + .type(type) + .channel(channel == null ? "channel-test" : channel) + .build(); } -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/sse/SSEClientTest.java b/client/src/test/java/io/split/engine/sse/SSEClientTest.java index 4319ae101..15f13d3b3 100644 --- a/client/src/test/java/io/split/engine/sse/SSEClientTest.java +++ b/client/src/test/java/io/split/engine/sse/SSEClientTest.java @@ -1,5 +1,6 @@ package io.split.engine.sse; +import io.split.client.RequestDecorator; import io.split.engine.sse.client.SSEClient; import io.split.telemetry.storage.InMemoryTelemetryStorage; import io.split.telemetry.storage.TelemetryRuntimeProducer; @@ -38,7 +39,7 @@ public void basicUsageTest() throws URISyntaxException, InterruptedException { CloseableHttpClient httpClient = httpClientbuilder.build(); SSEClient sse = new SSEClient(e -> null, - s -> null, httpClient, telemetryRuntimeProducer); + s -> null, httpClient, telemetryRuntimeProducer, null, new RequestDecorator(null)); sse.open(uri); Thread.sleep(5000); sse.close(); diff --git a/client/src/test/java/io/split/engine/sse/utils/DecompressionUtilTest.java b/client/src/test/java/io/split/engine/sse/utils/DecompressionUtilTest.java new file mode 100644 index 000000000..bc62b431f --- /dev/null +++ b/client/src/test/java/io/split/engine/sse/utils/DecompressionUtilTest.java @@ -0,0 +1,36 @@ +package io.split.engine.sse.utils; + +import org.junit.Assert; +import 
org.junit.Test; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.util.Base64; +import java.util.zip.DataFormatException; + +import static io.split.engine.sse.utils.DecompressionUtil.gZipDecompress; +import static io.split.engine.sse.utils.DecompressionUtil.zLibDecompress; + +public class DecompressionUtilTest { + + + @Test + public void testZLibDecompress() throws UnsupportedEncodingException, DataFormatException { + String toDecode = "eJzMk99u2kwQxV8lOtdryQZj8N6hD5QPlThSTVNVEUKDPYZt1jZar1OlyO9emf8lVFWv2ss5zJyd82O8hTWUZSqZvW04opwhUVdsIKBSSKR+10vS1HWW7pIdz2NyBjRwHS8IXEopTLgbQqDYT+ZUm3LxlV4J4mg81LpMyKqygPRc94YeM6eQTtjphp4fegLVXvD6Qdjt9wPXF6gs2bqCxPC/2eRpDIEXpXXblpGuWCDljGptZ4bJ5lxYSJRZBoFkTcWKozpfsoH0goHfCXpB6PfcngDpVQnZEUjKIlOr2uwWqiC3zU5L1aF+3p7LFhUkPv8/mY2nk3gGgZxssmZzb8p6A9n25ktVtA9iGI3ODXunQ3HDp+AVWT6F+rZWlrWq7MN+YkSWWvuTDvkMSnNV7J6oTdl6qKTEvGnmjcCGjL2IYC/ovPYgUKnvvPtbmrmApiVryLM7p2jE++AfH6fTx09/HvuF32LWnNjStM0Xh3c8ukZcsZlEi3h8/zCObsBpJ0acqYLTmFdtqitK1V6NzrfpdPBbLmVx4uK26e27izpDu/r5yf/16AXun2Cr4u6w591xw7+LfDidLj6Mv8TXwP8xbofv/c7UmtHMmx8BAAD//0fclvU="; + + byte[] decodedBytes = Base64.getDecoder().decode(toDecode); + byte[] decompressFeatureFlag = zLibDecompress(decodedBytes); + String featureFlag = new String(decompressFeatureFlag, 0, decompressFeatureFlag.length, "UTF-8"); + 
Assert.assertEquals("{\"trafficTypeName\":\"user\",\"id\":\"d431cdd0-b0be-11ea-8a80-1660ada9ce39\",\"name\":\"mauro_java\",\"trafficAllocation\":100,\"trafficAllocationSeed\":-92391491,\"seed\":-1769377604,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"off\",\"changeNumber\":1684265694505,\"algo\":2,\"configurations\":{},\"conditions\":[{\"conditionType\":\"WHITELIST\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"matcherType\":\"WHITELIST\",\"negate\":false,\"whitelistMatcherData\":{\"whitelist\":[\"admin\",\"mauro\",\"nico\"]}}]},\"partitions\":[{\"treatment\":\"v5\",\"size\":100}],\"label\":\"whitelisted\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\"},\"matcherType\":\"IN_SEGMENT\",\"negate\":false,\"userDefinedSegmentMatcherData\":{\"segmentName\":\"maur-2\"}}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100},{\"treatment\":\"V4\",\"size\":0},{\"treatment\":\"v5\",\"size\":0}],\"label\":\"in segment maur-2\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\"},\"matcherType\":\"ALL_KEYS\",\"negate\":false}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100},{\"treatment\":\"V4\",\"size\":0},{\"treatment\":\"v5\",\"size\":0}],\"label\":\"default rule\"}]}", featureFlag); + } + + @Test + public void testGZipDecompress() throws IOException { + String toDecode = 
"H4sIAAAAAAAA/8yT327aTBDFXyU612vJxoTgvUMfKB8qcaSapqoihAZ7DNusvWi9TpUiv3tl/pdQVb1qL+cwc3bOj/EGzlKeq3T6tuaYCoZEXbGFgMogkXXDIM0y31v4C/aCgMnrU9/3gl7Pp4yilMMIAuVusqDamvlXeiWIg/FAa5OSU6aEDHz/ip4wZ5Be1AmjoBsFAtVOCO56UXh31/O7ApUjV1eQGPw3HT+NIPCitG7bctIVC2ScU63d1DK5gksHCZPnEEhXVC45rosFW8ig1++GYej3g85tJEB6aSA7Aqkpc7Ws7XahCnLTbLVM7evnzalsUUHi8//j6WgyTqYQKMilK7b31tRryLa3WKiyfRCDeHhq2Dntiys+JS/J8THUt5VyrFXlHnYTQ3LU2h91yGdQVqhy+0RtTeuhUoNZ08wagTVZdxbBndF5vYVApb7z9m9pZgKaFqwhT+6coRHvg398nEweP/157Bd+S1hz6oxtm88O73B0jbhgM47nyej+YRRfgdNODDlXJWcJL9tUF5SqnRqfbtPr4LdcTHnk4rfp3buLOkG7+Pmp++vRM9w/wVblzX7Pm8OGfxf5YDKZfxh9SS6B/2Pc9t/7ja01o5k1PwIAAP//uTipVskEAAA="; + + byte[] decodedBytes = Base64.getDecoder().decode(toDecode); + byte[] decompressFeatureFlag = gZipDecompress(decodedBytes); + String featureFlag = new String(decompressFeatureFlag, 0, decompressFeatureFlag.length, "UTF-8"); + Assert.assertEquals("{\"trafficTypeName\":\"user\",\"id\":\"d431cdd0-b0be-11ea-8a80-1660ada9ce39\",\"name\":\"mauro_java\",\"trafficAllocation\":100,\"trafficAllocationSeed\":-92391491,\"seed\":-1769377604,\"status\":\"ACTIVE\",\"killed\":false,\"defaultTreatment\":\"off\",\"changeNumber\":1684333081259,\"algo\":2,\"configurations\":{},\"conditions\":[{\"conditionType\":\"WHITELIST\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"matcherType\":\"WHITELIST\",\"negate\":false,\"whitelistMatcherData\":{\"whitelist\":[\"admin\",\"mauro\",\"nico\"]}}]},\"partitions\":[{\"treatment\":\"v5\",\"size\":100}],\"label\":\"whitelisted\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\"},\"matcherType\":\"IN_SEGMENT\",\"negate\":false,\"userDefinedSegmentMatcherData\":{\"segmentName\":\"maur-2\"}}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100},{\"treatment\":\"V4\",\"size\":0},{\"treatment\":\"v5\",\"size\":0}],\"label\":\"in segment 
maur-2\"},{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\"},\"matcherType\":\"ALL_KEYS\",\"negate\":false}]},\"partitions\":[{\"treatment\":\"on\",\"size\":0},{\"treatment\":\"off\",\"size\":100},{\"treatment\":\"V4\",\"size\":0},{\"treatment\":\"v5\",\"size\":0}],\"label\":\"default rule\"}]}", featureFlag); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/sse/workers/FeatureFlagWorkerImpTest.java b/client/src/test/java/io/split/engine/sse/workers/FeatureFlagWorkerImpTest.java new file mode 100644 index 000000000..1f7c9a8c7 --- /dev/null +++ b/client/src/test/java/io/split/engine/sse/workers/FeatureFlagWorkerImpTest.java @@ -0,0 +1,154 @@ +package io.split.engine.sse.workers; + +import io.split.client.dtos.ConditionType; +import io.split.client.dtos.Split; +import io.split.client.dtos.RuleBasedSegment; +import io.split.client.dtos.MatcherCombiner; +import io.split.client.interceptors.FlagSetsFilter; +import io.split.client.interceptors.FlagSetsFilterImpl; +import io.split.client.utils.Json; +import io.split.engine.common.Synchronizer; +import io.split.engine.common.SynchronizerImp; +import io.split.engine.experiments.ParsedCondition; +import io.split.engine.experiments.ParsedRuleBasedSegment; +import io.split.engine.experiments.RuleBasedSegmentParser; +import io.split.engine.experiments.SplitParser; +import io.split.engine.matchers.AttributeMatcher; +import io.split.engine.matchers.CombiningMatcher; +import io.split.engine.sse.dtos.CommonChangeNotification; +import io.split.engine.sse.dtos.RawMessageNotification; +import io.split.engine.sse.dtos.GenericNotificationData; +import io.split.storages.RuleBasedSegmentCache; +import io.split.storages.SplitCacheProducer; +import io.split.storages.memory.InMemoryCacheImp; +import io.split.telemetry.domain.UpdatesFromSSE; +import io.split.telemetry.storage.InMemoryTelemetryStorage; +import 
io.split.telemetry.storage.TelemetryStorage; +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mockito; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; + +import static org.mockito.Mockito.when; + +public class FeatureFlagWorkerImpTest { + + private static final FlagSetsFilter FLAG_SETS_FILTER = new FlagSetsFilterImpl(new HashSet<>()); + + @Test + public void testRefreshSplitsWithCorrectFF() { + SplitParser splitParser = new SplitParser(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + Synchronizer synchronizer = Mockito.mock(SynchronizerImp.class); + SplitCacheProducer splitCacheProducer = Mockito.mock(SplitCacheProducer.class); + RuleBasedSegmentCache ruleBasedSegmentCache = Mockito.mock(RuleBasedSegmentCache.class); + TelemetryStorage telemetryRuntimeProducer = new InMemoryTelemetryStorage(); + FeatureFlagWorkerImp featureFlagsWorker = new FeatureFlagWorkerImp(synchronizer, splitParser, ruleBasedSegmentParser, splitCacheProducer, ruleBasedSegmentCache, telemetryRuntimeProducer, FLAG_SETS_FILTER); + String notification = 
"{\"id\":\"vQQ61wzBRO:0:0\",\"clientId\":\"pri:MTUxNzg3MDg1OQ==\",\"timestamp\":1684265694676,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_MjkyNTIzNjczMw==_splits\",\"data\":\"{\\\"type\\\":\\\"SPLIT_UPDATE\\\",\\\"changeNumber\\\":1684265694505,\\\"pcn\\\":0,\\\"c\\\":2,\\\"d\\\":\\\"eJzMk99u2kwQxV8lOtdryQZj8N6hD5QPlThSTVNVEUKDPYZt1jZar1OlyO9emf8lVFWv2ss5zJyd82O8hTWUZSqZvW04opwhUVdsIKBSSKR+10vS1HWW7pIdz2NyBjRwHS8IXEopTLgbQqDYT+ZUm3LxlV4J4mg81LpMyKqygPRc94YeM6eQTtjphp4fegLVXvD6Qdjt9wPXF6gs2bqCxPC/2eRpDIEXpXXblpGuWCDljGptZ4bJ5lxYSJRZBoFkTcWKozpfsoH0goHfCXpB6PfcngDpVQnZEUjKIlOr2uwWqiC3zU5L1aF+3p7LFhUkPv8/mY2nk3gGgZxssmZzb8p6A9n25ktVtA9iGI3ODXunQ3HDp+AVWT6F+rZWlrWq7MN+YkSWWvuTDvkMSnNV7J6oTdl6qKTEvGnmjcCGjL2IYC/ovPYgUKnvvPtbmrmApiVryLM7p2jE++AfH6fTx09/HvuF32LWnNjStM0Xh3c8ukZcsZlEi3h8/zCObsBpJ0acqYLTmFdtqitK1V6NzrfpdPBbLmVx4uK26e27izpDu/r5yf/16AXun2Cr4u6w591xw7+LfDidLj6Mv8TXwP8xbofv/c7UmtHMmx8BAAD//0fclvU=\\\"}\"}"; + RawMessageNotification rawMessageNotification = Json.fromJson(notification, RawMessageNotification.class); + GenericNotificationData genericNotificationData = Json.fromJson(rawMessageNotification.getData(), GenericNotificationData.class); + + CommonChangeNotification featureFlagChangeNotification = new CommonChangeNotification(genericNotificationData, Split.class); + featureFlagsWorker.executeRefresh(featureFlagChangeNotification); + UpdatesFromSSE updatesFromSSE = telemetryRuntimeProducer.popUpdatesFromSSE(); + Assert.assertEquals(1, updatesFromSSE.getSplits()); + Mockito.verify(synchronizer, Mockito.times(0)).refreshSplits(1684265694505L, 0L); + Mockito.verify(synchronizer, Mockito.times(1)).forceRefreshSegment(Mockito.anyString()); + } + + @Test + public void testRefreshSplitsWithEmptyData() { + SplitParser splitParser = new SplitParser(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + Synchronizer synchronizer = Mockito.mock(SynchronizerImp.class); + SplitCacheProducer splitCacheProducer = 
Mockito.mock(SplitCacheProducer.class); + RuleBasedSegmentCache ruleBasedSegmentCache = Mockito.mock(RuleBasedSegmentCache.class); + TelemetryStorage telemetryRuntimeProducer = new InMemoryTelemetryStorage(); + FeatureFlagWorkerImp featureFlagsWorker = new FeatureFlagWorkerImp(synchronizer, splitParser, ruleBasedSegmentParser, splitCacheProducer, ruleBasedSegmentCache, telemetryRuntimeProducer, FLAG_SETS_FILTER); + String notification = "{\"id\":\"vQQ61wzBRO:0:0\",\"clientId\":\"pri:MTUxNzg3MDg1OQ==\",\"timestamp\":1684265694676,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_MjkyNTIzNjczMw==_splits\",\"data\":\"{\\\"type\\\":\\\"SPLIT_UPDATE\\\",\\\"changeNumber\\\":1684265694505}\"}"; + RawMessageNotification rawMessageNotification = Json.fromJson(notification, RawMessageNotification.class); + GenericNotificationData genericNotificationData = Json.fromJson(rawMessageNotification.getData(), GenericNotificationData.class); + + CommonChangeNotification featureFlagChangeNotification = new CommonChangeNotification(genericNotificationData, Split.class); + featureFlagsWorker.executeRefresh(featureFlagChangeNotification); + UpdatesFromSSE updatesFromSSE = telemetryRuntimeProducer.popUpdatesFromSSE(); + Assert.assertEquals(0, updatesFromSSE.getSplits()); + Mockito.verify(synchronizer, Mockito.times(1)).refreshSplits(1684265694505L, 0L); + Mockito.verify(synchronizer, Mockito.times(0)).forceRefreshSegment(Mockito.anyString()); + } + + @Test + public void testRefreshSplitsArchiveFF() { + SplitParser splitParser = new SplitParser(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + Synchronizer synchronizer = Mockito.mock(SynchronizerImp.class); + SplitCacheProducer splitCacheProducer = new InMemoryCacheImp(1686165614090L, FLAG_SETS_FILTER); + RuleBasedSegmentCache ruleBasedSegmentCache = Mockito.mock(RuleBasedSegmentCache.class); + TelemetryStorage telemetryRuntimeProducer = new InMemoryTelemetryStorage(); + FeatureFlagWorkerImp 
featureFlagsWorker = new FeatureFlagWorkerImp(synchronizer, splitParser, ruleBasedSegmentParser, splitCacheProducer, ruleBasedSegmentCache, telemetryRuntimeProducer, FLAG_SETS_FILTER); + String notification = "{\"id\":\"vQQ61wzBRO:0:0\",\"clientId\":\"pri:MTUxNzg3MDg1OQ==\",\"timestamp\":1684265694676,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_MjkyNTIzNjczMw==_splits\",\"data\":\"{\\\"type\\\":\\\"SPLIT_UPDATE\\\",\\\"changeNumber\\\":1686165617166,\\\"pcn\\\":1686165614090,\\\"c\\\":2,\\\"d\\\":\\\"eJxsUdFu4jAQ/JVqnx3JDjTh/JZCrj2JBh0EqtOBIuNswKqTIMeuxKH8+ykhiKrqiyXvzM7O7lzAGlEUSqbnEyaiRODgGjRAQOXAIQ/puPB96tHHIPQYQ/QmFNErxEgG44DKnI2AQHXtTOI0my6WcXZAmxoUtsTKvil7nNZVoQ5RYdFERh7VBwK5TY60rqWwqq6AM0q/qa8Qc+As/EHZ5HHMCDR9wQ/9kIajcEygscK6BjhEy+nLr008AwLvSuuOVgjdIIEcC+H03RZw2Hg/n88JEJBHUR0wceUeDXAWTAIWPAYsZEFAQOhDDdwnIPslnOk9NcAvNwEOly3IWtdmC3wLe+1wCy0Q2Hh/zNvTV9xg3sFtr5irQe3v5f7twgAOy8V8vlinQKAUVh7RPJvanbrBsi73qurMQpTM7oSrzjueV6hR2tp05E8J39MV1hq1d7YrWWxsZ2cQGYjzeLXK0pcoyRbLLP69juZZuuiyxoPo2oa7ukqYc+JKNEq+XgVmwopucC6sGMSS9etTvAQCH0I7BO7Ttt21BE7C2E8XsN+l06h/CJy25CveH/eGM0rbHQEt9qiHnR62jtKR7N/8wafQ7tr/AQAA//8S4fPB\\\"}\"}"; + RawMessageNotification rawMessageNotification = Json.fromJson(notification, RawMessageNotification.class); + GenericNotificationData genericNotificationData = Json.fromJson(rawMessageNotification.getData(), GenericNotificationData.class); + + CommonChangeNotification featureFlagChangeNotification = new CommonChangeNotification(genericNotificationData, Split.class); + featureFlagsWorker.executeRefresh(featureFlagChangeNotification); + UpdatesFromSSE updatesFromSSE = telemetryRuntimeProducer.popUpdatesFromSSE(); + Assert.assertEquals(1, updatesFromSSE.getSplits()); + Mockito.verify(synchronizer, Mockito.times(0)).refreshSplits(1686165617166L, 0L); + Mockito.verify(synchronizer, Mockito.times(0)).forceRefreshSegment(Mockito.anyString()); + } + + @Test + public void testUpdateRuleBasedSegmentsWithCorrectFF() { + io.split.engine.matchers.Matcher 
matcher = (matchValue, bucketingKey, attributes, evaluationContext) -> false; + ParsedCondition parsedCondition = new ParsedCondition(ConditionType.ROLLOUT, + new CombiningMatcher(MatcherCombiner.AND, Arrays.asList(new AttributeMatcher("email", matcher, false))), + null, + "my label"); + ParsedRuleBasedSegment parsedRBS = new ParsedRuleBasedSegment("sample_rule_based_segment", + Arrays.asList(parsedCondition), + "user", + 5, + Arrays.asList("mauro@split.io","gaston@split.io"), + new ArrayList<>()); + + SplitParser splitParser = new SplitParser(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + Synchronizer synchronizer = Mockito.mock(SynchronizerImp.class); + SplitCacheProducer splitCacheProducer = Mockito.mock(SplitCacheProducer.class); + RuleBasedSegmentCache ruleBasedSegmentCache = Mockito.mock(RuleBasedSegmentCache.class); + TelemetryStorage telemetryRuntimeProducer = new InMemoryTelemetryStorage(); + FeatureFlagWorkerImp featureFlagsWorker = new FeatureFlagWorkerImp(synchronizer, splitParser, ruleBasedSegmentParser, splitCacheProducer, ruleBasedSegmentCache, telemetryRuntimeProducer, FLAG_SETS_FILTER); + String notification = 
"{\"id\":\"vQQ61wzBRO:0:0\",\"clientId\":\"pri:MTUxNzg3MDg1OQ==\",\"timestamp\":1684265694676,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_MjkyNTIzNjczMw==_splits\",\"data\":\"{\\\"type\\\":\\\"RB_SEGMENT_UPDATE\\\",\\\"changeNumber\\\":1684265694505,\\\"pcn\\\":0,\\\"c\\\":0,\\\"d\\\":\\\"eyJjaGFuZ2VOdW1iZXIiOiA1LCAibmFtZSI6ICJzYW1wbGVfcnVsZV9iYXNlZF9zZWdtZW50IiwgInN0YXR1cyI6ICJBQ1RJVkUiLCAidHJhZmZpY1R5cGVOYW1lIjogInVzZXIiLCAiZXhjbHVkZWQiOiB7ImtleXMiOiBbIm1hdXJvQHNwbGl0LmlvIiwgImdhc3RvbkBzcGxpdC5pbyJdLCAic2VnbWVudHMiOiBbXX0sICJjb25kaXRpb25zIjogW3sibWF0Y2hlckdyb3VwIjogeyJjb21iaW5lciI6ICJBTkQiLCAibWF0Y2hlcnMiOiBbeyJrZXlTZWxlY3RvciI6IHsidHJhZmZpY1R5cGUiOiAidXNlciIsICJhdHRyaWJ1dGUiOiAiZW1haWwifSwgIm1hdGNoZXJUeXBlIjogIkVORFNfV0lUSCIsICJuZWdhdGUiOiBmYWxzZSwgIndoaXRlbGlzdE1hdGNoZXJEYXRhIjogeyJ3aGl0ZWxpc3QiOiBbIkBzcGxpdC5pbyJdfX1dfX1dfQ==\\\"}\"}"; + RawMessageNotification rawMessageNotification = Json.fromJson(notification, RawMessageNotification.class); + GenericNotificationData genericNotificationData = Json.fromJson(rawMessageNotification.getData(), GenericNotificationData.class); + + CommonChangeNotification ruleBasedSegmentChangeNotification = new CommonChangeNotification(genericNotificationData, RuleBasedSegment.class); + featureFlagsWorker.executeRefresh(ruleBasedSegmentChangeNotification); + Mockito.verify(ruleBasedSegmentCache, Mockito.times(1)).update(Arrays.asList(parsedRBS), new ArrayList<>(), 1684265694505L); + } + + @Test + public void testRefreshRuleBasedSegmentWithCorrectFF() { + SplitParser splitParser = new SplitParser(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + Synchronizer synchronizer = Mockito.mock(SynchronizerImp.class); + SplitCacheProducer splitCacheProducer = Mockito.mock(SplitCacheProducer.class); + RuleBasedSegmentCache ruleBasedSegmentCache = Mockito.mock(RuleBasedSegmentCache.class); + HashSet rbs = new HashSet<>(); + rbs.add("sample_rule_based_segment"); + 
when(ruleBasedSegmentCache.contains(rbs)).thenReturn(false); + TelemetryStorage telemetryRuntimeProducer = new InMemoryTelemetryStorage(); + FeatureFlagWorkerImp featureFlagsWorker = new FeatureFlagWorkerImp(synchronizer, splitParser, ruleBasedSegmentParser, splitCacheProducer, ruleBasedSegmentCache, telemetryRuntimeProducer, FLAG_SETS_FILTER); + String notification = "{\"id\":\"vQQ61wzBRO:0:0\",\"clientId\":\"pri:MTUxNzg3MDg1OQ==\",\"timestamp\":1684265694676,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_MjkyNTIzNjczMw==_splits\",\"data\":\"{\\\"type\\\":\\\"SPLIT_UPDATE\\\",\\\"changeNumber\\\":1684265694505,\\\"pcn\\\":0,\\\"c\\\":0,\\\"d\\\":\\\"eyJjaGFuZ2VOdW1iZXIiOiAxMCwgInRyYWZmaWNUeXBlTmFtZSI6ICJ1c2VyIiwgIm5hbWUiOiAicmJzX2ZsYWciLCAidHJhZmZpY0FsbG9jYXRpb24iOiAxMDAsICJ0cmFmZmljQWxsb2NhdGlvblNlZWQiOiAxODI4Mzc3MzgwLCAic2VlZCI6IC0yODY2MTc5MjEsICJzdGF0dXMiOiAiQUNUSVZFIiwgImtpbGxlZCI6IGZhbHNlLCAiZGVmYXVsdFRyZWF0bWVudCI6ICJvZmYiLCAiYWxnbyI6IDIsICJjb25kaXRpb25zIjogW3siY29uZGl0aW9uVHlwZSI6ICJST0xMT1VUIiwgIm1hdGNoZXJHcm91cCI6IHsiY29tYmluZXIiOiAiQU5EIiwgIm1hdGNoZXJzIjogW3sia2V5U2VsZWN0b3IiOiB7InRyYWZmaWNUeXBlIjogInVzZXIifSwgIm1hdGNoZXJUeXBlIjogIklOX1JVTEVfQkFTRURfU0VHTUVOVCIsICJuZWdhdGUiOiBmYWxzZSwgInVzZXJEZWZpbmVkU2VnbWVudE1hdGNoZXJEYXRhIjogeyJzZWdtZW50TmFtZSI6ICJzYW1wbGVfcnVsZV9iYXNlZF9zZWdtZW50In19XX0sICJwYXJ0aXRpb25zIjogW3sidHJlYXRtZW50IjogIm9uIiwgInNpemUiOiAxMDB9LCB7InRyZWF0bWVudCI6ICJvZmYiLCAic2l6ZSI6IDB9XSwgImxhYmVsIjogImluIHJ1bGUgYmFzZWQgc2VnbWVudCBzYW1wbGVfcnVsZV9iYXNlZF9zZWdtZW50In0sIHsiY29uZGl0aW9uVHlwZSI6ICJST0xMT1VUIiwgIm1hdGNoZXJHcm91cCI6IHsiY29tYmluZXIiOiAiQU5EIiwgIm1hdGNoZXJzIjogW3sia2V5U2VsZWN0b3IiOiB7InRyYWZmaWNUeXBlIjogInVzZXIifSwgIm1hdGNoZXJUeXBlIjogIkFMTF9LRVlTIiwgIm5lZ2F0ZSI6IGZhbHNlfV19LCAicGFydGl0aW9ucyI6IFt7InRyZWF0bWVudCI6ICJvbiIsICJzaXplIjogMH0sIHsidHJlYXRtZW50IjogIm9mZiIsICJzaXplIjogMTAwfV0sICJsYWJlbCI6ICJkZWZhdWx0IHJ1bGUifV0sICJjb25maWd1cmF0aW9ucyI6IHt9LCAic2V0cyI6IFtdLCAiaW1wcmVzc2lvbnNEaXNhYmxlZCI6IGZhbHNlfQ==\\\"}\"}"; + 
RawMessageNotification rawMessageNotification = Json.fromJson(notification, RawMessageNotification.class); + GenericNotificationData genericNotificationData = Json.fromJson(rawMessageNotification.getData(), GenericNotificationData.class); + CommonChangeNotification featureFlagChangeNotification = new CommonChangeNotification(genericNotificationData, Split.class); + + featureFlagsWorker.executeRefresh(featureFlagChangeNotification); + Mockito.verify(synchronizer, Mockito.times(0)).refreshSplits(0L, 1684265694505L); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/engine/sse/workers/SplitsWorkerTest.java b/client/src/test/java/io/split/engine/sse/workers/SplitsWorkerTest.java index 8db49054b..7e63fa554 100644 --- a/client/src/test/java/io/split/engine/sse/workers/SplitsWorkerTest.java +++ b/client/src/test/java/io/split/engine/sse/workers/SplitsWorkerTest.java @@ -1,10 +1,24 @@ package io.split.engine.sse.workers; +import io.split.client.dtos.Split; +import io.split.client.interceptors.FlagSetsFilter; +import io.split.client.interceptors.FlagSetsFilterImpl; import io.split.engine.common.Synchronizer; +import io.split.engine.experiments.RuleBasedSegmentParser; +import io.split.engine.experiments.SplitParser; +import io.split.engine.sse.dtos.CommonChangeNotification; +import io.split.engine.sse.dtos.GenericNotificationData; +import io.split.engine.sse.dtos.IncomingNotification; +import io.split.engine.sse.dtos.SplitKillNotification; +import io.split.storages.RuleBasedSegmentCache; +import io.split.storages.SplitCacheProducer; +import io.split.telemetry.storage.InMemoryTelemetryStorage; +import io.split.telemetry.storage.TelemetryRuntimeProducer; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; +import java.util.HashSet; import java.util.List; import static org.hamcrest.MatcherAssert.assertThat; @@ -12,73 +26,124 @@ public class SplitsWorkerTest { + private static final FlagSetsFilter FLAG_SETS_FILTER = 
new FlagSetsFilterImpl(new HashSet<>()); + @Test public void addToQueueWithoutElementsWShouldNotTriggerFetch() throws InterruptedException { Synchronizer splitFetcherMock = Mockito.mock(Synchronizer.class); + SplitParser splitParser = new SplitParser(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + SplitCacheProducer splitCacheProducer = Mockito.mock(SplitCacheProducer.class); + RuleBasedSegmentCache ruleBasedSegmentCache = Mockito.mock(RuleBasedSegmentCache.class); + TelemetryRuntimeProducer telemetryRuntimeProducer = Mockito.mock(InMemoryTelemetryStorage.class); - SplitsWorker splitsWorker = new SplitsWorkerImp(splitFetcherMock); - splitsWorker.start(); + FeatureFlagsWorker featureFlagsWorker = new FeatureFlagWorkerImp(splitFetcherMock, splitParser, ruleBasedSegmentParser, splitCacheProducer, ruleBasedSegmentCache, telemetryRuntimeProducer, FLAG_SETS_FILTER); + featureFlagsWorker.start(); Thread.sleep(500); - Mockito.verify(splitFetcherMock, Mockito.never()).refreshSplits(Mockito.anyLong()); - splitsWorker.stop(); + Mockito.verify(splitFetcherMock, Mockito.never()).refreshSplits(Mockito.anyObject(), Mockito.anyObject()); + featureFlagsWorker.stop(); } @Test public void addToQueueWithElementsWShouldTriggerFetch() throws InterruptedException { Synchronizer syncMock = Mockito.mock(Synchronizer.class); + SplitParser splitParser = new SplitParser(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + SplitCacheProducer splitCacheProducer = Mockito.mock(SplitCacheProducer.class); + RuleBasedSegmentCache ruleBasedSegmentCache = Mockito.mock(RuleBasedSegmentCache.class); + TelemetryRuntimeProducer telemetryRuntimeProducer = Mockito.mock(InMemoryTelemetryStorage.class); - SplitsWorker splitsWorker = new SplitsWorkerImp(syncMock); - splitsWorker.start(); + FeatureFlagsWorker featureFlagsWorker = new FeatureFlagWorkerImp(syncMock, splitParser, ruleBasedSegmentParser, splitCacheProducer, ruleBasedSegmentCache, 
telemetryRuntimeProducer, FLAG_SETS_FILTER); + featureFlagsWorker.start(); ArgumentCaptor cnCaptor = ArgumentCaptor.forClass(Long.class); - splitsWorker.addToQueue(1585956698457L); - splitsWorker.addToQueue(1585956698467L); - splitsWorker.addToQueue(1585956698477L); - splitsWorker.addToQueue(1585956698476L); + ArgumentCaptor cnCaptor2 = ArgumentCaptor.forClass(Long.class); + + featureFlagsWorker.addToQueue(new CommonChangeNotification(GenericNotificationData.builder() + .changeNumber(1585956698457L) + .type(IncomingNotification.Type.SPLIT_UPDATE) + .build(), Split.class)); + featureFlagsWorker.addToQueue(new CommonChangeNotification(GenericNotificationData.builder() + .changeNumber(1585956698467L) + .type(IncomingNotification.Type.SPLIT_UPDATE) + .build(), Split.class)); + featureFlagsWorker.addToQueue(new CommonChangeNotification(GenericNotificationData.builder() + .changeNumber(1585956698477L) + .type(IncomingNotification.Type.SPLIT_UPDATE) + .build(), Split.class)); + featureFlagsWorker.addToQueue(new CommonChangeNotification(GenericNotificationData.builder() + .changeNumber(1585956698476L) + .type(IncomingNotification.Type.SPLIT_UPDATE) + .build(), Split.class)); Thread.sleep(1000); - Mockito.verify(syncMock, Mockito.times(4)).refreshSplits(cnCaptor.capture()); + Mockito.verify(syncMock, Mockito.times(4)).refreshSplits(cnCaptor.capture(), cnCaptor2.capture()); List captured = cnCaptor.getAllValues(); assertThat(captured, contains(1585956698457L, 1585956698467L, 1585956698477L, 1585956698476L)); - splitsWorker.stop(); + featureFlagsWorker.stop(); } @Test public void killShouldTriggerFetch() { long changeNumber = 1585956698457L; - String splitName = "split-test"; + String featureFlagName = "feature-flag-test"; String defaultTreatment = "off"; Synchronizer syncMock = Mockito.mock(Synchronizer.class); - SplitsWorker splitsWorker = new SplitsWorkerImp(syncMock); - splitsWorker.start(); - - splitsWorker.killSplit(changeNumber, splitName, defaultTreatment); - 
Mockito.verify(syncMock, Mockito.times(1)).localKillSplit(splitName, defaultTreatment, changeNumber); - splitsWorker.stop(); + SplitParser splitParser = new SplitParser(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + SplitCacheProducer splitCacheProducer = Mockito.mock(SplitCacheProducer.class); + RuleBasedSegmentCache ruleBasedSegmentCache = Mockito.mock(RuleBasedSegmentCache.class); + TelemetryRuntimeProducer telemetryRuntimeProducer = Mockito.mock(InMemoryTelemetryStorage.class); + FeatureFlagsWorker featureFlagsWorker = new FeatureFlagWorkerImp(syncMock, splitParser, ruleBasedSegmentParser, splitCacheProducer, ruleBasedSegmentCache, telemetryRuntimeProducer, FLAG_SETS_FILTER) { + }; + featureFlagsWorker.start(); + SplitKillNotification splitKillNotification = new SplitKillNotification(GenericNotificationData.builder() + .changeNumber(changeNumber) + .defaultTreatment(defaultTreatment) + .featureFlagName(featureFlagName) + .build()); + + featureFlagsWorker.kill(splitKillNotification); + Mockito.verify(syncMock, Mockito.times(1)).localKillSplit(splitKillNotification); + featureFlagsWorker.stop(); } @Test public void messagesNotProcessedWhenWorkerStopped() throws InterruptedException { Synchronizer syncMock = Mockito.mock(Synchronizer.class); - SplitsWorker splitsWorker = new SplitsWorkerImp(syncMock); - splitsWorker.start(); - splitsWorker.addToQueue(1585956698457L); + SplitParser splitParser = new SplitParser(); + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + SplitCacheProducer splitCacheProducer = Mockito.mock(SplitCacheProducer.class); + RuleBasedSegmentCache ruleBasedSegmentCache = Mockito.mock(RuleBasedSegmentCache.class); + TelemetryRuntimeProducer telemetryRuntimeProducer = Mockito.mock(InMemoryTelemetryStorage.class); + FeatureFlagsWorker featureFlagsWorker = new FeatureFlagWorkerImp(syncMock, splitParser, ruleBasedSegmentParser, splitCacheProducer, ruleBasedSegmentCache, 
telemetryRuntimeProducer, FLAG_SETS_FILTER); + featureFlagsWorker.start(); + featureFlagsWorker.addToQueue(new CommonChangeNotification(GenericNotificationData.builder() + .changeNumber(1585956698457L) + .type(IncomingNotification.Type.SPLIT_UPDATE) + .build(), Split.class)); Thread.sleep(500); - splitsWorker.stop(); + featureFlagsWorker.stop(); Thread.sleep(500); - splitsWorker.addToQueue(1585956698467L); - Mockito.verify(syncMock, Mockito.times(1)).refreshSplits(1585956698457L); // Previous one! + featureFlagsWorker.addToQueue(new CommonChangeNotification(GenericNotificationData.builder() + .changeNumber(1585956698467L) + .type(IncomingNotification.Type.SPLIT_UPDATE) + .build(), Split.class)); + Mockito.verify(syncMock, Mockito.times(1)).refreshSplits(Mockito.anyObject(), Mockito.anyObject()); // Previous one! Mockito.reset(syncMock); - splitsWorker.start(); - splitsWorker.addToQueue(1585956698477L); + featureFlagsWorker.start(); + featureFlagsWorker.addToQueue(new CommonChangeNotification(GenericNotificationData.builder() + .changeNumber(1585956698477L) + .type(IncomingNotification.Type.SPLIT_UPDATE) + .build(), Split.class)); Thread.sleep(500); - Mockito.verify(syncMock, Mockito.times(1)).refreshSplits(1585956698477L); - splitsWorker.stop(); + Mockito.verify(syncMock, Mockito.times(1)).refreshSplits(Mockito.anyObject(), Mockito.anyObject()); + featureFlagsWorker.stop(); } -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/inputValidation/FallbackTreatmentValidatorTest.java b/client/src/test/java/io/split/inputValidation/FallbackTreatmentValidatorTest.java new file mode 100644 index 000000000..6bfc4bd9d --- /dev/null +++ b/client/src/test/java/io/split/inputValidation/FallbackTreatmentValidatorTest.java @@ -0,0 +1,50 @@ +package io.split.inputValidation; + +import io.split.client.dtos.FallbackTreatment; +import org.junit.Assert; +import org.junit.Test; + +import java.util.HashMap; + +public class FallbackTreatmentValidatorTest { + + 
@Test + public void isValidTreatmentWorks() { + Assert.assertEquals("123asHs_-sdf", FallbackTreatmentValidator.isValidTreatment("123asHs_-sdf", "test")); + + Assert.assertEquals(null, FallbackTreatmentValidator.isValidTreatment(new String(new char[101]).replace('\0', 'w'), "test")); + Assert.assertEquals(null, FallbackTreatmentValidator.isValidTreatment("", "test")); + Assert.assertEquals(null, FallbackTreatmentValidator.isValidTreatment(null, "test")); + Assert.assertEquals(null, FallbackTreatmentValidator.isValidTreatment("12@3asHs_-sdf", "test")); + Assert.assertEquals(null, FallbackTreatmentValidator.isValidTreatment("12#3asHs_-sdf", "test")); + Assert.assertEquals(null, FallbackTreatmentValidator.isValidTreatment("12!3asHs_-sdf", "test")); + Assert.assertEquals(null, FallbackTreatmentValidator.isValidTreatment("12^3asHs_-sdf", "test")); + } + + @Test + public void isValidByFlagTreatmentWorks() { + HashMap byRef = new HashMap() {{ put("flag", new FallbackTreatment("12#2")); }}; + Assert.assertEquals(new HashMap<>(), FallbackTreatmentValidator.isValidByFlagTreatment(byRef, "test")); + + byRef = new HashMap() {{ put("flag", new FallbackTreatment("12%2")); }}; + Assert.assertEquals(new HashMap<>(), FallbackTreatmentValidator.isValidByFlagTreatment(byRef, "test")); + + byRef = new HashMap() {{ put("flag", new FallbackTreatment(new String(new char[101]).replace('\0', 'w'))); }}; + Assert.assertEquals(new HashMap<>(), FallbackTreatmentValidator.isValidByFlagTreatment(byRef, "test")); + + byRef = new HashMap() {{ put("flag", new FallbackTreatment("12&2")); }}; + Assert.assertEquals(new HashMap<>(), FallbackTreatmentValidator.isValidByFlagTreatment(byRef, "test")); + + byRef = new HashMap() {{ put("", new FallbackTreatment("on")); }}; + Assert.assertEquals(new HashMap<>(), FallbackTreatmentValidator.isValidByFlagTreatment(byRef, "test")); + + byRef = new HashMap() {{ put("12#dd", new FallbackTreatment("on")); }}; + Assert.assertEquals(new HashMap<>(), 
FallbackTreatmentValidator.isValidByFlagTreatment(byRef, "test")); + + byRef = new HashMap() {{ put(new String(new char[101]).replace('\0', 'w'), new FallbackTreatment("on")); }}; + Assert.assertEquals(new HashMap<>(), FallbackTreatmentValidator.isValidByFlagTreatment(byRef, "test")); + + byRef = new HashMap() {{ put("flag", new FallbackTreatment("123asHs_-sdf")); }}; + Assert.assertEquals("123asHs_-sdf", FallbackTreatmentValidator.isValidByFlagTreatment(byRef, "test").get("flag").getTreatment()); + } +} diff --git a/client/src/test/java/io/split/inputValidation/ImpressionPropertiesValidatorTest.java b/client/src/test/java/io/split/inputValidation/ImpressionPropertiesValidatorTest.java new file mode 100644 index 000000000..2ab6eb94b --- /dev/null +++ b/client/src/test/java/io/split/inputValidation/ImpressionPropertiesValidatorTest.java @@ -0,0 +1,42 @@ +package io.split.inputValidation; + +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import io.split.grammar.Treatments; +import org.junit.Assert; +import org.junit.Test; + +import java.util.HashMap; +import java.util.Map; + +public class ImpressionPropertiesValidatorTest { + + @Test(expected = IllegalStateException.class) + public void testConstructorException() { + ImpressionPropertiesValidator iv = new ImpressionPropertiesValidator(); + } + + @Test + public void propertiesAreValidWorks() { + Map properties = new HashMap() + {{ + put("prop1", 1); + put("prop2", 2L); + put("prop3", 7.56); + put("prop4", "something"); + put("prop5", true); + put("prop6", null); + }}; + ImpressionPropertiesValidator.ImpressionPropertiesValidatorResult result = ImpressionPropertiesValidator.propertiesAreValid(properties); + Assert.assertTrue(result.getSuccess()); + Assert.assertEquals(1063, result.getEventSize()); + Assert.assertEquals(6, result.getValue().size()); + + // when properties size is > Event.MAX_PROPERTIES_LENGTH_BYTES + for (int i = 7; i <= (32 * 1024); i++) { + properties.put("prop" + i, 
"something-" + i); + } + result = ImpressionPropertiesValidator.propertiesAreValid(properties); + Assert.assertFalse(result.getSuccess()); + } +} diff --git a/client/src/test/java/io/split/inputValidation/SplitNameValidatorTest.java b/client/src/test/java/io/split/inputValidation/SplitNameValidatorTest.java index d8db6567c..cb325dfc6 100644 --- a/client/src/test/java/io/split/inputValidation/SplitNameValidatorTest.java +++ b/client/src/test/java/io/split/inputValidation/SplitNameValidatorTest.java @@ -22,9 +22,29 @@ public void isValidWorks() { result = SplitNameValidator.isValid("", "test"); Assert.assertFalse(result.isPresent()); + // test regex + result = SplitNameValidator.isValid("te#fg", "test"); + Assert.assertFalse(result.isPresent()); + + // test regex + result = SplitNameValidator.isValid("te@fg", "test"); + Assert.assertFalse(result.isPresent()); + + // test regex + result = SplitNameValidator.isValid("te&fg", "test"); + Assert.assertFalse(result.isPresent()); + + // test regex + result = SplitNameValidator.isValid("te)fg", "test"); + Assert.assertFalse(result.isPresent()); + + // test length + result = SplitNameValidator.isValid(new String(new char[101]).replace('\0', 'w'), "test"); + Assert.assertFalse(result.isPresent()); + // when split name have empty spaces - result = SplitNameValidator.isValid(" split name test ", "test"); + result = SplitNameValidator.isValid(" split-name-test ", "test"); Assert.assertTrue(result.isPresent()); - Assert.assertEquals("split name test", result.get()); + Assert.assertEquals("split-name-test", result.get()); } } diff --git a/client/src/test/java/io/split/service/HttpPostImpTest.java b/client/src/test/java/io/split/service/HttpPostImpTest.java index 3f2b5396e..9efdb21cb 100644 --- a/client/src/test/java/io/split/service/HttpPostImpTest.java +++ b/client/src/test/java/io/split/service/HttpPostImpTest.java @@ -1,14 +1,11 @@ package io.split.service; import io.split.TestHelper; -import 
io.split.telemetry.domain.enums.HTTPLatenciesEnum; +import io.split.client.RequestDecorator; +import io.split.client.utils.SDKMetadata; import io.split.telemetry.domain.enums.HttpParamsWrapper; -import io.split.telemetry.domain.enums.LastSynchronizationRecordsEnum; -import io.split.telemetry.domain.enums.ResourceEnum; import io.split.telemetry.storage.InMemoryTelemetryStorage; -import io.split.telemetry.storage.TelemetryRuntimeProducer; import io.split.telemetry.storage.TelemetryStorage; -import junit.framework.TestCase; import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; import org.apache.hc.core5.http.HttpStatus; import org.junit.Assert; @@ -18,30 +15,41 @@ import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.net.URI; +import java.net.URISyntaxException; -public class HttpPostImpTest{ +public class HttpPostImpTest { private static final String URL = "www.split.io"; @Test - public void testPostWith200() throws InvocationTargetException, NoSuchMethodException, IllegalAccessException, IOException { - CloseableHttpClient client =TestHelper.mockHttpClient(URL, HttpStatus.SC_OK); + public void testPostWith200() throws InvocationTargetException, NoSuchMethodException, IllegalAccessException, + IOException, URISyntaxException { + CloseableHttpClient client = TestHelper.mockHttpClient(URL, HttpStatus.SC_OK); + SplitHttpClient splitHttpClient = SplitHttpClientImpl.create(client, new RequestDecorator(null), "qwerty", + metadata()); TelemetryStorage telemetryStorage = new InMemoryTelemetryStorage(); - HttpPostImp httpPostImp = new HttpPostImp(client, telemetryStorage); + HttpPostImp httpPostImp = new HttpPostImp(splitHttpClient, telemetryStorage); httpPostImp.post(URI.create(URL), new Object(), "Metrics", HttpParamsWrapper.TELEMETRY); Mockito.verify(client, Mockito.times(1)).execute(Mockito.any()); - Assert.assertNotEquals(0, telemetryStorage.getLastSynchronization().get_telemetry()); - Assert.assertEquals(1, 
telemetryStorage.popHTTPLatencies().get_telemetry().stream().mapToInt(Long::intValue).sum()); + Assert.assertNotEquals(0, telemetryStorage.getLastSynchronization().getTelemetry()); + Assert.assertEquals(1, telemetryStorage.popHTTPLatencies().getTelemetry().stream().mapToInt(Long::intValue).sum()); } @Test - public void testPostWith400() throws InvocationTargetException, NoSuchMethodException, IllegalAccessException, IOException { - CloseableHttpClient client =TestHelper.mockHttpClient(URL, HttpStatus.SC_CLIENT_ERROR); + public void testPostWith400() throws InvocationTargetException, NoSuchMethodException, IllegalAccessException, + IOException, URISyntaxException { + CloseableHttpClient client = TestHelper.mockHttpClient(URL, HttpStatus.SC_CLIENT_ERROR); + SplitHttpClient splitHttpClient = SplitHttpClientImpl.create(client, new RequestDecorator(null), "qwerty", + metadata()); TelemetryStorage telemetryStorage = new InMemoryTelemetryStorage(); - HttpPostImp httpPostImp = new HttpPostImp(client, telemetryStorage); + HttpPostImp httpPostImp = new HttpPostImp(splitHttpClient, telemetryStorage); httpPostImp.post(URI.create(URL), new Object(), "Metrics", HttpParamsWrapper.TELEMETRY); Mockito.verify(client, Mockito.times(1)).execute(Mockito.any()); + Assert.assertEquals(1, telemetryStorage.popHTTPErrors().getTelemetry().get(Long.valueOf(HttpStatus.SC_CLIENT_ERROR)).intValue()); + } - Assert.assertEquals(1, telemetryStorage.popHTTPErrors().get_telemetry().get(Long.valueOf(HttpStatus.SC_CLIENT_ERROR)).intValue()); + private SDKMetadata metadata() { + return new SDKMetadata("java-1.2.3", "1.2.3.4", "someIP"); } -} \ No newline at end of file + +} diff --git a/client/src/test/java/io/split/service/HttpSplitClientTest.java b/client/src/test/java/io/split/service/HttpSplitClientTest.java new file mode 100644 index 000000000..746ae5c01 --- /dev/null +++ b/client/src/test/java/io/split/service/HttpSplitClientTest.java @@ -0,0 +1,171 @@ +package io.split.service; + +import 
com.google.gson.Gson; +import com.google.gson.reflect.TypeToken; +import io.split.TestHelper; +import io.split.client.RequestDecorator; +import io.split.client.dtos.*; +import io.split.client.impressions.Impression; +import io.split.client.utils.Json; +import io.split.client.utils.SDKMetadata; +import io.split.engine.common.FetchOptions; +import org.apache.hc.client5.http.classic.methods.HttpPost; +import org.apache.hc.client5.http.classic.methods.HttpUriRequest; +import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; +import org.apache.hc.core5.http.HttpStatus; +import org.junit.Assert; +import org.junit.Test; +import org.mockito.ArgumentCaptor; + +import java.io.IOException; +import java.io.InputStreamReader; +import java.lang.reflect.InvocationTargetException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.*; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsEqual.equalTo; +import static org.hamcrest.core.IsInstanceOf.instanceOf; +import static org.mockito.Mockito.verify; + +public class HttpSplitClientTest { + + @Test + public void testGetWithSpecialCharacters() throws URISyntaxException, InvocationTargetException, + NoSuchMethodException, IllegalAccessException, IOException { + URI rootTarget = URI.create("https://api.split.io/splitChanges?since=1234567&rbSince=-1"); + CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("split-change-special-characters.json", + HttpStatus.SC_OK); + RequestDecorator decorator = new RequestDecorator(null); + + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClientMock, decorator, "qwerty", metadata()); + Map> additionalHeaders = Collections.singletonMap("AdditionalHeader", + Collections.singletonList("add")); + + SplitHttpResponse splitHttpResponse = splitHtpClient.get(rootTarget, + new 
FetchOptions.Builder().cacheControlHeaders(true).build(), additionalHeaders); + SplitChange change = Json.fromJson(splitHttpResponse.body(), SplitChange.class); + + ArgumentCaptor captor = ArgumentCaptor.forClass(HttpUriRequest.class); + verify(httpClientMock).execute(captor.capture()); + HttpUriRequest request = captor.getValue(); + assertThat(request.getFirstHeader("AdditionalHeader").getValue(), is(equalTo("add"))); + + SplitHttpResponse.Header[] headers = splitHttpResponse.responseHeaders(); + assertThat(headers[0].getName(), is(equalTo("Via"))); + assertThat(headers[0].getValues().get(0), is(equalTo("HTTP/1.1 m_proxy_rio1"))); + Assert.assertNotNull(change); + Assert.assertEquals(1, change.featureFlags.d.size()); + Assert.assertNotNull(change.featureFlags.d.get(0)); + + Split split = change.featureFlags.d.get(0); + Map configs = split.configurations; + Assert.assertEquals(2, configs.size()); + Assert.assertEquals("{\"test\": \"blue\",\"grüne Straße\": 13}", configs.get("on")); + Assert.assertEquals("{\"test\": \"blue\",\"size\": 15}", configs.get("off")); + Assert.assertEquals(2, split.sets.size()); + } + + @Test + public void testGetError() throws URISyntaxException, InvocationTargetException, NoSuchMethodException, + IllegalAccessException, IOException { + URI rootTarget = URI.create("https://api.split.io/splitChanges?since=1234567"); + CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("split-change-special-characters.json", + HttpStatus.SC_INTERNAL_SERVER_ERROR); + RequestDecorator decorator = new RequestDecorator(null); + + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClientMock, decorator, "qwerty", metadata()); + SplitHttpResponse splitHttpResponse = splitHtpClient.get(rootTarget, + new FetchOptions.Builder().cacheControlHeaders(true).build(), null); + Assert.assertEquals(HttpStatus.SC_INTERNAL_SERVER_ERROR, (long) splitHttpResponse.statusCode()); + } + + @Test(expected = 
IllegalStateException.class) + public void testException() throws URISyntaxException, InvocationTargetException, NoSuchMethodException, + IllegalAccessException, IOException { + URI rootTarget = URI.create("https://api.split.io/splitChanges?since=1234567"); + CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("split-change-special-characters.json", + HttpStatus.SC_INTERNAL_SERVER_ERROR); + RequestDecorator decorator = null; + + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClientMock, decorator, "qwerty", metadata()); + splitHtpClient.get(rootTarget, + new FetchOptions.Builder().cacheControlHeaders(true).build(), null); + } + + @Test + public void testPost() throws URISyntaxException, IOException, IllegalAccessException, NoSuchMethodException, + InvocationTargetException { + URI rootTarget = URI.create("https://kubernetesturl.com/split/api/testImpressions/bulk"); + + // Setup response mock + CloseableHttpClient httpClient = TestHelper.mockHttpClient("", HttpStatus.SC_OK); + RequestDecorator decorator = new RequestDecorator(null); + + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClient, decorator, "qwerty", metadata()); + + // Send impressions + List toSend = Arrays.asList(new TestImpressions("t1", Arrays.asList( + KeyImpression.fromImpression(new Impression("k1", null, "t1", "on", 123L, "r1", 456L, null, null)), + KeyImpression.fromImpression(new Impression("k2", null, "t1", "on", 123L, "r1", 456L, null, null)), + KeyImpression.fromImpression(new Impression("k3", null, "t1", "on", 123L, "r1", 456L, null, null)))), + new TestImpressions("t2", Arrays.asList( + KeyImpression.fromImpression(new Impression("k1", null, "t2", "on", 123L, "r1", 456L, null, null)), + KeyImpression.fromImpression(new Impression("k2", null, "t2", "on", 123L, "r1", 456L, null, null)), + KeyImpression.fromImpression(new Impression("k3", null, "t2", "on", 123L, 
"r1", 456L, null, null))))); + + Map> additionalHeaders = Collections.singletonMap("SplitSDKImpressionsMode", + Collections.singletonList("OPTIMIZED")); + SplitHttpResponse splitHttpResponse = splitHtpClient.post(rootTarget, Json.toJson(toSend), + additionalHeaders); + + // Capture outgoing request and validate it + ArgumentCaptor captor = ArgumentCaptor.forClass(HttpUriRequest.class); + verify(httpClient).execute(captor.capture()); + HttpUriRequest request = captor.getValue(); + assertThat(request.getUri(), + is(equalTo(URI.create("https://kubernetesturl.com/split/api/testImpressions/bulk")))); + assertThat(request.getFirstHeader("SplitSDKImpressionsMode").getValue(), is(equalTo("OPTIMIZED"))); + assertThat(request, instanceOf(HttpPost.class)); + HttpPost asPostRequest = (HttpPost) request; + InputStreamReader reader = new InputStreamReader(asPostRequest.getEntity().getContent()); + Gson gson = new Gson(); + List payload = gson.fromJson(reader, new TypeToken>() { + }.getType()); + assertThat(payload.size(), is(equalTo(2))); + Assert.assertEquals(200, (long) splitHttpResponse.statusCode()); + } + + @Test + public void testPosttNoExceptionOnHttpErrorCode() throws URISyntaxException, InvocationTargetException, + NoSuchMethodException, IllegalAccessException, IOException { + URI rootTarget = URI.create("https://api.split.io/splitChanges?since=1234567"); + CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("split-change-special-characters.json", + HttpStatus.SC_INTERNAL_SERVER_ERROR); + RequestDecorator decorator = new RequestDecorator(null); + + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClientMock, decorator, "qwerty", metadata()); + SplitHttpResponse splitHttpResponse = splitHtpClient.post(rootTarget, + Json.toJson(Arrays.asList(new String[] { "A", "B", "C", "D" })), null); + Assert.assertEquals(500, (long) splitHttpResponse.statusCode()); + + } + + 
@Test(expected = IOException.class) + public void testPosttException() throws URISyntaxException, InvocationTargetException, NoSuchMethodException, + IllegalAccessException, IOException { + URI rootTarget = URI.create("https://api.split.io/splitChanges?since=1234567"); + CloseableHttpClient httpClientMock = TestHelper.mockHttpClient("split-change-special-characters.json", + HttpStatus.SC_INTERNAL_SERVER_ERROR); + + SplitHttpClient splitHtpClient = SplitHttpClientImpl.create(httpClientMock, null, "qwerty", metadata()); + splitHtpClient.post(rootTarget, Json.toJson(Arrays.asList(new String[] { "A", "B", "C", "D" })), null); + } + + private SDKMetadata metadata() { + return new SDKMetadata("java-1.2.3", "1.2.3.4", "someIP"); + } + +} diff --git a/client/src/test/java/io/split/storages/memory/InMemoryCacheTest.java b/client/src/test/java/io/split/storages/memory/InMemoryCacheTest.java index 77dd9e653..5589d71da 100644 --- a/client/src/test/java/io/split/storages/memory/InMemoryCacheTest.java +++ b/client/src/test/java/io/split/storages/memory/InMemoryCacheTest.java @@ -2,6 +2,8 @@ import com.google.common.collect.Lists; import io.split.client.dtos.Partition; +import io.split.client.interceptors.FlagSetsFilter; +import io.split.client.interceptors.FlagSetsFilterImpl; import io.split.engine.ConditionsTestUtil; import io.split.engine.experiments.ParsedCondition; import io.split.engine.experiments.ParsedSplit; @@ -12,11 +14,18 @@ import org.junit.Before; import org.junit.Test; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; public class InMemoryCacheTest { @@ -25,12 
+34,13 @@ public class InMemoryCacheTest { @Before public void before() { - _cache = new InMemoryCacheImp(); + FlagSetsFilter flagSetsFilter = new FlagSetsFilterImpl(new HashSet<>(Arrays.asList("set1", "set2"))); + _cache = new InMemoryCacheImp(flagSetsFilter); } @Test public void putAndGetSplit() { - ParsedSplit split = getParsedSplit("split_name"); + ParsedSplit split = getParsedSplitWithFlagSetsSameStorage("split_name"); _cache.putMany(Stream.of(split).collect(Collectors.toList())); ParsedSplit result = _cache.get("split_name"); @@ -42,8 +52,8 @@ public void putAndGetSplit() { @Test public void putDuplicateSplit() { - ParsedSplit split = getParsedSplit("split_name"); - ParsedSplit split2 = getParsedSplit("split_name"); + ParsedSplit split = getParsedSplitWithFlagSetsSameStorage("split_name"); + ParsedSplit split2 = getParsedSplitWithFlagSetsSameStorage("split_name"); _cache.putMany(Stream.of(split, split2).collect(Collectors.toList())); int result = _cache.getAll().size(); @@ -53,7 +63,7 @@ public void putDuplicateSplit() { @Test public void getInExistentSplit() { - ParsedSplit split = getParsedSplit("split_name"); + ParsedSplit split = getParsedSplitWithFlagSetsSameStorage("split_name"); _cache.putMany(Stream.of(split).collect(Collectors.toList())); ParsedSplit result = _cache.get("split_name_2"); @@ -62,15 +72,37 @@ public void getInExistentSplit() { @Test public void removeSplit() { - ParsedSplit split = getParsedSplit("split_name"); - ParsedSplit split2 = getParsedSplit("split_name_2"); - _cache.putMany(Stream.of(split, split2).collect(Collectors.toList())); + ParsedSplit splitWithFlagSetsSameStorage = getParsedSplitWithFlagSetsSameStorage("split_name"); + ParsedSplit split2WithFlagSetsSameStorage = getParsedSplitWithFlagSetsSameStorage("split_name_2"); + ParsedSplit splitWithFlagSetsNotSameStorage = getParsedSplitWithFlagSetsNotSameStorage("split_name_3"); + ParsedSplit splitFlagSetsEmpty = getParsedSplitFlagSetsEmpty("split_name_4"); + ParsedSplit 
splitFlagSetsNull = getParsedSplitFlagSetsNull("split_name_5"); + _cache.putMany(Stream.of(splitWithFlagSetsSameStorage, split2WithFlagSetsSameStorage, splitWithFlagSetsNotSameStorage, + splitFlagSetsEmpty, splitFlagSetsNull).collect(Collectors.toList())); int result = _cache.getAll().size(); - Assert.assertEquals(2, result); + Assert.assertEquals(5, result); + Map> namesByFlagSets = _cache.getNamesByFlagSets(Arrays.asList("set1", "set2")); + Assert.assertTrue(namesByFlagSets.get("set1").contains("split_name")); + Assert.assertTrue(namesByFlagSets.get("set2").contains("split_name")); _cache.remove("split_name"); result = _cache.getAll().size(); + Assert.assertEquals(4, result); + namesByFlagSets = _cache.getNamesByFlagSets(Arrays.asList("set1", "set2")); + Assert.assertFalse(namesByFlagSets.get("set1").contains("split_name")); + Assert.assertFalse(namesByFlagSets.get("set2").contains("split_name")); + + _cache.remove("split_name_3"); + result = _cache.getAll().size(); + Assert.assertEquals(3, result); + + _cache.remove("split_name_4"); + result = _cache.getAll().size(); + Assert.assertEquals(2, result); + + _cache.remove("split_name_5"); + result = _cache.getAll().size(); Assert.assertEquals(1, result); Assert.assertNull(_cache.get("split_name")); @@ -90,10 +122,10 @@ public void setAndGetChangeNumber() { @Test public void getMany() { - ParsedSplit split = getParsedSplit("split_name_1"); - ParsedSplit split2 = getParsedSplit("split_name_2"); - ParsedSplit split3 = getParsedSplit("split_name_3"); - ParsedSplit split4 = getParsedSplit("split_name_4"); + ParsedSplit split = getParsedSplitWithFlagSetsSameStorage("split_name_1"); + ParsedSplit split2 = getParsedSplitWithFlagSetsSameStorage("split_name_2"); + ParsedSplit split3 = getParsedSplitWithFlagSetsSameStorage("split_name_3"); + ParsedSplit split4 = getParsedSplitWithFlagSetsSameStorage("split_name_4"); _cache.putMany(Stream.of(split, split2, split3, split4).collect(Collectors.toList())); List names = new 
ArrayList<>(); @@ -107,11 +139,10 @@ public void getMany() { @Test public void trafficTypesExist() { - - ParsedSplit split = ParsedSplit.createParsedSplitForTests("splitName_1", 0, false, "default_treatment", new ArrayList<>(), "tt", 123, 2); - ParsedSplit split2 = ParsedSplit.createParsedSplitForTests("splitName_2", 0, false, "default_treatment", new ArrayList<>(), "tt", 123, 2); - ParsedSplit split3 = ParsedSplit.createParsedSplitForTests("splitName_3", 0, false, "default_treatment", new ArrayList<>(), "tt_2", 123, 2); - ParsedSplit split4 = ParsedSplit.createParsedSplitForTests("splitName_4", 0, false, "default_treatment", new ArrayList<>(), "tt_3", 123, 2); + ParsedSplit split = ParsedSplit.createParsedSplitForTests("splitName_1", 0, false, "default_treatment", new ArrayList<>(), "tt", 123, 2, null, true, null); + ParsedSplit split2 = ParsedSplit.createParsedSplitForTests("splitName_2", 0, false, "default_treatment", new ArrayList<>(), "tt", 123, 2, null, true, null); + ParsedSplit split3 = ParsedSplit.createParsedSplitForTests("splitName_3", 0, false, "default_treatment", new ArrayList<>(), "tt_2", 123, 2, null, true, null); + ParsedSplit split4 = ParsedSplit.createParsedSplitForTests("splitName_4", 0, false, "default_treatment", new ArrayList<>(), "tt_3", 123, 2, null, true, null); _cache.putMany(Stream.of(split, split2, split3, split4).collect(Collectors.toList())); assertTrue(_cache.trafficTypeExists("tt_2")); @@ -132,10 +163,10 @@ public void testSegmentNames() { ParsedCondition parsedCondition1 = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new UserDefinedSegmentMatcher(EMPLOYEES)), fullyRollout); ParsedCondition parsedCondition2 = ParsedCondition.createParsedConditionForTests(CombiningMatcher.of(new UserDefinedSegmentMatcher(EMPLOYEES+"2")), turnOff); - ParsedSplit split = ParsedSplit.createParsedSplitForTests("splitName_1", 0, false, "default_treatment", Stream.of(parsedCondition1).collect(Collectors.toList()), "tt", 123, 2); - 
ParsedSplit split2 = ParsedSplit.createParsedSplitForTests("splitName_2", 0, false, "default_treatment", Stream.of(parsedCondition2).collect(Collectors.toList()), "tt", 123, 2); - ParsedSplit split3 = ParsedSplit.createParsedSplitForTests("splitName_3", 0, false, "default_treatment", Stream.of(parsedCondition1).collect(Collectors.toList()), "tt_2", 123, 2); - ParsedSplit split4 = ParsedSplit.createParsedSplitForTests("splitName_4", 0, false, "default_treatment", Stream.of(parsedCondition2).collect(Collectors.toList()), "tt_3", 123, 2); + ParsedSplit split = ParsedSplit.createParsedSplitForTests("splitName_1", 0, false, "default_treatment", Stream.of(parsedCondition1).collect(Collectors.toList()), "tt", 123, 2, null, true, null); + ParsedSplit split2 = ParsedSplit.createParsedSplitForTests("splitName_2", 0, false, "default_treatment", Stream.of(parsedCondition2).collect(Collectors.toList()), "tt", 123, 2, null, true, null); + ParsedSplit split3 = ParsedSplit.createParsedSplitForTests("splitName_3", 0, false, "default_treatment", Stream.of(parsedCondition1).collect(Collectors.toList()), "tt_2", 123, 2, null, true, null); + ParsedSplit split4 = ParsedSplit.createParsedSplitForTests("splitName_4", 0, false, "default_treatment", Stream.of(parsedCondition2).collect(Collectors.toList()), "tt_3", 123, 2, null, true, null); _cache.putMany(Stream.of(split, split2, split3, split4).collect(Collectors.toList())); @@ -146,13 +177,25 @@ public void testSegmentNames() { } - private ParsedSplit getParsedSplit(String splitName) { - return ParsedSplit.createParsedSplitForTests(splitName, 0, false, "default_treatment", new ArrayList<>(), "tt", 123, 2); + private ParsedSplit getParsedSplitWithFlagSetsSameStorage(String splitName) { + return ParsedSplit.createParsedSplitForTests(splitName, 0, false, "default_treatment", new ArrayList<>(), "tt", 123, 2, new HashSet<>(Arrays.asList("set1", "set2")), true, null); + } + + private ParsedSplit getParsedSplitWithFlagSetsNotSameStorage(String 
splitName) { + return ParsedSplit.createParsedSplitForTests(splitName, 0, false, "default_treatment", new ArrayList<>(), "tt", 123, 2, new HashSet<>(Arrays.asList("set3")), true, null); + } + + private ParsedSplit getParsedSplitFlagSetsNull(String splitName) { + return ParsedSplit.createParsedSplitForTests(splitName, 0, false, "default_treatment", new ArrayList<>(), "tt", 123, 2, null, true, null); + } + + private ParsedSplit getParsedSplitFlagSetsEmpty(String splitName) { + return ParsedSplit.createParsedSplitForTests(splitName, 0, false, "default_treatment", new ArrayList<>(), "tt", 123, 2, new HashSet<>(), true, null); } @Test public void testPutMany() { - _cache.putMany(Stream.of(getParsedSplit("split_name_1"),getParsedSplit("split_name_2"),getParsedSplit("split_name_3"),getParsedSplit("split_name_4")).collect(Collectors.toList())); + _cache.putMany(Stream.of(getParsedSplitWithFlagSetsSameStorage("split_name_1"), getParsedSplitWithFlagSetsSameStorage("split_name_2"), getParsedSplitWithFlagSetsSameStorage("split_name_3"), getParsedSplitWithFlagSetsSameStorage("split_name_4")).collect(Collectors.toList())); List names = Stream.of("split_name_1","split_name_2","split_name_3","split_name_4").collect(Collectors.toList()); Map result = _cache.fetchMany(names); @@ -161,7 +204,7 @@ public void testPutMany() { @Test public void testIncreaseTrafficType() { - ParsedSplit split = ParsedSplit.createParsedSplitForTests("splitName_1", 0, false, "default_treatment", new ArrayList<>(), "tt", 123, 2); + ParsedSplit split = ParsedSplit.createParsedSplitForTests("splitName_1", 0, false, "default_treatment", new ArrayList<>(), "tt", 123, 2, new HashSet<>(), true, null); _cache.putMany(Stream.of(split).collect(Collectors.toList())); _cache.increaseTrafficType("tt_2"); assertTrue(_cache.trafficTypeExists("tt_2")); @@ -169,9 +212,34 @@ public void testIncreaseTrafficType() { @Test public void testDecreaseTrafficType() { - ParsedSplit split = 
ParsedSplit.createParsedSplitForTests("splitName_1", 0, false, "default_treatment", new ArrayList<>(), "tt", 123, 2); + ParsedSplit split = ParsedSplit.createParsedSplitForTests("splitName_1", 0, false, "default_treatment", new ArrayList<>(), "tt", 123, 2, new HashSet<>(), true, null); _cache.putMany(Stream.of(split).collect(Collectors.toList())); _cache.decreaseTrafficType("tt"); assertFalse(_cache.trafficTypeExists("tt_2")); } -} + + @Test + public void testGetNamesByFlagSets() { + ParsedSplit split = ParsedSplit.createParsedSplitForTests("splitName_1", 0, false, "default_treatment", new ArrayList<>(), "tt", 123, 2, new HashSet<>(Arrays.asList("set1", "set2", "set3")), true, null); + ParsedSplit split2 = ParsedSplit.createParsedSplitForTests("splitName_2", 0, false, "default_treatment", new ArrayList<>(), "tt", 123, 2, new HashSet<>(Arrays.asList("set1")), true, null); + ParsedSplit split3 = ParsedSplit.createParsedSplitForTests("splitName_3", 0, false, "default_treatment", new ArrayList<>(), "tt_2", 123, 2, new HashSet<>(Arrays.asList("set4")), true, null); + ParsedSplit split4 = ParsedSplit.createParsedSplitForTests("splitName_4", 0, false, "default_treatment", new ArrayList<>(), "tt_3", 123, 2, new HashSet<>(Arrays.asList("set2")), true, null); + + _cache.putMany(Stream.of(split, split2, split3, split4).collect(Collectors.toList())); + Map> namesByFlagSets = _cache.getNamesByFlagSets(new ArrayList<>(Arrays.asList("set1", "set2", "set3"))); + assertTrue(namesByFlagSets.get("set1").contains("splitName_1")); + assertTrue(namesByFlagSets.get("set1").contains("splitName_2")); + assertFalse(namesByFlagSets.get("set1").contains("splitName_3")); + assertFalse(namesByFlagSets.get("set1").contains("splitName_4")); + assertTrue(namesByFlagSets.keySet().contains("set3")); + assertNull(namesByFlagSets.get("set3")); + + _cache.remove("splitName_2"); + namesByFlagSets = _cache.getNamesByFlagSets(new ArrayList<>(Arrays.asList("set1", "set2", "set3"))); + 
assertFalse(namesByFlagSets.get("set1").contains("splitName_2")); + _cache.remove("splitName_1"); + namesByFlagSets = _cache.getNamesByFlagSets(new ArrayList<>(Arrays.asList("set1", "set2", "set3"))); + assertFalse(namesByFlagSets.get("set1").contains("splitName_1")); + assertTrue(namesByFlagSets.get("set1").isEmpty()); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/storages/memory/RuleBasedSegmentCacheInMemoryImplTest.java b/client/src/test/java/io/split/storages/memory/RuleBasedSegmentCacheInMemoryImplTest.java new file mode 100644 index 000000000..492cc8aeb --- /dev/null +++ b/client/src/test/java/io/split/storages/memory/RuleBasedSegmentCacheInMemoryImplTest.java @@ -0,0 +1,72 @@ +package io.split.storages.memory; + +import com.google.common.collect.Sets; +import io.split.client.dtos.ExcludedSegments; +import io.split.client.dtos.MatcherCombiner; +import io.split.engine.experiments.ParsedRuleBasedSegment; +import io.split.engine.experiments.ParsedCondition; +import io.split.client.dtos.ConditionType; + +import io.split.engine.matchers.AttributeMatcher; +import io.split.engine.matchers.CombiningMatcher; +import io.split.engine.matchers.UserDefinedSegmentMatcher; +import io.split.engine.matchers.strings.WhitelistMatcher; +import junit.framework.TestCase; +import org.junit.Test; +import com.google.common.collect.Lists; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; + +public class RuleBasedSegmentCacheInMemoryImplTest extends TestCase { + + @Test + public void testAddAndDeleteSegment(){ + RuleBasedSegmentCacheInMemoryImp ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); + AttributeMatcher whiteListMatcher = AttributeMatcher.vanilla(new WhitelistMatcher(Lists.newArrayList("test_1", "admin"))); + CombiningMatcher whitelistCombiningMatcher = new CombiningMatcher(MatcherCombiner.AND, Lists.newArrayList(whiteListMatcher)); + ParsedRuleBasedSegment 
parsedRuleBasedSegment = new ParsedRuleBasedSegment("sample_rule_based_segment", + Lists.newArrayList(new ParsedCondition(ConditionType.WHITELIST, whitelistCombiningMatcher, null, "label")),"user", + 123, Lists.newArrayList("mauro@test.io","gaston@test.io"), Lists.newArrayList()); + ruleBasedSegmentCache.update(Lists.newArrayList(parsedRuleBasedSegment), null, 123); + assertEquals(123, ruleBasedSegmentCache.getChangeNumber()); + assertEquals(parsedRuleBasedSegment, ruleBasedSegmentCache.get("sample_rule_based_segment")); + assertTrue(ruleBasedSegmentCache.contains(new HashSet<>(Arrays.asList("sample_rule_based_segment")))); + assertFalse(ruleBasedSegmentCache.contains(new HashSet<>(Arrays.asList("sample_rule_based_segment", "123")))); + + ruleBasedSegmentCache.update(null, Lists.newArrayList("sample_rule_based_segment"), 124); + assertEquals(124, ruleBasedSegmentCache.getChangeNumber()); + assertEquals(null, ruleBasedSegmentCache.get("sample_rule_based_segment")); + } + + @Test + public void testMultipleSegment(){ + List excludedSegments = new ArrayList<>(); + excludedSegments.add(new ExcludedSegments("standard","segment1")); + excludedSegments.add(new ExcludedSegments("standard","segment3")); + + RuleBasedSegmentCacheInMemoryImp ruleBasedSegmentCache = new RuleBasedSegmentCacheInMemoryImp(); + AttributeMatcher whiteListMatcher = AttributeMatcher.vanilla(new WhitelistMatcher(Lists.newArrayList("test_1", "admin"))); + CombiningMatcher whitelistCombiningMatcher = new CombiningMatcher(MatcherCombiner.AND, Lists.newArrayList(whiteListMatcher)); + ParsedRuleBasedSegment parsedRuleBasedSegment1 = new ParsedRuleBasedSegment("sample_rule_based_segment", + Lists.newArrayList(new ParsedCondition(ConditionType.WHITELIST, whitelistCombiningMatcher, null, "label")),"user", + 123, Lists.newArrayList("mauro@test.io","gaston@test.io"), excludedSegments); + + excludedSegments.clear(); + excludedSegments.add(new ExcludedSegments("standard","segment1")); + excludedSegments.add(new 
ExcludedSegments("standard","segment2")); + AttributeMatcher segmentMatcher = AttributeMatcher.vanilla(new UserDefinedSegmentMatcher("employees")); + CombiningMatcher segmentCombiningMatcher = new CombiningMatcher(MatcherCombiner.AND, Lists.newArrayList(segmentMatcher)); + ParsedRuleBasedSegment parsedRuleBasedSegment2 = new ParsedRuleBasedSegment("another_rule_based_segment", + Lists.newArrayList(new ParsedCondition(ConditionType.WHITELIST, segmentCombiningMatcher, null, "label")),"user", + 123, Lists.newArrayList("mauro@test.io","gaston@test.io"), excludedSegments); + + ruleBasedSegmentCache.update(Lists.newArrayList(parsedRuleBasedSegment1, parsedRuleBasedSegment2), null, 123); + assertEquals(Lists.newArrayList("another_rule_based_segment", "sample_rule_based_segment"), ruleBasedSegmentCache.ruleBasedSegmentNames()); + assertEquals(Sets.newHashSet("segment2", "segment1", "employees"), ruleBasedSegmentCache.getSegments()); + assertTrue(ruleBasedSegmentCache.contains(new HashSet<>(Arrays.asList("sample_rule_based_segment", "another_rule_based_segment")))); + assertTrue(ruleBasedSegmentCache.contains(new HashSet<>(Arrays.asList("sample_rule_based_segment")))); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/storages/pluggable/CustomStorageWrapperHasPipeline.java b/client/src/test/java/io/split/storages/pluggable/CustomStorageWrapperHasPipeline.java new file mode 100644 index 000000000..4a40061ba --- /dev/null +++ b/client/src/test/java/io/split/storages/pluggable/CustomStorageWrapperHasPipeline.java @@ -0,0 +1,196 @@ +package io.split.storages.pluggable; + +import com.google.common.collect.Maps; +import pluggable.CustomStorageWrapper; +import pluggable.HasPipelineSupport; +import pluggable.Pipeline; +import pluggable.Result; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.Callable; +import java.util.concurrent.ConcurrentMap; 
+ +public class CustomStorageWrapperHasPipeline implements CustomStorageWrapper, HasPipelineSupport { + + private static final String COUNTS = "SPLITIO.impressions.count"; + private static final String FLAG_SET = "SPLITIO.flagSet"; + private final ConcurrentMap _impressionsCount = Maps.newConcurrentMap(); + private final ConcurrentMap> _flagSets = Maps.newConcurrentMap(); + + public CustomStorageWrapperHasPipeline() { + _flagSets.put("SPLITIO.flagSet.set1", new HashSet<>(new ArrayList<>(Arrays.asList("flag1", "flag2")))); + } + + @Override + public String get(String key) throws Exception { + return null; + } + + @Override + public List getMany(List keys) { + return null; + } + + @Override + public void set(String key, String item) throws Exception { + + } + + @Override + public void hSet(String key, String field, String item) { + + } + + @Override + public void delete(List keys) { + + } + + @Override + public String getAndSet(String key, String item) { + return null; + } + + @Override + public Set getKeysByPrefix(String prefix) { + return null; + } + + @Override + public long increment(String key, long value) { + return 0; + } + + @Override + public long decrement(String key, long value) { + return 0; + } + + @Override + public long hIncrement(String key, String field, long value) { + return 0; + } + + @Override + public long pushItems(String key, List items) { + return 0; + } + + @Override + public List popItems(String key, long count) { + return null; + } + + @Override + public long getItemsCount(String key) { + return 0; + } + + @Override + public boolean itemContains(String key, String item) { + return false; + } + + @Override + public void addItems(String key, List items) { + + } + + @Override + public void removeItems(String key, List items) { + + } + + @Override + public List getItems(List keys) { + return null; + } + + @Override + public Set getMembers(String key) { + return null; + } + + @Override + public boolean connect() { + return false; + } + + 
@Override + public boolean disconnect() { + return false; + } + + @Override + public Pipeline pipeline() { + return new CustomPipeline(); + } + + public ConcurrentMap getImpressionsCount(){ + return _impressionsCount; + } + private class CustomPipeline implements Pipeline { + + private List> methodsToExecute; + + public CustomPipeline() { + this.methodsToExecute = new ArrayList<>(); + } + + @Override + public List exec() throws Exception { + List result = new ArrayList<>(); + for (Callable method : methodsToExecute) { + result.add(new Result(method.call())); + } + return result; + } + + @Override + public void hIncrement(String key, String field, long value) { + methodsToExecute.add(() -> { return hIncrementToExecute(key, field, value);}); + } + + public long hIncrementToExecute(String key, String field, long value){ + String storageKey = getStorage(key); + Long count = 0L; + if (storageKey.equals(COUNTS)){ + if(_impressionsCount.containsKey(field)){ + count = _impressionsCount.get(field); + } + count += value; + _impressionsCount.put(field, count); + } + return count; + } + + @Override + public void getMembers(String key) { + methodsToExecute.add(() -> { return getMembersToExecute(key);}); + } + + private HashSet getMembersToExecute(String key) { + String storageKey = getStorage(key); + if(storageKey.equals(FLAG_SET)) { + return _flagSets.get(key); + } + return new HashSet<>(); + } + + private String getStorage(String key) { + if(key.startsWith(COUNTS)) + return COUNTS; + if(key.startsWith(FLAG_SET)) + return FLAG_SET; + return ""; + } + + public ConcurrentMap getImpressionsCount(){ + return _impressionsCount; + } + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/storages/pluggable/CustomStorageWrapperImp.java b/client/src/test/java/io/split/storages/pluggable/CustomStorageWrapperImp.java index ceb95b7fc..8733f3d12 100644 --- a/client/src/test/java/io/split/storages/pluggable/CustomStorageWrapperImp.java +++ 
b/client/src/test/java/io/split/storages/pluggable/CustomStorageWrapperImp.java @@ -10,21 +10,25 @@ import io.split.client.dtos.ConditionType; import io.split.client.dtos.Split; import io.split.client.dtos.Status; +import io.split.client.dtos.RuleBasedSegment; import io.split.client.utils.Json; import io.split.engine.ConditionsTestUtil; import io.split.engine.segments.SegmentImp; import io.split.grammar.Treatments; -import io.split.storages.pluggable.domain.ConfigConsumer; import io.split.storages.pluggable.domain.EventConsumer; import io.split.storages.pluggable.domain.ImpressionConsumer; import io.split.storages.pluggable.domain.PrefixAdapter; import io.split.telemetry.domain.enums.MethodEnum; import io.split.telemetry.utils.AtomicLongArray; import pluggable.CustomStorageWrapper; +import pluggable.NotPipelinedImpl; +import pluggable.Pipeline; import java.lang.reflect.Modifier; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -34,15 +38,26 @@ public class CustomStorageWrapperImp implements CustomStorageWrapper { public static final int MAX_LATENCY_BUCKET_COUNT = 23; private static final String TELEMETRY = "SPLITIO.telemetry"; + + private static final String TELEMETRY_INIT = "SPLITIO.telemetry.init"; + private static final String LATENCIES = "SPLITIO.telemetry.latencies"; private static final String SPLIT = "SPLITIO.split."; + private static final String RULE_BASED_SEGMENT = "SPLITIO.rbsegment"; + private static final String RULE_BASED_SEGMENTS = "SPLITIO.rbsegments"; private static final String SPLITS = "SPLITIO.splits.*"; private static final String SEGMENT = "SPLITIO.segment."; private static final String IMPRESSIONS = "SPLITIO.impressions"; private static final String EVENTS = "SPLITIO.events"; + private static final String COUNTS = "SPLITIO.impressions.counts"; + private static final String FLAG_SET = "SPLITIO.flagSet"; private Map 
splitsStorage = new HashMap<>(); + private Map ruleBasedSegmentStorage = new HashMap<>(); private Map segmentStorage = new HashMap<>(); private final ConcurrentMap _methodLatencies = Maps.newConcurrentMap(); - private ConfigConsumer _telemetryInit = null; + private final ConcurrentMap _latencies = Maps.newConcurrentMap(); + private final ConcurrentMap _impressionsCount = Maps.newConcurrentMap(); + private final ConcurrentMap _config = Maps.newConcurrentMap(); + private final ConcurrentMap> _flagSets = Maps.newConcurrentMap(); private List imps = new ArrayList<>(); private List events = new ArrayList<>(); private final Gson _json = new GsonBuilder() @@ -70,6 +85,9 @@ public String get(String key) throws Exception { if(value.equals(SPLIT)){ return _json.toJson(splitsStorage.get(key)); } + if(value.equals(RULE_BASED_SEGMENT)){ + return _json.toJson(ruleBasedSegmentStorage.get(key)); + } return ""; } @@ -89,11 +107,14 @@ public List getMany(List keys) throws Exception { @Override public void set(String key, String item) throws Exception { + + } + + @Override + public void hSet(String key, String field, String item) { String value = getStorage(key); - if(value.equals(TELEMETRY)) { - if (key.contains("init")) { - _telemetryInit = _json.fromJson(item, ConfigConsumer.class); - } + if (value.equals(TELEMETRY_INIT)){ + _config.put(field, item); } } @@ -118,16 +139,6 @@ public Set getKeysByPrefix(String prefix) throws Exception { @Override public long increment(String key, long value) throws Exception { - String keyValue = getStorage(key); - if(keyValue.equals(TELEMETRY)){ - if(key.contains("latencies")){ - String[] items = key.substring(key.indexOf("::")).replace("{", "").replace("}", "").split("/"); - if(_methodLatencies.containsKey(items[3])) { - _methodLatencies.get(items[3]).increment(Integer.parseInt(items[4])); - } - } - - } return 0; } @@ -136,6 +147,28 @@ public long decrement(String key, long value) throws Exception { return 0; } + @Override + public long 
hIncrement(String key, String field, long value) throws Exception { + String storageKey = getStorage(key); + Long count = 0L; + if (storageKey.equals(COUNTS)){ + if(_impressionsCount.containsKey(field)){ + count = _impressionsCount.get(field); + } + count += value; + _impressionsCount.put(field, count); + return count; + } + if(storageKey.equals(LATENCIES)){ + if(_latencies.containsKey(field)){ + count = _latencies.get(field); + } + count += value; + _latencies.put(field, count); + } + return count; + } + @Override public long pushItems(String key, List items) throws Exception { String value = getStorage(key); @@ -183,6 +216,15 @@ public List getItems(List keys) throws Exception { return null; } + @Override + public Set getMembers(String key) { + String storageKey = getStorage(key); + if(storageKey.equals(FLAG_SET)) { + return _flagSets.get(key); + } + return new HashSet<>(); + } + @Override public boolean connect() throws Exception { return true; @@ -193,27 +235,47 @@ public boolean disconnect() throws Exception { return false; } + @Override + public Pipeline pipeline() throws Exception { + return new NotPipelinedImpl(this); + } + private String getStorage(String key) { if(key.startsWith(SPLITS)) return SPLITS; else if(key.startsWith(SPLIT)) return SPLIT; + else if(key.startsWith(RULE_BASED_SEGMENT)) + return RULE_BASED_SEGMENT; + else if(key.startsWith(RULE_BASED_SEGMENTS)) + return RULE_BASED_SEGMENTS; + else if (key.startsWith(LATENCIES)) + return LATENCIES; + else if (key.startsWith(TELEMETRY_INIT)) + return TELEMETRY_INIT; else if(key.startsWith(TELEMETRY)) return TELEMETRY; else if(key.startsWith(SEGMENT)) return SEGMENT; + else if(key.startsWith(COUNTS)) + return COUNTS; else if(key.startsWith(IMPRESSIONS)) return IMPRESSIONS; else if(key.startsWith(EVENTS)) return EVENTS; + else if(key.startsWith(FLAG_SET)) + return FLAG_SET; return ""; } private void updateCache(){ Condition condition = 
ConditionsTestUtil.makeUserDefinedSegmentCondition(ConditionType.WHITELIST,"segmentName" , Lists.newArrayList(ConditionsTestUtil.partition("on", 100))); segmentStorage.put(PrefixAdapter.buildSegment("segmentName"), new SegmentImp(9874654L, "segmentName", Lists.newArrayList("key", "key2"))); - splitsStorage.put(PrefixAdapter.buildSplitKey("first.name"), makeSplit("first.name", 123, Lists.newArrayList(condition), 456478976L)); - splitsStorage.put(PrefixAdapter.buildSplitKey("second.name"), makeSplit("second.name", 321, Lists.newArrayList(), 568613L)); + splitsStorage.put(PrefixAdapter.buildSplitKey("first-name"), makeSplit("first-name", 123, Lists.newArrayList(condition), 456478976L)); + splitsStorage.put(PrefixAdapter.buildSplitKey("second-name"), makeSplit("second-name", 321, Lists.newArrayList(), 568613L)); + splitsStorage.put(PrefixAdapter.buildSplitKey("rbs_flag"), Json.fromJson("{\"changeNumber\": 10, \"trafficTypeName\": \"user\", \"name\": \"rbs_flag\", \"trafficAllocation\": 100, \"trafficAllocationSeed\": 1828377380, \"seed\": -286617921, \"status\": \"ACTIVE\", \"killed\": false, \"defaultTreatment\": \"off\", \"algo\": 2, \"conditions\": [{\"conditionType\": \"ROLLOUT\", \"matcherGroup\": {\"combiner\": \"AND\", \"matchers\": [{\"keySelector\": {\"trafficType\": \"user\"},\"matcherType\": \"IN_RULE_BASED_SEGMENT\", \"negate\": false, \"userDefinedSegmentMatcherData\": {\"segmentName\": \"sample_rule_based_segment\"}}]},\"partitions\": [{\"treatment\": \"on\", \"size\": 100},{\"treatment\": \"off\", \"size\": 0}],\"label\": \"in rule based segment sample_rule_based_segment\"},{\"conditionType\": \"ROLLOUT\", \"matcherGroup\": {\"combiner\": \"AND\", \"matchers\": [{\"keySelector\": {\"trafficType\": \"user\"},\"matcherType\": \"ALL_KEYS\", \"negate\": false}]},\"partitions\": [{\"treatment\": \"on\", \"size\": 0},{\"treatment\": \"off\", \"size\": 100}],\"label\": \"default rule\"}],\"configurations\": {},\"sets\": [],\"impressionsDisabled\": false}", 
Split.class)); + ruleBasedSegmentStorage.put(PrefixAdapter.buildRuleBasedSegmentKey("sample_rule_based_segment"), Json.fromJson( "{\"changeNumber\":5,\"name\":\"sample_rule_based_segment\",\"status\":\"ACTIVE\",\"trafficTypeName\":\"user\",\"excluded\":{\"keys\":[\"mauro@split.io\"],\"segments\":[]},\"conditions\":[{\"conditionType\":\"ROLLOUT\",\"matcherGroup\":{\"combiner\":\"AND\",\"matchers\":[{\"keySelector\":{\"trafficType\":\"user\",\"attribute\":\"email\"},\"matcherType\":\"ENDS_WITH\",\"negate\":false,\"whitelistMatcherData\":{\"whitelist\":[\"@split.io\"]}}]}}]}", RuleBasedSegment.class)); + _flagSets.put("SPLITIO.flagSet.set1", new HashSet<>(new ArrayList<>(Arrays.asList("flag1", "flag2")))); } private Split makeSplit(String name, int seed, List conditions, long changeNumber) { @@ -232,8 +294,12 @@ private Split makeSplit(String name, int seed, List conditions, long return split; } - public ConcurrentMap get_methodLatencies() { - return _methodLatencies; + public ConcurrentMap getLatencies() { + return _latencies; + } + + public ConcurrentMap getImpressionsCount() { + return _impressionsCount; } public List getImps() { @@ -244,7 +310,7 @@ public List getEvents() { return events; } - public ConfigConsumer get_telemetryInit() { - return _telemetryInit; + public ConcurrentMap getConfig() { + return _config; } -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomImpressionAdapterProducerTest.java b/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomImpressionAdapterProducerTest.java index 79127d4c6..2926d1ad3 100644 --- a/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomImpressionAdapterProducerTest.java +++ b/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomImpressionAdapterProducerTest.java @@ -2,7 +2,7 @@ import io.split.client.dtos.KeyImpression; import io.split.client.dtos.Metadata; -import 
io.split.storages.pluggable.domain.SafeUserStorageWrapper; +import io.split.storages.pluggable.domain.UserStorageWrapper; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -18,19 +18,19 @@ public class UserCustomImpressionAdapterProducerTest { private CustomStorageWrapper _customStorageWrapper; private UserCustomImpressionAdapterProducer _impressionAdapterProducer; - private SafeUserStorageWrapper _safeUserStorageWrapper; + private UserStorageWrapper _userStorageWrapper; @Before public void setUp() throws NoSuchFieldException, IllegalAccessException { _customStorageWrapper = Mockito.mock(CustomStorageWrapper.class); - _safeUserStorageWrapper = Mockito.mock(SafeUserStorageWrapper.class); + _userStorageWrapper = Mockito.mock(UserStorageWrapper.class); _impressionAdapterProducer = new UserCustomImpressionAdapterProducer(_customStorageWrapper, Mockito.mock(Metadata.class)); - Field userCustomImpressionAdapterProducer = UserCustomImpressionAdapterProducer.class.getDeclaredField("_safeUserStorageWrapper"); + Field userCustomImpressionAdapterProducer = UserCustomImpressionAdapterProducer.class.getDeclaredField("_userStorageWrapper"); userCustomImpressionAdapterProducer.setAccessible(true); Field modifiersField = Field.class.getDeclaredField("modifiers"); modifiersField.setAccessible(true); modifiersField.setInt(userCustomImpressionAdapterProducer, userCustomImpressionAdapterProducer.getModifiers() & ~Modifier.FINAL); - userCustomImpressionAdapterProducer.set(_impressionAdapterProducer, _safeUserStorageWrapper); + userCustomImpressionAdapterProducer.set(_impressionAdapterProducer, _userStorageWrapper); Metadata metadata = new Metadata(true, "SDK-version"); Field userCustomMetadata = UserCustomImpressionAdapterProducer.class.getDeclaredField("_metadata"); userCustomMetadata.setAccessible(true); @@ -43,17 +43,17 @@ public void setUp() throws NoSuchFieldException, IllegalAccessException { @Test public void testPut() { KeyImpression keyImpression = 
new KeyImpression(); - Mockito.when(_safeUserStorageWrapper.pushItems(Mockito.anyString(), Mockito.anyObject())).thenReturn(1L); + Mockito.when(_userStorageWrapper.pushItems(Mockito.anyString(), Mockito.anyObject())).thenReturn(1L); Assert.assertEquals(1L, _impressionAdapterProducer.put(Stream.of(keyImpression).collect(Collectors.toList()))); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).pushItems(Mockito.anyString(), Mockito.anyObject()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).pushItems(Mockito.anyString(), Mockito.anyObject()); } @Test public void testPutMany() { KeyImpression keyImpression = new KeyImpression(); - Mockito.when(_safeUserStorageWrapper.pushItems(Mockito.anyString(), Mockito.anyObject())).thenReturn(1L); + Mockito.when(_userStorageWrapper.pushItems(Mockito.anyString(), Mockito.anyObject())).thenReturn(1L); Assert.assertEquals(1L, _impressionAdapterProducer.put(Stream.of(keyImpression).collect(Collectors.toList()))); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).pushItems(Mockito.anyString(), Mockito.anyObject()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).pushItems(Mockito.anyString(), Mockito.anyObject()); } } \ No newline at end of file diff --git a/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomRuleBasedSegmentAdapterConsumerTest.java b/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomRuleBasedSegmentAdapterConsumerTest.java new file mode 100644 index 000000000..b9baee955 --- /dev/null +++ b/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomRuleBasedSegmentAdapterConsumerTest.java @@ -0,0 +1,188 @@ +package io.split.storages.pluggable.adapters; + +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import io.split.client.dtos.*; +import io.split.client.utils.Json; +import io.split.engine.ConditionsTestUtil; +import io.split.engine.experiments.*; +import io.split.storages.pluggable.domain.PrefixAdapter; 
+import io.split.storages.pluggable.domain.UserStorageWrapper; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; +import pluggable.CustomStorageWrapper; + +import java.lang.reflect.Field; +import java.lang.reflect.Modifier; +import java.util.*; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicLong; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static io.split.TestHelper.makeRuleBasedSegment; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +public class UserCustomRuleBasedSegmentAdapterConsumerTest { + + private static final String RULE_BASED_SEGMENT_NAME = "RuleBasedSegmentName"; + private CustomStorageWrapper _customStorageWrapper; + private UserStorageWrapper _userStorageWrapper; + private UserCustomRuleBasedSegmentAdapterConsumer _userCustomRuleBasedSegmentAdapterConsumer; + + @Before + public void setUp() throws NoSuchFieldException, IllegalAccessException { + _customStorageWrapper = Mockito.mock(CustomStorageWrapper.class); + _userStorageWrapper = Mockito.mock(UserStorageWrapper.class); + _userCustomRuleBasedSegmentAdapterConsumer = new UserCustomRuleBasedSegmentAdapterConsumer(_customStorageWrapper); + Field userCustomRuleBasedSegmentAdapterConsumer = UserCustomRuleBasedSegmentAdapterConsumer.class.getDeclaredField("_userStorageWrapper"); + userCustomRuleBasedSegmentAdapterConsumer.setAccessible(true); + Field modifiersField = Field.class.getDeclaredField("modifiers"); + modifiersField.setAccessible(true); + modifiersField.setInt(userCustomRuleBasedSegmentAdapterConsumer, userCustomRuleBasedSegmentAdapterConsumer.getModifiers() & ~Modifier.FINAL); + userCustomRuleBasedSegmentAdapterConsumer.set(_userCustomRuleBasedSegmentAdapterConsumer, _userStorageWrapper); + } + + @Test + public void testGetChangeNumber() { + 
Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildRuleBasedSegmentChangeNumber())).thenReturn(getLongAsJson(120L)); + Assert.assertEquals(120L, _userCustomRuleBasedSegmentAdapterConsumer.getChangeNumber()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); + } + + @Test + public void testGetChangeNumberWithWrapperFailing() { + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildRuleBasedSegmentChangeNumber())).thenReturn(null); + Assert.assertEquals(-1L, _userCustomRuleBasedSegmentAdapterConsumer.getChangeNumber()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); + } + + @Test + public void testGetChangeNumberWithGsonFailing() { + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildRuleBasedSegmentChangeNumber())).thenReturn("a"); + Assert.assertEquals(-1L, _userCustomRuleBasedSegmentAdapterConsumer.getChangeNumber()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); + } + + @Test + public void testGetRuleBasedSegment() { + RuleBasedSegmentParser ruleBasedSegmentParser = new RuleBasedSegmentParser(); + RuleBasedSegment ruleBasedSegment = getRuleBasedSegment(RULE_BASED_SEGMENT_NAME); + ParsedRuleBasedSegment expected = ruleBasedSegmentParser.parse(ruleBasedSegment); + ConcurrentMap rbsCollection = Maps.newConcurrentMap(); + rbsCollection.put(RULE_BASED_SEGMENT_NAME, expected); + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildRuleBasedSegmentKey(RULE_BASED_SEGMENT_NAME))).thenReturn(getRuleBasedSegmentAsJson(ruleBasedSegment)); + Mockito.when(_userStorageWrapper.getKeysByPrefix("SPLITIO.rbsegment*")).thenReturn(new HashSet<>(Arrays.asList(RULE_BASED_SEGMENT_NAME))); + ParsedRuleBasedSegment result = _userCustomRuleBasedSegmentAdapterConsumer.get(RULE_BASED_SEGMENT_NAME); + Assert.assertEquals(expected, result); + assertTrue(_userCustomRuleBasedSegmentAdapterConsumer.contains(new HashSet<>(Arrays.asList(RULE_BASED_SEGMENT_NAME)))); + 
assertFalse(_userCustomRuleBasedSegmentAdapterConsumer.contains(new HashSet<>(Arrays.asList(RULE_BASED_SEGMENT_NAME, "123")))); + + } + + @Test + public void testGetRuleBasedSegmentNotFound() { + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildRuleBasedSegmentKey(RULE_BASED_SEGMENT_NAME))).thenReturn(null); + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildRuleBasedSegmentKey(RULE_BASED_SEGMENT_NAME))).thenReturn(null); + ParsedRuleBasedSegment result = _userCustomRuleBasedSegmentAdapterConsumer.get(RULE_BASED_SEGMENT_NAME); + Assert.assertNull(result); + } + + @Test + public void testGetAll() { + RuleBasedSegment ruleBasedSegment = getRuleBasedSegment(RULE_BASED_SEGMENT_NAME); + RuleBasedSegment ruleBasedSegment2 = getRuleBasedSegment(RULE_BASED_SEGMENT_NAME+"2"); + List listResultExpected = Stream.of(ruleBasedSegment, ruleBasedSegment2).collect(Collectors.toList()); + Set keysResult = Stream.of(RULE_BASED_SEGMENT_NAME, RULE_BASED_SEGMENT_NAME+"2").collect(Collectors.toSet()); + Mockito.when(_userStorageWrapper.getKeysByPrefix(Mockito.anyObject())). + thenReturn(keysResult); + List getManyExpected = Stream.of(Json.toJson(ruleBasedSegment), Json.toJson(ruleBasedSegment2)).collect(Collectors.toList()); + Mockito.when(_userStorageWrapper.getMany(Mockito.anyObject())). + thenReturn(getManyExpected); + List ruleBasedSegmentsResult = (List) _userCustomRuleBasedSegmentAdapterConsumer.getAll(); + Assert.assertNotNull(ruleBasedSegmentsResult); + Assert.assertEquals(listResultExpected.size(), ruleBasedSegmentsResult.size()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).getKeysByPrefix(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).getMany(Mockito.anyObject()); + } + + @Test + public void testGetAllWithWrapperFailing() { + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildGetAllSplit())). 
+ thenReturn(null); + List ruleBasedSegmentsResult = (List) _userCustomRuleBasedSegmentAdapterConsumer.getAll(); + Assert.assertNotNull(ruleBasedSegmentsResult); + Assert.assertEquals(0, ruleBasedSegmentsResult.size()); + } + + @Test + public void testGetAllNullOnWrappers() { + Mockito.when(_userStorageWrapper.getKeysByPrefix(PrefixAdapter.buildGetAllRuleBasedSegment())). + thenReturn(null); + List ruleBasedSegmentsResult = (List) _userCustomRuleBasedSegmentAdapterConsumer.getAll(); + Assert.assertEquals(0, ruleBasedSegmentsResult.size()); + } + + @Test + public void testGetAllNullOnGetMany() { + Set keysResult = Stream.of(RULE_BASED_SEGMENT_NAME, RULE_BASED_SEGMENT_NAME+"2").collect(Collectors.toSet()); + Mockito.when(_userStorageWrapper.getKeysByPrefix(Mockito.anyObject())). + thenReturn(keysResult); + Mockito.when(_userStorageWrapper.getMany(Mockito.anyObject())). + thenReturn(null); + List ruleBasedSegmentsResult = (List) _userCustomRuleBasedSegmentAdapterConsumer.getAll(); + Assert.assertEquals(0, ruleBasedSegmentsResult.size()); + } + + @Test + public void testGetSegments() { + Condition condition = ConditionsTestUtil.makeUserDefinedSegmentCondition(ConditionType.WHITELIST, "employee", + null, false); + RuleBasedSegment ruleBasedSegment = makeRuleBasedSegment("rbs", Arrays.asList(condition), 1); + List getManyExpected = Stream.of(Json.toJson(ruleBasedSegment)).collect(Collectors.toList()); + Mockito.when(_userStorageWrapper.getMany(Mockito.anyObject())). 
+ thenReturn(getManyExpected); + HashSet segmentResult = (HashSet) _userCustomRuleBasedSegmentAdapterConsumer.getSegments(); + assertTrue(segmentResult.contains("employee")); + } + + @Test + public void testGetruleBasedSegmentNames() { + RuleBasedSegment ruleBasedSegment = getRuleBasedSegment(RULE_BASED_SEGMENT_NAME); + RuleBasedSegment ruleBasedSegment2 = getRuleBasedSegment(RULE_BASED_SEGMENT_NAME+"2"); + Set keysResult = Stream.of(RULE_BASED_SEGMENT_NAME, RULE_BASED_SEGMENT_NAME+"2").collect(Collectors.toSet()); + Mockito.when(_userStorageWrapper.getKeysByPrefix(Mockito.anyObject())). + thenReturn(keysResult); + List getManyExpected = Stream.of(Json.toJson(ruleBasedSegment), Json.toJson(ruleBasedSegment2)).collect(Collectors.toList()); + Mockito.when(_userStorageWrapper.getMany(Mockito.anyObject())). + thenReturn(getManyExpected); + List ruleBasedSegmentsResult = _userCustomRuleBasedSegmentAdapterConsumer.ruleBasedSegmentNames(); + Assert.assertNotNull(ruleBasedSegmentsResult); + Assert.assertEquals(keysResult.size(), ruleBasedSegmentsResult.size()); + Assert.assertEquals(keysResult, new HashSet<>(ruleBasedSegmentsResult)); + } + + public static String getLongAsJson(long value) { + return Json.toJson(value); + } + + public static String getRuleBasedSegmentAsJson(RuleBasedSegment ruleBasedSegment) { + return Json.toJson(ruleBasedSegment); + } + + private RuleBasedSegment getRuleBasedSegment(String name) { + ArrayList set = Lists.newArrayList("sms", "voice"); + Condition c = ConditionsTestUtil.containsString("user", + "products", + set, + false, + null + ); + + List conditions = Lists.newArrayList(c); + return makeRuleBasedSegment(name, conditions, 1); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomSegmentAdapterConsumerTest.java b/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomSegmentAdapterConsumerTest.java index 2b2e1fba4..a6ae64510 100644 --- 
a/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomSegmentAdapterConsumerTest.java +++ b/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomSegmentAdapterConsumerTest.java @@ -2,7 +2,7 @@ import io.split.client.utils.Json; import io.split.storages.pluggable.domain.PrefixAdapter; -import io.split.storages.pluggable.domain.SafeUserStorageWrapper; +import io.split.storages.pluggable.domain.UserStorageWrapper; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -19,58 +19,58 @@ public class UserCustomSegmentAdapterConsumerTest { private static final String SEGMENT_NAME = "SegmentName"; private CustomStorageWrapper _customStorageWrapper; - private SafeUserStorageWrapper _safeUserStorageWrapper; + private UserStorageWrapper _userStorageWrapper; private UserCustomSegmentAdapterConsumer _userCustomSegmentAdapterConsumer; @Before public void setUp() throws NoSuchFieldException, IllegalAccessException { _customStorageWrapper = Mockito.mock(CustomStorageWrapper.class); - _safeUserStorageWrapper = Mockito.mock(SafeUserStorageWrapper.class); + _userStorageWrapper = Mockito.mock(UserStorageWrapper.class); _userCustomSegmentAdapterConsumer = new UserCustomSegmentAdapterConsumer(_customStorageWrapper); - Field userCustomSegmentAdapterConsumer = UserCustomSegmentAdapterConsumer.class.getDeclaredField("_safeUserStorageWrapper"); + Field userCustomSegmentAdapterConsumer = UserCustomSegmentAdapterConsumer.class.getDeclaredField("_userStorageWrapper"); userCustomSegmentAdapterConsumer.setAccessible(true); Field modifiersField = Field.class.getDeclaredField("modifiers"); modifiersField.setAccessible(true); modifiersField.setInt(userCustomSegmentAdapterConsumer, userCustomSegmentAdapterConsumer.getModifiers() & ~Modifier.FINAL); - userCustomSegmentAdapterConsumer.set(_userCustomSegmentAdapterConsumer, _safeUserStorageWrapper); + userCustomSegmentAdapterConsumer.set(_userCustomSegmentAdapterConsumer, _userStorageWrapper); } @Test 
public void testGetChangeNumber() { - Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildSegment(SEGMENT_NAME))).thenReturn(Json.toJson(120L)); + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildSegment(SEGMENT_NAME))).thenReturn(Json.toJson(120L)); Assert.assertEquals(120L, _userCustomSegmentAdapterConsumer.getChangeNumber(SEGMENT_NAME)); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); } @Test public void testIsInSegment() { - Mockito.when(_safeUserStorageWrapper.itemContains(Mockito.anyString(), Mockito.anyString())).thenReturn(true); + Mockito.when(_userStorageWrapper.itemContains(Mockito.anyString(), Mockito.anyString())).thenReturn(true); Assert.assertTrue(_userCustomSegmentAdapterConsumer.isInSegment(SEGMENT_NAME, "item")); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).itemContains(Mockito.anyString(), Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).itemContains(Mockito.anyString(), Mockito.anyString()); } @Test public void testGetSegmentCount() { - Mockito.when(_safeUserStorageWrapper.getKeysByPrefix(Mockito.anyString())).thenReturn(Collections.singleton(SEGMENT_NAME)); + Mockito.when(_userStorageWrapper.getKeysByPrefix(Mockito.anyString())).thenReturn(Collections.singleton(SEGMENT_NAME)); Assert.assertEquals(1, _userCustomSegmentAdapterConsumer.getSegmentCount()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).getKeysByPrefix(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).getKeysByPrefix(Mockito.anyString()); } @Test public void testGetKeyCount() { - Mockito.when(_safeUserStorageWrapper.getKeysByPrefix(Mockito.anyString())).thenReturn(Stream.of(SEGMENT_NAME, SEGMENT_NAME+"2").collect(Collectors.toSet())); - Mockito.when(_safeUserStorageWrapper.getItemsCount(Mockito.anyString())).thenReturn(1L).thenReturn(3L); + 
Mockito.when(_userStorageWrapper.getKeysByPrefix(Mockito.anyString())).thenReturn(Stream.of(SEGMENT_NAME, SEGMENT_NAME+"2").collect(Collectors.toSet())); + Mockito.when(_userStorageWrapper.getItemsCount(Mockito.anyString())).thenReturn(1L).thenReturn(3L); Assert.assertEquals(4, _userCustomSegmentAdapterConsumer.getKeyCount()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).getKeysByPrefix(Mockito.anyString()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(2)).getItemsCount(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).getKeysByPrefix(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(2)).getItemsCount(Mockito.anyString()); } @Test public void testGetKeyCountNullResponse() { - Mockito.when(_safeUserStorageWrapper.getKeysByPrefix(Mockito.anyString())).thenReturn(null); - Mockito.when(_safeUserStorageWrapper.getItemsCount(Mockito.anyString())).thenReturn(1L).thenReturn(3L); + Mockito.when(_userStorageWrapper.getKeysByPrefix(Mockito.anyString())).thenReturn(null); + Mockito.when(_userStorageWrapper.getItemsCount(Mockito.anyString())).thenReturn(1L).thenReturn(3L); Assert.assertEquals(0, _userCustomSegmentAdapterConsumer.getKeyCount()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).getKeysByPrefix(Mockito.anyString()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(0)).getItemsCount(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).getKeysByPrefix(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(0)).getItemsCount(Mockito.anyString()); } } \ No newline at end of file diff --git a/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomSegmentAdapterProducerTest.java b/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomSegmentAdapterProducerTest.java index 41d05546b..3adcf9b39 100644 --- a/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomSegmentAdapterProducerTest.java +++ 
b/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomSegmentAdapterProducerTest.java @@ -2,7 +2,7 @@ import io.split.client.utils.Json; import io.split.storages.pluggable.domain.PrefixAdapter; -import io.split.storages.pluggable.domain.SafeUserStorageWrapper; +import io.split.storages.pluggable.domain.UserStorageWrapper; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -17,40 +17,40 @@ public class UserCustomSegmentAdapterProducerTest { private static final String SEGMENT_NAME = "SegmentName"; private CustomStorageWrapper _customStorageWrapper; - private SafeUserStorageWrapper _safeUserStorageWrapper; + private UserStorageWrapper _userStorageWrapper; private UserCustomSegmentAdapterProducer _userCustomSegmentAdapterProducer; @Before public void setUp() throws NoSuchFieldException, IllegalAccessException { _customStorageWrapper = Mockito.mock(CustomStorageWrapper.class); - _safeUserStorageWrapper = Mockito.mock(SafeUserStorageWrapper.class); + _userStorageWrapper = Mockito.mock(UserStorageWrapper.class); _userCustomSegmentAdapterProducer = new UserCustomSegmentAdapterProducer(_customStorageWrapper); - Field userCustomSegmentAdapterProducer = UserCustomSegmentAdapterProducer.class.getDeclaredField("_safeUserStorageWrapper"); + Field userCustomSegmentAdapterProducer = UserCustomSegmentAdapterProducer.class.getDeclaredField("_userStorageWrapper"); userCustomSegmentAdapterProducer.setAccessible(true); Field modifiersField = Field.class.getDeclaredField("modifiers"); modifiersField.setAccessible(true); modifiersField.setInt(userCustomSegmentAdapterProducer, userCustomSegmentAdapterProducer.getModifiers() & ~Modifier.FINAL); - userCustomSegmentAdapterProducer.set(_userCustomSegmentAdapterProducer, _safeUserStorageWrapper); + userCustomSegmentAdapterProducer.set(_userCustomSegmentAdapterProducer, _userStorageWrapper); } @Test public void testGetChangeNumber() { - 
Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildSegment(SEGMENT_NAME))).thenReturn(Json.toJson(120L)); + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildSegment(SEGMENT_NAME))).thenReturn(Json.toJson(120L)); Assert.assertEquals(120L, _userCustomSegmentAdapterProducer.getChangeNumber(SEGMENT_NAME)); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); } @Test public void testUpdateSegment() { _userCustomSegmentAdapterProducer.updateSegment(SEGMENT_NAME, new ArrayList<>(), new ArrayList<>(), 12L); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).addItems(Mockito.anyString(), Mockito.anyObject()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).removeItems(Mockito.anyString(), Mockito.anyObject()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).set(Mockito.anyString(), Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).addItems(Mockito.anyString(), Mockito.anyObject()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).removeItems(Mockito.anyString(), Mockito.anyObject()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).set(Mockito.anyString(), Mockito.anyString()); } @Test public void testSetChangeNumber() { _userCustomSegmentAdapterProducer.setChangeNumber(SEGMENT_NAME, 1L); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).set(Mockito.anyString(), Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).set(Mockito.anyString(), Mockito.anyString()); } } \ No newline at end of file diff --git a/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomSplitAdapterConsumerTest.java b/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomSplitAdapterConsumerTest.java index a8b09fb70..befe96452 100644 --- a/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomSplitAdapterConsumerTest.java +++ 
b/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomSplitAdapterConsumerTest.java @@ -1,14 +1,18 @@ package io.split.storages.pluggable.adapters; import com.google.common.collect.Lists; -import io.split.client.dtos.*; +import io.split.client.dtos.Condition; +import io.split.client.dtos.ConditionType; +import io.split.client.dtos.Split; +import io.split.client.dtos.Status; import io.split.client.utils.Json; import io.split.engine.ConditionsTestUtil; import io.split.engine.experiments.ParsedSplit; import io.split.engine.experiments.SplitParser; import io.split.grammar.Treatments; +import io.split.storages.pluggable.CustomStorageWrapperHasPipeline; import io.split.storages.pluggable.domain.PrefixAdapter; -import io.split.storages.pluggable.domain.SafeUserStorageWrapper; +import io.split.storages.pluggable.domain.UserStorageWrapper; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -17,6 +21,9 @@ import java.lang.reflect.Field; import java.lang.reflect.Modifier; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -27,81 +34,123 @@ public class UserCustomSplitAdapterConsumerTest { private static final String SPLIT_NAME = "SplitName"; private CustomStorageWrapper _customStorageWrapper; - private SafeUserStorageWrapper _safeUserStorageWrapper; + private UserStorageWrapper _userStorageWrapper; private UserCustomSplitAdapterConsumer _userCustomSplitAdapterConsumer; @Before public void setUp() throws NoSuchFieldException, IllegalAccessException { _customStorageWrapper = Mockito.mock(CustomStorageWrapper.class); - _safeUserStorageWrapper = Mockito.mock(SafeUserStorageWrapper.class); + _userStorageWrapper = Mockito.mock(UserStorageWrapper.class); _userCustomSplitAdapterConsumer = new UserCustomSplitAdapterConsumer(_customStorageWrapper); - Field userCustomSplitAdapterConsumer = 
UserCustomSplitAdapterConsumer.class.getDeclaredField("_safeUserStorageWrapper"); + Field userCustomSplitAdapterConsumer = UserCustomSplitAdapterConsumer.class.getDeclaredField("_userStorageWrapper"); userCustomSplitAdapterConsumer.setAccessible(true); Field modifiersField = Field.class.getDeclaredField("modifiers"); modifiersField.setAccessible(true); modifiersField.setInt(userCustomSplitAdapterConsumer, userCustomSplitAdapterConsumer.getModifiers() & ~Modifier.FINAL); - userCustomSplitAdapterConsumer.set(_userCustomSplitAdapterConsumer, _safeUserStorageWrapper); + userCustomSplitAdapterConsumer.set(_userCustomSplitAdapterConsumer, _userStorageWrapper); } @Test public void testGetChangeNumber() { - Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildSplitChangeNumber())).thenReturn(getLongAsJson(120L)); + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildSplitChangeNumber())).thenReturn(getLongAsJson(120L)); Assert.assertEquals(120L, _userCustomSplitAdapterConsumer.getChangeNumber()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); } @Test public void testGetChangeNumberWithWrapperFailing() { - Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildSplitChangeNumber())).thenReturn(null); + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildSplitChangeNumber())).thenReturn(null); Assert.assertEquals(-1L, _userCustomSplitAdapterConsumer.getChangeNumber()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); } @Test public void testGetChangeNumberWithGsonFailing() { - Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildSplitChangeNumber())).thenReturn("a"); + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildSplitChangeNumber())).thenReturn("a"); Assert.assertEquals(-1L, 
_userCustomSplitAdapterConsumer.getChangeNumber()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); } @Test public void testGetSplit() { SplitParser splitParser = new SplitParser(); Split split = getSplit(SPLIT_NAME); - Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildSplitKey(SPLIT_NAME))).thenReturn(getSplitAsJson(split)); - ParsedSplit result = _userCustomSplitAdapterConsumer.get(SPLIT_NAME); + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildSplitKey(SPLIT_NAME))).thenReturn(getSplitAsJson(split)); + ParsedSplit actual = _userCustomSplitAdapterConsumer.get(SPLIT_NAME); ParsedSplit expected = splitParser.parse(split); - Assert.assertEquals(expected, result); + Assert.assertEquals(actual.getRuleBasedSegmentsNames(), expected.getRuleBasedSegmentsNames()); + Assert.assertEquals(actual.seed(), expected.seed()); + Assert.assertEquals(actual.algo(), expected.algo()); + Assert.assertEquals(actual.trafficAllocationSeed(), expected.trafficAllocationSeed()); + Assert.assertEquals(actual.flagSets(), expected.flagSets()); + Assert.assertEquals(actual.parsedConditions(), expected.parsedConditions()); + Assert.assertEquals(actual.trafficAllocation(), expected.trafficAllocation()); + Assert.assertEquals(actual.getSegmentsNames(), expected.getSegmentsNames()); + Assert.assertEquals(actual.impressionsDisabled(), expected.impressionsDisabled()); + Assert.assertEquals(actual.killed(), expected.killed()); + Assert.assertEquals(actual.defaultTreatment(), expected.defaultTreatment()); + Assert.assertEquals(actual.changeNumber(), expected.changeNumber()); + Assert.assertEquals(actual.feature(), expected.feature()); + Assert.assertEquals(actual.configurations(), expected.configurations()); + Assert.assertEquals(actual.prerequisitesMatcher().toString(), expected.prerequisitesMatcher().toString()); } @Test public void testGetSplitNotFound() { - 
Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildSplitKey(SPLIT_NAME))).thenReturn(null); + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildSplitKey(SPLIT_NAME))).thenReturn(null); ParsedSplit result = _userCustomSplitAdapterConsumer.get(SPLIT_NAME); Assert.assertNull(result); } @Test - public void testGetAll(){ + public void testGetAll() { Split split = getSplit(SPLIT_NAME); Split split2 = getSplit(SPLIT_NAME+"2"); List listResultExpected = Stream.of(split, split2).collect(Collectors.toList()); Set keysResult = Stream.of(SPLIT_NAME, SPLIT_NAME+"2").collect(Collectors.toSet()); - Mockito.when(_safeUserStorageWrapper.getKeysByPrefix(Mockito.anyObject())). + Mockito.when(_userStorageWrapper.getKeysByPrefix(Mockito.anyObject())). thenReturn(keysResult); List getManyExpected = Stream.of(Json.toJson(split), Json.toJson(split2)).collect(Collectors.toList()); - Mockito.when(_safeUserStorageWrapper.getMany(Mockito.anyObject())). + Mockito.when(_userStorageWrapper.getMany(Mockito.anyObject())). thenReturn(getManyExpected); List splitsResult = (List) _userCustomSplitAdapterConsumer.getAll(); Assert.assertNotNull(splitsResult); Assert.assertEquals(listResultExpected.size(), splitsResult.size()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).getKeysByPrefix(Mockito.anyString()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).getMany(Mockito.anyObject()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).getKeysByPrefix(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).getMany(Mockito.anyObject()); } @Test - public void testGetAllWithWrapperFailing(){ - Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildGetAllSplit())). 
+ public void testGetSplitNames() { + Split split = getSplit(SPLIT_NAME); + Split split2 = getSplit(SPLIT_NAME+"2"); + List listResultExpected = Stream.of(split, split2).collect(Collectors.toList()); + Set keysResult = Stream.of(SPLIT_NAME, SPLIT_NAME+"2").collect(Collectors.toSet()); + Mockito.when(_userStorageWrapper.getKeysByPrefix(Mockito.anyObject())). + thenReturn(keysResult); + List splitsResult = _userCustomSplitAdapterConsumer.splitNames(); + Assert.assertNotNull(splitsResult); + Assert.assertEquals(listResultExpected.size(), splitsResult.size()); + Assert.assertEquals(SPLIT_NAME, splitsResult.get(1)); + Assert.assertEquals(SPLIT_NAME+"2", splitsResult.get(0)); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).getKeysByPrefix(Mockito.anyString()); + + // default prefix + listResultExpected = Stream.of(split, split2).collect(Collectors.toList()); + keysResult = Stream.of("SPLITIO.split." + SPLIT_NAME, "SPLITIO.split." + SPLIT_NAME+"2").collect(Collectors.toSet()); + Mockito.when(_userStorageWrapper.getKeysByPrefix(Mockito.anyObject())). + thenReturn(keysResult); + + splitsResult = _userCustomSplitAdapterConsumer.splitNames(); + Assert.assertNotNull(splitsResult); + Assert.assertEquals(listResultExpected.size(), splitsResult.size()); + Assert.assertEquals(SPLIT_NAME, splitsResult.get(1)); + Assert.assertEquals(SPLIT_NAME+"2", splitsResult.get(0)); + } + + @Test + public void testGetAllWithWrapperFailing() { + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildGetAllSplit())). thenReturn(null); List splitsResult = (List) _userCustomSplitAdapterConsumer.getAll(); Assert.assertNotNull(splitsResult); @@ -110,7 +159,7 @@ public void testGetAllWithWrapperFailing(){ @Test public void testGetAllNullOnWrappers() { - Mockito.when(_safeUserStorageWrapper.getKeysByPrefix(PrefixAdapter.buildGetAllSplit())). + Mockito.when(_userStorageWrapper.getKeysByPrefix(PrefixAdapter.buildGetAllSplit())). 
thenReturn(null); List splitsResult = (List) _userCustomSplitAdapterConsumer.getAll(); Assert.assertEquals(0, splitsResult.size()); @@ -119,9 +168,9 @@ public void testGetAllNullOnWrappers() { @Test public void testGetAllNullOnGetMany() { Set keysResult = Stream.of(SPLIT_NAME, SPLIT_NAME+"2").collect(Collectors.toSet()); - Mockito.when(_safeUserStorageWrapper.getKeysByPrefix(Mockito.anyObject())). + Mockito.when(_userStorageWrapper.getKeysByPrefix(Mockito.anyObject())). thenReturn(keysResult); - Mockito.when(_safeUserStorageWrapper.getMany(Mockito.anyObject())). + Mockito.when(_userStorageWrapper.getMany(Mockito.anyObject())). thenReturn(null); List splitsResult = (List) _userCustomSplitAdapterConsumer.getAll(); Assert.assertEquals(0, splitsResult.size()); @@ -130,7 +179,7 @@ public void testGetAllNullOnGetMany() { @Test public void testTrafficTypeExists() { - Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildTrafficTypeExists("TrafficType"))). + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildTrafficTypeExists("TrafficType"))). thenReturn(getLongAsJson(2)); boolean result = _userCustomSplitAdapterConsumer.trafficTypeExists("TrafficType"); Assert.assertTrue(result); @@ -138,7 +187,7 @@ public void testTrafficTypeExists() { @Test public void testTrafficTypeExistsWithWrapperFailing() { - Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildTrafficTypeExists("TrafficType"))). + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildTrafficTypeExists("TrafficType"))). thenReturn(null); boolean result = _userCustomSplitAdapterConsumer.trafficTypeExists("TrafficType"); Assert.assertFalse(result); @@ -146,18 +195,18 @@ public void testTrafficTypeExistsWithWrapperFailing() { @Test public void testTrafficTypeExistsWithGsonFailing() { - Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildTrafficTypeExists("TrafficType"))). + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildTrafficTypeExists("TrafficType"))). 
thenReturn("true"); boolean result = _userCustomSplitAdapterConsumer.trafficTypeExists("TrafficType"); Assert.assertFalse(result); } @Test - public void testFetchMany(){ + public void testFetchMany() { Split split = getSplit(SPLIT_NAME); Split split2 = getSplit(SPLIT_NAME+"2"); List listResultExpected = Stream.of(Json.toJson(split), Json.toJson(split2)).collect(Collectors.toList()); - Mockito.when(_safeUserStorageWrapper.getItems(PrefixAdapter.buildFetchManySplits(Stream.of(SPLIT_NAME, SPLIT_NAME+"2").collect(Collectors.toList())))). + Mockito.when(_userStorageWrapper.getItems(PrefixAdapter.buildFetchManySplits(Stream.of(SPLIT_NAME, SPLIT_NAME+"2").collect(Collectors.toList())))). thenReturn(listResultExpected); Map splitsResult = _userCustomSplitAdapterConsumer.fetchMany(Stream.of(SPLIT_NAME, SPLIT_NAME+"2").collect(Collectors.toList())); Assert.assertNotNull(splitsResult); @@ -165,8 +214,8 @@ public void testFetchMany(){ } @Test - public void testFetchManyWithWrapperFailing(){ - Mockito.when(_safeUserStorageWrapper.getItems(PrefixAdapter.buildFetchManySplits(Stream.of(SPLIT_NAME, SPLIT_NAME+"2").collect(Collectors.toList())))). + public void testFetchManyWithWrapperFailing() { + Mockito.when(_userStorageWrapper.getItems(PrefixAdapter.buildFetchManySplits(Stream.of(SPLIT_NAME, SPLIT_NAME+"2").collect(Collectors.toList())))). thenReturn(null); Map splitsResult = _userCustomSplitAdapterConsumer.fetchMany(Stream.of(SPLIT_NAME, SPLIT_NAME+"2").collect(Collectors.toList())); Assert.assertNotNull(splitsResult); @@ -175,8 +224,8 @@ public void testFetchManyWithWrapperFailing(){ } @Test - public void testFetchManyNotFound(){ - Mockito.when(_safeUserStorageWrapper.getItems(PrefixAdapter.buildFetchManySplits(Stream.of(SPLIT_NAME, SPLIT_NAME+"2").collect(Collectors.toList())))). + public void testFetchManyNotFound() { + Mockito.when(_userStorageWrapper.getItems(PrefixAdapter.buildFetchManySplits(Stream.of(SPLIT_NAME, SPLIT_NAME+"2").collect(Collectors.toList())))). 
thenReturn(null); Map splitsResult = _userCustomSplitAdapterConsumer.fetchMany(Stream.of(SPLIT_NAME, SPLIT_NAME+"2").collect(Collectors.toList())); Assert.assertNotNull(splitsResult); @@ -184,6 +233,14 @@ public void testFetchManyNotFound(){ Assert.assertNull(splitsResult.get(SPLIT_NAME+"2")); } + @Test + public void testGetNamesByFlagSets() { + CustomStorageWrapper customStorageWrapper = new CustomStorageWrapperHasPipeline(); + UserCustomSplitAdapterConsumer userCustomSplitAdapterConsumer = new UserCustomSplitAdapterConsumer(customStorageWrapper); + Map> flagSets = userCustomSplitAdapterConsumer.getNamesByFlagSets(new ArrayList<>(Arrays.asList("set1"))); + Assert.assertEquals(2, flagSets.get("set1").size()); + } + @Test public void testGetSegments() { //NoOp diff --git a/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomSplitAdapterProducerTest.java b/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomSplitAdapterProducerTest.java index baa0f17d6..ba80577d3 100644 --- a/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomSplitAdapterProducerTest.java +++ b/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomSplitAdapterProducerTest.java @@ -10,7 +10,7 @@ import io.split.engine.experiments.SplitParser; import io.split.grammar.Treatments; import io.split.storages.pluggable.domain.PrefixAdapter; -import io.split.storages.pluggable.domain.SafeUserStorageWrapper; +import io.split.storages.pluggable.domain.UserStorageWrapper; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -26,99 +26,99 @@ public class UserCustomSplitAdapterProducerTest{ private static final String SPLIT_NAME = "SplitName"; private CustomStorageWrapper _customStorageWrapper; - private SafeUserStorageWrapper _safeUserStorageWrapper; + private UserStorageWrapper _userStorageWrapper; private UserCustomSplitAdapterProducer _userCustomSplitAdapterProducer; @Before public void setUp() throws NoSuchFieldException, 
IllegalAccessException { _customStorageWrapper = Mockito.mock(CustomStorageWrapper.class); - _safeUserStorageWrapper = Mockito.mock(SafeUserStorageWrapper.class); + _userStorageWrapper = Mockito.mock(UserStorageWrapper.class); _userCustomSplitAdapterProducer = new UserCustomSplitAdapterProducer(_customStorageWrapper); - Field userCustomSplitAdapterProducer = UserCustomSplitAdapterProducer.class.getDeclaredField("_safeUserStorageWrapper"); + Field userCustomSplitAdapterProducer = UserCustomSplitAdapterProducer.class.getDeclaredField("_userStorageWrapper"); userCustomSplitAdapterProducer.setAccessible(true); Field modifiersField = Field.class.getDeclaredField("modifiers"); modifiersField.setAccessible(true); modifiersField.setInt(userCustomSplitAdapterProducer, userCustomSplitAdapterProducer.getModifiers() & ~Modifier.FINAL); - userCustomSplitAdapterProducer.set(_userCustomSplitAdapterProducer, _safeUserStorageWrapper); + userCustomSplitAdapterProducer.set(_userCustomSplitAdapterProducer, _userStorageWrapper); } @Test public void testGetChangeNumber() { - Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildSplitChangeNumber())).thenReturn(UserCustomSplitAdapterConsumerTest.getLongAsJson(120L)); + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildSplitChangeNumber())).thenReturn(UserCustomSplitAdapterConsumerTest.getLongAsJson(120L)); Assert.assertEquals(120L, _userCustomSplitAdapterProducer.getChangeNumber()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); } @Test public void testGetChangeNumberWithWrapperFailing() { - Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildSplitChangeNumber())).thenReturn(null); + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildSplitChangeNumber())).thenReturn(null); Assert.assertEquals(-1L, _userCustomSplitAdapterProducer.getChangeNumber()); - Mockito.verify(_safeUserStorageWrapper, 
Mockito.times(1)).get(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); } @Test public void testGetChangeNumberWithGsonFailing() { - Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildSplitChangeNumber())).thenReturn("a"); + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildSplitChangeNumber())).thenReturn("a"); Assert.assertEquals(-1L, _userCustomSplitAdapterProducer.getChangeNumber()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); } @Test public void testRemove() { Split split = getSplit(SPLIT_NAME); - Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildSplitKey(SPLIT_NAME))) + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildSplitKey(SPLIT_NAME))) .thenReturn(UserCustomSplitAdapterConsumerTest.getSplitAsJson(split)); - Mockito.when(_safeUserStorageWrapper.decrement(Mockito.anyString(), Mockito.anyLong())) + Mockito.when(_userStorageWrapper.decrement(Mockito.anyString(), Mockito.anyLong())) .thenReturn(0L); _userCustomSplitAdapterProducer.remove(SPLIT_NAME); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(2)).delete(Mockito.anyObject()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).decrement(Mockito.anyObject(), Mockito.anyLong()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(2)).delete(Mockito.anyObject()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).decrement(Mockito.anyObject(), Mockito.anyLong()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); } @Test public void testRemoveWithNoDelete() { Split split = getSplit(SPLIT_NAME); - Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildSplitKey(SPLIT_NAME))) + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildSplitKey(SPLIT_NAME))) 
.thenReturn(UserCustomSplitAdapterConsumerTest.getSplitAsJson(split)); - Mockito.when(_safeUserStorageWrapper.decrement(Mockito.anyString(), Mockito.anyLong())) + Mockito.when(_userStorageWrapper.decrement(Mockito.anyString(), Mockito.anyLong())) .thenReturn(1L); _userCustomSplitAdapterProducer.remove(SPLIT_NAME); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).delete(Mockito.anyObject()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).decrement(Mockito.anyObject(), Mockito.anyLong()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).delete(Mockito.anyObject()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).decrement(Mockito.anyObject(), Mockito.anyLong()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); } @Test public void testRemoveWithWrapperFailing() { - Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildSplitKey(SPLIT_NAME))) + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildSplitKey(SPLIT_NAME))) .thenReturn(null); Assert.assertFalse(_userCustomSplitAdapterProducer.remove(SPLIT_NAME)); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(0)).delete(Mockito.anyObject()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(0)).decrement(Mockito.anyObject(), Mockito.anyLong()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(0)).delete(Mockito.anyObject()); + Mockito.verify(_userStorageWrapper, Mockito.times(0)).decrement(Mockito.anyObject(), Mockito.anyLong()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); } @Test public void testSetChangeNumber() { _userCustomSplitAdapterProducer.setChangeNumber(1L); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).set(Mockito.anyString(), Mockito.anyString()); + Mockito.verify(_userStorageWrapper, 
Mockito.times(1)).set(Mockito.anyString(), Mockito.anyString()); } @Test public void testKill() { - Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildSplitKey(SPLIT_NAME))).thenReturn(UserCustomSplitAdapterConsumerTest.getSplitAsJson(getSplit(SPLIT_NAME))); + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildSplitKey(SPLIT_NAME))).thenReturn(UserCustomSplitAdapterConsumerTest.getSplitAsJson(getSplit(SPLIT_NAME))); _userCustomSplitAdapterProducer.kill(SPLIT_NAME, "DefaultTreatment", 2L); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).set(Mockito.anyString(), Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).set(Mockito.anyString(), Mockito.anyString()); } @Test public void testKillSplitNotFound() { - Mockito.when(_safeUserStorageWrapper.get(PrefixAdapter.buildSplitKey(SPLIT_NAME))).thenReturn(null); + Mockito.when(_userStorageWrapper.get(PrefixAdapter.buildSplitKey(SPLIT_NAME))).thenReturn(null); _userCustomSplitAdapterProducer.kill(SPLIT_NAME, "DefaultTreatment", 2L); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(0)).set(Mockito.anyString(), Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).get(Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(0)).set(Mockito.anyString(), Mockito.anyString()); } @Test @@ -127,22 +127,22 @@ public void testPutMany() { ParsedSplit parsedSplit = splitParser.parse(getSplit(SPLIT_NAME)); ParsedSplit parsedSplit2 = splitParser.parse(getSplit(SPLIT_NAME+"2")); _userCustomSplitAdapterProducer.putMany(Stream.of(parsedSplit, parsedSplit2).collect(Collectors.toList())); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(2)).set(Mockito.anyString(), Mockito.anyString()); - 
Mockito.verify(_safeUserStorageWrapper, Mockito.times(2)).increment(Mockito.anyString(), Mockito.anyLong()); + Mockito.verify(_userStorageWrapper, Mockito.times(2)).set(Mockito.anyString(), Mockito.anyString()); + Mockito.verify(_userStorageWrapper, Mockito.times(2)).increment(Mockito.anyString(), Mockito.anyLong()); } @Test public void testIncreaseTrafficType() { _userCustomSplitAdapterProducer.increaseTrafficType("TrafficType"); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).increment(PrefixAdapter.buildTrafficTypeExists("TrafficType"), 1); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).increment(PrefixAdapter.buildTrafficTypeExists("TrafficType"), 1); } @Test public void testDecreaseTrafficType() { _userCustomSplitAdapterProducer.decreaseTrafficType("TrafficType"); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).decrement(PrefixAdapter.buildTrafficTypeExists("TrafficType"), 1); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).delete(Mockito.anyObject()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).decrement(PrefixAdapter.buildTrafficTypeExists("TrafficType"), 1); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).delete(Mockito.anyObject()); } diff --git a/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomTelemetryAdapterProducerTest.java b/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomTelemetryAdapterProducerTest.java index 0c778fb32..b6aeca5ea 100644 --- a/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomTelemetryAdapterProducerTest.java +++ b/client/src/test/java/io/split/storages/pluggable/adapters/UserCustomTelemetryAdapterProducerTest.java @@ -1,7 +1,7 @@ package io.split.storages.pluggable.adapters; import io.split.client.utils.SDKMetadata; -import io.split.storages.pluggable.domain.SafeUserStorageWrapper; +import io.split.storages.pluggable.domain.UserStorageWrapper; import io.split.telemetry.domain.enums.MethodEnum; import 
org.junit.Before; import org.junit.Test; @@ -14,32 +14,38 @@ public class UserCustomTelemetryAdapterProducerTest { private CustomStorageWrapper _customStorageWrapper; - private SafeUserStorageWrapper _safeUserStorageWrapper; + private UserStorageWrapper _userStorageWrapper; private UserCustomTelemetryAdapterProducer _userCustomTelemetryAdapterProducer; @Before public void setUp() throws NoSuchFieldException, IllegalAccessException { _customStorageWrapper = Mockito.mock(CustomStorageWrapper.class); - _safeUserStorageWrapper = Mockito.mock(SafeUserStorageWrapper.class); + _userStorageWrapper = Mockito.mock(UserStorageWrapper.class); _userCustomTelemetryAdapterProducer = new UserCustomTelemetryAdapterProducer(_customStorageWrapper, Mockito.mock(SDKMetadata.class)); - Field userCustomTelemetryAdapterProducer = UserCustomTelemetryAdapterProducer.class.getDeclaredField("_safeUserStorageWrapper"); + Field userCustomTelemetryAdapterProducer = UserCustomTelemetryAdapterProducer.class.getDeclaredField("_userStorageWrapper"); userCustomTelemetryAdapterProducer.setAccessible(true); Field modifiersField = Field.class.getDeclaredField("modifiers"); modifiersField.setAccessible(true); modifiersField.setInt(userCustomTelemetryAdapterProducer, userCustomTelemetryAdapterProducer.getModifiers() & ~Modifier.FINAL); - userCustomTelemetryAdapterProducer.set(_userCustomTelemetryAdapterProducer, _safeUserStorageWrapper); + userCustomTelemetryAdapterProducer.set(_userCustomTelemetryAdapterProducer, _userStorageWrapper); } @Test public void testRecordLatency() { _userCustomTelemetryAdapterProducer.recordLatency(MethodEnum.TRACK, 10l); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).increment(Mockito.anyString(), Mockito.anyLong()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).hIncrement(Mockito.anyString(), Mockito.anyString(), Mockito.anyLong()); + } + + @Test + public void testRecordExceptionTreatmentByFlagSet() { + 
_userCustomTelemetryAdapterProducer.recordException(MethodEnum.TREATMENTS_BY_FLAG_SET); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).hIncrement(Mockito.anyString(), Mockito.anyString(), Mockito.anyLong()); } @Test public void testRecordException() { _userCustomTelemetryAdapterProducer.recordException(MethodEnum.TRACK); - Mockito.verify(_safeUserStorageWrapper, Mockito.times(1)).increment(Mockito.anyString(), Mockito.anyLong()); + Mockito.verify(_userStorageWrapper, Mockito.times(1)).hIncrement(Mockito.anyString(), Mockito.anyString(), Mockito.anyLong()); } } \ No newline at end of file diff --git a/client/src/test/java/io/split/storages/pluggable/domain/PrefixAdapterTest.java b/client/src/test/java/io/split/storages/pluggable/domain/PrefixAdapterTest.java index 5fb02532c..8112e3fe1 100644 --- a/client/src/test/java/io/split/storages/pluggable/domain/PrefixAdapterTest.java +++ b/client/src/test/java/io/split/storages/pluggable/domain/PrefixAdapterTest.java @@ -78,14 +78,14 @@ public void testBuildSegments() { @Test public void testBuildTelemetryLatencies() { - String expectedPrefix = "SPLITIO.telemetry.latencies::sv/mn/mi/getTreatment/2"; - Assert.assertEquals(expectedPrefix, PrefixAdapter.buildTelemetryLatenciesPrefix("getTreatment", 2, "sv", "mi","mn")); + String expectedPrefix = "SPLITIO.telemetry.latencies"; + Assert.assertEquals(expectedPrefix, PrefixAdapter.buildTelemetryLatenciesPrefix()); } @Test public void testBuildTelemetryExceptions() { - String expectedPrefix = "SPLITIO.telemetry.exceptions::sv/mn/mi/getTreatment"; - Assert.assertEquals(expectedPrefix, PrefixAdapter.buildTelemetryExceptionsPrefix("getTreatment", "sv", "mi","mn")); + String expectedPrefix = "SPLITIO.telemetry.exceptions"; + Assert.assertEquals(expectedPrefix, PrefixAdapter.buildTelemetryExceptionsPrefix()); } } \ No newline at end of file diff --git a/client/src/test/java/io/split/storages/pluggable/domain/UserPipelineWrapperTest.java 
b/client/src/test/java/io/split/storages/pluggable/domain/UserPipelineWrapperTest.java new file mode 100644 index 000000000..eaa7561b1 --- /dev/null +++ b/client/src/test/java/io/split/storages/pluggable/domain/UserPipelineWrapperTest.java @@ -0,0 +1,54 @@ +package io.split.storages.pluggable.domain; + +import io.split.storages.pluggable.CustomStorageWrapperHasPipeline; +import io.split.storages.pluggable.CustomStorageWrapperImp; +import org.junit.Assert; +import org.junit.Test; +import pluggable.NotPipelinedImpl; +import pluggable.Result; + +import java.util.List; +import java.util.Optional; + +public class UserPipelineWrapperTest { + private static final String KEY = "SPLITIO.impressions.counts"; + private static final String SET_KET = "SPLITIO.flagSet"; + private static final String HASH_COUNT_KEY = "countKey"; + + @Test + public void testHincrementWithPipeline() throws Exception { + CustomStorageWrapperHasPipeline customStorageWrapper = new CustomStorageWrapperHasPipeline(); + UserPipelineWrapper userPipelineWrapper = new UserPipelineWrapper(customStorageWrapper.pipeline()); + userPipelineWrapper.hIncrement(KEY, HASH_COUNT_KEY, 1); + List results = userPipelineWrapper.exec(); + Assert.assertEquals(Optional.of(1L), results.get(0).asLong()); + } + + @Test + public void testHincrementWithoutPipeline() throws Exception { + CustomStorageWrapperImp customStorageWrapper = new CustomStorageWrapperImp(); + NotPipelinedImpl notPipelined = new NotPipelinedImpl(customStorageWrapper); + UserPipelineWrapper userPipelineWrapper = new UserPipelineWrapper(notPipelined); + userPipelineWrapper.hIncrement(KEY, HASH_COUNT_KEY, 1); + List results = userPipelineWrapper.exec(); + Assert.assertEquals(Optional.of(1L), results.get(0).asLong()); + } + + @Test + public void testGetMembersWithPipeline() throws Exception { + CustomStorageWrapperHasPipeline customStorageWrapper = new CustomStorageWrapperHasPipeline(); + UserPipelineWrapper userPipelineWrapper = new 
UserPipelineWrapper(customStorageWrapper.pipeline()); + userPipelineWrapper.getMembers(SET_KET + ".set1"); + List results = userPipelineWrapper.exec(); + Assert.assertEquals(2, results.get(0).asHash().get().size()); + } + + @Test + public void testGetMembersWithoutPipeline() throws Exception { + CustomStorageWrapperImp customStorageWrapper = new CustomStorageWrapperImp(); + NotPipelinedImpl notPipelined = new NotPipelinedImpl(customStorageWrapper); + notPipelined.getMembers(SET_KET + ".set1"); + List results = notPipelined.exec(); + Assert.assertEquals(2, results.get(0).asHash().get().size()); + } +} \ No newline at end of file diff --git a/client/src/test/java/io/split/storages/pluggable/domain/SafeUserStorageWrapperTest.java b/client/src/test/java/io/split/storages/pluggable/domain/UserStorageWrapperTest.java similarity index 71% rename from client/src/test/java/io/split/storages/pluggable/domain/SafeUserStorageWrapperTest.java rename to client/src/test/java/io/split/storages/pluggable/domain/UserStorageWrapperTest.java index b06539073..709d9f03b 100644 --- a/client/src/test/java/io/split/storages/pluggable/domain/SafeUserStorageWrapperTest.java +++ b/client/src/test/java/io/split/storages/pluggable/domain/UserStorageWrapperTest.java @@ -13,32 +13,33 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -public class SafeUserStorageWrapperTest{ +public class UserStorageWrapperTest { private static final String KEY = "KEY"; private static final String RESPONSE = "Response"; + private static final String HASH_COUNT_KEY = "countKey"; private static final String ITEM = "Item"; private CustomStorageWrapper _customStorageWrapper; - private SafeUserStorageWrapper _safeUserStorageWrapper; + private UserStorageWrapper _userStorageWrapper; private Logger _log; @Before public void setUp() throws NoSuchFieldException, IllegalAccessException { _customStorageWrapper = Mockito.mock(CustomStorageWrapper.class); _log = Mockito.mock(Logger.class); - 
_safeUserStorageWrapper = new SafeUserStorageWrapper(_customStorageWrapper); - Field safeUserStorageWrapper = SafeUserStorageWrapper.class.getDeclaredField("_log"); - safeUserStorageWrapper.setAccessible(true); + _userStorageWrapper = new UserStorageWrapper(_customStorageWrapper); + Field userStorageWrapper = UserStorageWrapper.class.getDeclaredField("_log"); + userStorageWrapper.setAccessible(true); Field modifiersField = Field.class.getDeclaredField("modifiers"); modifiersField.setAccessible(true); - modifiersField.setInt(safeUserStorageWrapper, safeUserStorageWrapper.getModifiers() & ~Modifier.FINAL); - safeUserStorageWrapper.set(_safeUserStorageWrapper, _log); + modifiersField.setInt(userStorageWrapper, userStorageWrapper.getModifiers() & ~Modifier.FINAL); + userStorageWrapper.set(_userStorageWrapper, _log); } @Test public void testGet() throws Exception { Mockito.when(_customStorageWrapper.get(Mockito.anyString())).thenReturn(RESPONSE); - String result = _safeUserStorageWrapper.get(KEY); + String result = _userStorageWrapper.get(KEY); Assert.assertNotNull(result); Assert.assertEquals(RESPONSE, result); } @@ -46,14 +47,14 @@ public void testGet() throws Exception { @Test public void testGetException() throws Exception { Mockito.when(_customStorageWrapper.get(Mockito.anyString())).thenThrow(Exception.class); - String result = _safeUserStorageWrapper.get(KEY); + String result = _userStorageWrapper.get(KEY); Assert.assertNull(result); } @Test public void testGetMany() throws Exception { Mockito.when(_customStorageWrapper.getMany(Mockito.anyObject())).thenReturn(Stream.of(RESPONSE).collect(Collectors.toList())); - List result = _safeUserStorageWrapper.getMany(Stream.of(KEY).collect(Collectors.toList())); + List result = _userStorageWrapper.getMany(Stream.of(KEY).collect(Collectors.toList())); Assert.assertNotNull(result); Assert.assertEquals(1, result.size()); Assert.assertEquals(RESPONSE, result.get(0)); @@ -62,41 +63,41 @@ public void testGetMany() throws 
Exception { @Test public void testGetManyException() throws Exception { Mockito.when(_customStorageWrapper.getMany(Mockito.anyObject())).thenThrow(Exception.class); - List result = _safeUserStorageWrapper.getMany(Stream.of(KEY).collect(Collectors.toList())); + List result = _userStorageWrapper.getMany(Stream.of(KEY).collect(Collectors.toList())); Assert.assertNull(result); } @Test public void testSet() { - _safeUserStorageWrapper.set(KEY, ITEM); + _userStorageWrapper.set(KEY, ITEM); Mockito.verify(_log, Mockito.times(0)).error(Mockito.anyString()); } @Test public void testSetException() throws Exception { Mockito.doThrow(Exception.class).when(_customStorageWrapper).set(Mockito.anyString(), Mockito.anyString()); - _safeUserStorageWrapper.set(KEY, ITEM); + _userStorageWrapper.set(KEY, ITEM); Mockito.verify(_log, Mockito.times(1)).error(Mockito.anyString()); } @Test public void testDelete() { - _safeUserStorageWrapper.delete(Stream.of(KEY).collect(Collectors.toList())); + _userStorageWrapper.delete(Stream.of(KEY).collect(Collectors.toList())); Mockito.verify(_log, Mockito.times(0)).error(Mockito.anyString()); } @Test public void testDeleteException() throws Exception { Mockito.doThrow(Exception.class).when(_customStorageWrapper).delete(Mockito.anyObject()); - _safeUserStorageWrapper.delete(Stream.of(KEY).collect(Collectors.toList())); + _userStorageWrapper.delete(Stream.of(KEY).collect(Collectors.toList())); Mockito.verify(_log, Mockito.times(1)).error(Mockito.anyString()); } @Test public void testGetAndSet() throws Exception { Mockito.when(_customStorageWrapper.getAndSet(Mockito.anyString(), Mockito.anyObject())).thenReturn(RESPONSE); - String result = _safeUserStorageWrapper.getAndSet(KEY, ITEM); + String result = _userStorageWrapper.getAndSet(KEY, ITEM); Assert.assertNotNull(result); Assert.assertEquals(RESPONSE, result); } @@ -104,7 +105,7 @@ public void testGetAndSet() throws Exception { @Test public void testGetAndSetException() throws Exception { 
Mockito.when(_customStorageWrapper.getAndSet(Mockito.anyString(), Mockito.anyObject())).thenThrow(Exception.class); - String result = _safeUserStorageWrapper.getAndSet(KEY, ITEM); + String result = _userStorageWrapper.getAndSet(KEY, ITEM); Assert.assertNull(result); } @@ -113,7 +114,7 @@ public void testGetKeysByPrefix() throws Exception { Set response =new HashSet<>(); response.add(RESPONSE); Mockito.when(_customStorageWrapper.getKeysByPrefix(Mockito.anyString())).thenReturn(response); - Set result = _safeUserStorageWrapper.getKeysByPrefix(KEY); + Set result = _userStorageWrapper.getKeysByPrefix(KEY); Assert.assertNotNull(result); Assert.assertTrue(result.contains(RESPONSE)); } @@ -121,7 +122,7 @@ public void testGetKeysByPrefix() throws Exception { @Test public void testGetKeysByPrefixException() throws Exception { Mockito.when(_customStorageWrapper.getKeysByPrefix(Mockito.anyString())).thenThrow(Exception.class); - Set result = _safeUserStorageWrapper.getKeysByPrefix(KEY); + Set result = _userStorageWrapper.getKeysByPrefix(KEY); Assert.assertNull(result); } @@ -129,14 +130,30 @@ public void testGetKeysByPrefixException() throws Exception { public void testIncrement() throws Exception { long response = 2L; Mockito.when(_customStorageWrapper.increment(Mockito.anyString(), Mockito.anyLong())).thenReturn(response); - long result = _safeUserStorageWrapper.increment(KEY, 1); + long result = _userStorageWrapper.increment(KEY, 1); Assert.assertEquals(response, result); } @Test public void testIncrementException() throws Exception { Mockito.when(_customStorageWrapper.increment(Mockito.anyString(), Mockito.anyLong())).thenThrow(Exception.class); - long result = _safeUserStorageWrapper.increment(KEY, 1); + long result = _userStorageWrapper.increment(KEY, 1); + Assert.assertEquals(0L, result); + Mockito.verify(_log, Mockito.times(1)).error(Mockito.anyString()); + } + + @Test + public void testHIncrement() throws Exception { + long response = 2L; + 
Mockito.when(_customStorageWrapper.hIncrement(Mockito.anyString(), Mockito.anyString(), Mockito.anyLong())).thenReturn(response); + long result = _userStorageWrapper.hIncrement(KEY, HASH_COUNT_KEY,1); + Assert.assertEquals(response, result); + } + + @Test + public void testHIncrementException() throws Exception { + Mockito.when(_customStorageWrapper.hIncrement(Mockito.anyString(), Mockito.anyString(), Mockito.anyLong())).thenThrow(Exception.class); + long result = _userStorageWrapper.hIncrement(KEY, HASH_COUNT_KEY, 1); Assert.assertEquals(0L, result); Mockito.verify(_log, Mockito.times(1)).error(Mockito.anyString()); } @@ -145,28 +162,28 @@ public void testIncrementException() throws Exception { public void testDecrement() throws Exception { long response = 2L; Mockito.when(_customStorageWrapper.decrement(Mockito.anyString(), Mockito.anyLong())).thenReturn(response); - long result = _safeUserStorageWrapper.decrement(KEY, 1); + long result = _userStorageWrapper.decrement(KEY, 1); Assert.assertEquals(response, result); } @Test public void testDecrementException() throws Exception { Mockito.when(_customStorageWrapper.decrement(Mockito.anyString(), Mockito.anyLong())).thenThrow(Exception.class); - long result = _safeUserStorageWrapper.decrement(KEY, 1); + long result = _userStorageWrapper.decrement(KEY, 1); Assert.assertEquals(0L, result); Mockito.verify(_log, Mockito.times(1)).error(Mockito.anyString()); } @Test public void testPushItems() { - _safeUserStorageWrapper.pushItems(KEY, Stream.of(ITEM).collect(Collectors.toList())); + _userStorageWrapper.pushItems(KEY, Stream.of(ITEM).collect(Collectors.toList())); Mockito.verify(_log, Mockito.times(0)).error(Mockito.anyString()); } @Test public void testPushItemsException() throws Exception { Mockito.doThrow(Exception.class).when(_customStorageWrapper).pushItems(Mockito.anyString(), Mockito.anyObject()); - _safeUserStorageWrapper.pushItems(KEY, Stream.of(ITEM).collect(Collectors.toList())); + 
_userStorageWrapper.pushItems(KEY, Stream.of(ITEM).collect(Collectors.toList())); Mockito.verify(_log, Mockito.times(1)).error(Mockito.anyString()); } @@ -174,7 +191,7 @@ public void testPushItemsException() throws Exception { public void testPopItems() throws Exception { Mockito.when(_customStorageWrapper.popItems(Mockito.anyString(), Mockito.anyLong())) .thenReturn(Stream.of(RESPONSE).collect(Collectors.toList())); - List result = _safeUserStorageWrapper.popItems(KEY, 1L); + List result = _userStorageWrapper.popItems(KEY, 1L); Assert.assertNotNull(result); Assert.assertEquals(RESPONSE, result.get(0)); } @@ -182,7 +199,7 @@ public void testPopItems() throws Exception { @Test public void testPopItemsException() throws Exception { Mockito.when(_customStorageWrapper.popItems(Mockito.anyString(), Mockito.anyLong())).thenThrow(Exception.class); - List result = _safeUserStorageWrapper.popItems(KEY, 1L); + List result = _userStorageWrapper.popItems(KEY, 1L); Assert.assertNull(result); Mockito.verify(_log, Mockito.times(1)).error(Mockito.anyString()); } @@ -191,14 +208,14 @@ public void testPopItemsException() throws Exception { public void testGetItemsCount() throws Exception { long response = 2L; Mockito.when(_customStorageWrapper.getItemsCount(Mockito.anyString())).thenReturn(response); - long result = _safeUserStorageWrapper.getItemsCount(KEY); + long result = _userStorageWrapper.getItemsCount(KEY); Assert.assertEquals(response, result); } @Test public void testGetItemsCountException() throws Exception { Mockito.when(_customStorageWrapper.getItemsCount(Mockito.anyString())).thenThrow(Exception.class); - long result = _safeUserStorageWrapper.getItemsCount(KEY); + long result = _userStorageWrapper.getItemsCount(KEY); Assert.assertEquals(-1L, result); Mockito.verify(_log, Mockito.times(1)).error(Mockito.anyString()); } @@ -206,48 +223,48 @@ public void testGetItemsCountException() throws Exception { @Test public void testItemContains() throws Exception { 
Mockito.when(_customStorageWrapper.itemContains(Mockito.anyString(), Mockito.anyString())).thenReturn(true); - boolean result = _safeUserStorageWrapper.itemContains(KEY, ITEM); + boolean result = _userStorageWrapper.itemContains(KEY, ITEM); Assert.assertTrue(result); } @Test public void testItemContainsException() throws Exception { Mockito.when(_customStorageWrapper.itemContains(Mockito.anyString(), Mockito.anyString())).thenThrow(Exception.class); - boolean result = _safeUserStorageWrapper.itemContains(KEY, ITEM); + boolean result = _userStorageWrapper.itemContains(KEY, ITEM); Assert.assertFalse(result); Mockito.verify(_log, Mockito.times(1)).error(Mockito.anyString()); } @Test public void testAddItems() { - _safeUserStorageWrapper.addItems(KEY, Stream.of(ITEM).collect(Collectors.toList())); + _userStorageWrapper.addItems(KEY, Stream.of(ITEM).collect(Collectors.toList())); Mockito.verify(_log, Mockito.times(0)).error(Mockito.anyString()); } @Test public void testAddItemsException() throws Exception { Mockito.doThrow(Exception.class).when(_customStorageWrapper).addItems(Mockito.anyString(), Mockito.anyObject()); - _safeUserStorageWrapper.addItems(KEY, Stream.of(ITEM).collect(Collectors.toList())); + _userStorageWrapper.addItems(KEY, Stream.of(ITEM).collect(Collectors.toList())); Mockito.verify(_log, Mockito.times(1)).error(Mockito.anyString()); } @Test public void testRemoveItems() { - _safeUserStorageWrapper.removeItems(KEY, Stream.of(ITEM).collect(Collectors.toList())); + _userStorageWrapper.removeItems(KEY, Stream.of(ITEM).collect(Collectors.toList())); Mockito.verify(_log, Mockito.times(0)).error(Mockito.anyString()); } @Test public void testRemoveItemsException() throws Exception { Mockito.doThrow(Exception.class).when(_customStorageWrapper).removeItems(Mockito.anyString(), Mockito.anyObject()); - _safeUserStorageWrapper.removeItems(KEY, Stream.of(ITEM).collect(Collectors.toList())); + _userStorageWrapper.removeItems(KEY, 
Stream.of(ITEM).collect(Collectors.toList())); Mockito.verify(_log, Mockito.times(1)).error(Mockito.anyString()); } @Test public void testGetItems() throws Exception { Mockito.when(_customStorageWrapper.getItems(Mockito.anyObject())).thenReturn(Stream.of(RESPONSE).collect(Collectors.toList())); - List result = _safeUserStorageWrapper.getItems(Stream.of(KEY).collect(Collectors.toList())); + List result = _userStorageWrapper.getItems(Stream.of(KEY).collect(Collectors.toList())); Assert.assertNotNull(result); Assert.assertEquals(RESPONSE, result.get(0)); } @@ -255,7 +272,7 @@ public void testGetItems() throws Exception { @Test public void testGetItemsException() throws Exception { Mockito.when(_customStorageWrapper.getItems(Mockito.anyObject())).thenThrow(Exception.class); - List result = _safeUserStorageWrapper.getItems(Stream.of(KEY).collect(Collectors.toList())); + List result = _userStorageWrapper.getItems(Stream.of(KEY).collect(Collectors.toList())); Assert.assertNull(result); Mockito.verify(_log, Mockito.times(1)).error(Mockito.anyString()); } @@ -263,14 +280,14 @@ public void testGetItemsException() throws Exception { @Test public void testConnect() throws Exception { Mockito.when(_customStorageWrapper.connect()).thenReturn(true); - boolean result = _safeUserStorageWrapper.connect(); + boolean result = _userStorageWrapper.connect(); Assert.assertTrue(result); } @Test public void testConnectFailed() throws Exception { Mockito.when(_customStorageWrapper.connect()).thenThrow(Exception.class); - boolean result = _safeUserStorageWrapper.connect(); + boolean result = _userStorageWrapper.connect(); Assert.assertFalse(result); Mockito.verify(_log, Mockito.times(1)).error(Mockito.anyString()); } @@ -278,14 +295,14 @@ public void testConnectFailed() throws Exception { @Test public void testDisconnect() throws Exception { Mockito.when(_customStorageWrapper.disconnect()).thenReturn(true); - boolean result = _safeUserStorageWrapper.disconnect(); + boolean result = 
_userStorageWrapper.disconnect(); Assert.assertTrue(result); } @Test public void testDisconnectFailed() throws Exception { Mockito.when(_customStorageWrapper.disconnect()).thenThrow(Exception.class); - boolean result = _safeUserStorageWrapper.disconnect(); + boolean result = _userStorageWrapper.disconnect(); Assert.assertFalse(result); Mockito.verify(_log, Mockito.times(1)).error(Mockito.anyString()); } diff --git a/client/src/test/java/io/split/storages/pluggable/synchronizer/TelemetryConsumerSubmitterTest.java b/client/src/test/java/io/split/storages/pluggable/synchronizer/TelemetryConsumerSubmitterTest.java index 74bc6ebd9..77dba1aa4 100644 --- a/client/src/test/java/io/split/storages/pluggable/synchronizer/TelemetryConsumerSubmitterTest.java +++ b/client/src/test/java/io/split/storages/pluggable/synchronizer/TelemetryConsumerSubmitterTest.java @@ -2,9 +2,10 @@ import io.split.client.ApiKeyCounter; import io.split.client.SplitClientConfig; +import io.split.client.dtos.UniqueKeys; import io.split.client.utils.SDKMetadata; import io.split.storages.pluggable.domain.ConfigConsumer; -import io.split.storages.pluggable.domain.SafeUserStorageWrapper; +import io.split.storages.pluggable.domain.UserStorageWrapper; import org.junit.Assert; import org.junit.Test; import org.mockito.Mockito; @@ -13,7 +14,10 @@ import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -30,24 +34,44 @@ public void testSynchronizeConfig() { ApiKeyCounter.getApiKeyCounterInstance().add(SECOND_KEY); ApiKeyCounter.getApiKeyCounterInstance().add(SECOND_KEY); TelemetryConsumerSubmitter telemetrySynchronizer = new TelemetryConsumerSubmitter(Mockito.mock(CustomStorageWrapper.class), new SDKMetadata("SDK 4.2.x", "22.215135.1", "testMachine")); - SplitClientConfig splitClientConfig = 
SplitClientConfig.builder().build(); + SplitClientConfig splitClientConfig = SplitClientConfig.builder().flagSetsFilter(Arrays.asList("set1", "set2", "set-3")).build(); ConfigConsumer config = telemetrySynchronizer.generateConfig(splitClientConfig, ApiKeyCounter.getApiKeyCounterInstance().getFactoryInstances(), Stream.of("tag1", "tag2").collect(Collectors.toList())); - Assert.assertEquals(3, config.get_redundantFactories()); - Assert.assertEquals(2, config.get_tags().size()); + Assert.assertEquals(3, config.getRedundantFactories()); + Assert.assertEquals(2, config.getTags().size()); } @Test public void testTestSynchronizeConfig() throws NoSuchFieldException, IllegalAccessException { - SafeUserStorageWrapper safeUserStorageWrapper = Mockito.mock(SafeUserStorageWrapper.class); + UserStorageWrapper userStorageWrapper = Mockito.mock(UserStorageWrapper.class); TelemetryConsumerSubmitter telemetrySynchronizer = new TelemetryConsumerSubmitter(Mockito.mock(CustomStorageWrapper.class), new SDKMetadata("SDK 4.2.x", "22.215135.1", "testMachine")); SplitClientConfig splitClientConfig = SplitClientConfig.builder().build(); - Field telemetryConsumerSubmitterHolder = TelemetryConsumerSubmitter.class.getDeclaredField("_safeUserStorageWrapper"); + Field telemetryConsumerSubmitterHolder = TelemetryConsumerSubmitter.class.getDeclaredField("_userStorageWrapper"); telemetryConsumerSubmitterHolder.setAccessible(true); Field modifiersField = Field.class.getDeclaredField("modifiers"); modifiersField.setAccessible(true); modifiersField.setInt(telemetryConsumerSubmitterHolder, telemetryConsumerSubmitterHolder.getModifiers() & ~Modifier.FINAL); - telemetryConsumerSubmitterHolder.set(telemetrySynchronizer, safeUserStorageWrapper); + telemetryConsumerSubmitterHolder.set(telemetrySynchronizer, userStorageWrapper); telemetrySynchronizer.synchronizeConfig(splitClientConfig, 10L, new HashMap<>(), new ArrayList<>()); - Mockito.verify(safeUserStorageWrapper, 
Mockito.times(1)).set(Mockito.anyString(), Mockito.anyObject()); + Mockito.verify(userStorageWrapper, Mockito.times(1)).hSet(Mockito.eq("SPLITIO.telemetry.init"), Mockito.eq("SDK 4.2.x/testMachine/22.215135.1"), Mockito.anyObject()); + } + + @Test + public void testTestSynchronizeUniqueKeys() throws NoSuchFieldException, IllegalAccessException { + UserStorageWrapper userStorageWrapper = Mockito.mock(UserStorageWrapper.class); + TelemetryConsumerSubmitter telemetrySynchronizer = new TelemetryConsumerSubmitter(Mockito.mock(CustomStorageWrapper.class), new SDKMetadata("SDK 4.2.x", "22.215135.1", "testMachine")); + Field telemetryConsumerSubmitterHolder = TelemetryConsumerSubmitter.class.getDeclaredField("_userStorageWrapper"); + telemetryConsumerSubmitterHolder.setAccessible(true); + telemetryConsumerSubmitterHolder.set(telemetrySynchronizer, userStorageWrapper); + + List keys = new ArrayList<>(); + keys.add("key-1"); + keys.add("key-2"); + List uniqueKeys = new ArrayList<>(); + uniqueKeys.add(new UniqueKeys.UniqueKey("feature-1", keys)); + UniqueKeys uniqueKeysToSend = new UniqueKeys(uniqueKeys); + + telemetrySynchronizer.synchronizeUniqueKeys(uniqueKeysToSend); + List uniqueKeysJson = new ArrayList<>(Collections.singletonList("{\"f\":\"feature-1\",\"ks\":[\"key-1\",\"key-2\"]}")); + Mockito.verify(userStorageWrapper).pushItems(Mockito.eq("SPLITIO.uniquekeys"), Mockito.eq(uniqueKeysJson)); } } \ No newline at end of file diff --git a/client/src/test/java/io/split/telemetry/storage/InMemoryTelemetryStorageTest.java b/client/src/test/java/io/split/telemetry/storage/InMemoryTelemetryStorageTest.java index 9f4986ad1..a74ee03ac 100644 --- a/client/src/test/java/io/split/telemetry/storage/InMemoryTelemetryStorageTest.java +++ b/client/src/test/java/io/split/telemetry/storage/InMemoryTelemetryStorageTest.java @@ -1,7 +1,20 @@ package io.split.telemetry.storage; -import io.split.telemetry.domain.*; -import io.split.telemetry.domain.enums.*; +import 
io.split.telemetry.domain.HTTPErrors; +import io.split.telemetry.domain.HTTPLatencies; +import io.split.telemetry.domain.LastSynchronization; +import io.split.telemetry.domain.MethodExceptions; +import io.split.telemetry.domain.MethodLatencies; +import io.split.telemetry.domain.StreamingEvent; +import io.split.telemetry.domain.UpdatesFromSSE; + +import io.split.telemetry.domain.enums.EventsDataRecordsEnum; +import io.split.telemetry.domain.enums.HTTPLatenciesEnum; +import io.split.telemetry.domain.enums.ImpressionsDataTypeEnum; +import io.split.telemetry.domain.enums.LastSynchronizationRecordsEnum; +import io.split.telemetry.domain.enums.MethodEnum; +import io.split.telemetry.domain.enums.ResourceEnum; +import io.split.telemetry.domain.enums.UpdatesFromSSEEnum; import org.junit.Assert; import org.junit.Test; @@ -10,7 +23,7 @@ public class InMemoryTelemetryStorageTest{ @Test - public void testInMemoryTelemetryStorage() throws Exception { + public void testInMemoryTelemetryStorage() { InMemoryTelemetryStorage telemetryStorage = new InMemoryTelemetryStorage(); //MethodLatencies @@ -20,21 +33,29 @@ public void testInMemoryTelemetryStorage() throws Exception { telemetryStorage.recordLatency(MethodEnum.TREATMENTS, 500l * 1000); telemetryStorage.recordLatency(MethodEnum.TREATMENT_WITH_CONFIG, 800l * 1000); telemetryStorage.recordLatency(MethodEnum.TREATMENTS_WITH_CONFIG, 1000l * 1000); + telemetryStorage.recordLatency(MethodEnum.TREATMENTS_BY_FLAG_SET, 1000l * 1000); + telemetryStorage.recordLatency(MethodEnum.TREATMENTS_BY_FLAG_SETS, 1000l * 1000); + telemetryStorage.recordLatency(MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SET, 1000l * 1000); + telemetryStorage.recordLatency(MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SETS, 1000l * 1000); MethodLatencies latencies = telemetryStorage.popLatencies(); - Assert.assertEquals(2, latencies.get_treatment().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(2, 
latencies.get_treatments().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(1, latencies.get_treatmentsWithConfig().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(1, latencies.get_treatmentWithConfig().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(0, latencies.get_track().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(2, latencies.getTreatment().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(2, latencies.getTreatments().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, latencies.getTreatmentsWithConfig().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, latencies.getTreatmentWithConfig().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, latencies.getTreatmentByFlagSet().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, latencies.getTreatmentByFlagSets().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, latencies.getTreatmentWithConfigByFlagSet().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, latencies.getTreatmentWithConfigByFlagSets().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(0, latencies.getTrack().stream().mapToInt(Long::intValue).sum()); //Check empty has worked latencies = telemetryStorage.popLatencies(); - Assert.assertEquals(0, latencies.get_treatment().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(0, latencies.get_treatments().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(0, latencies.get_treatmentsWithConfig().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(0, latencies.get_treatmentWithConfig().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(0, latencies.get_track().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(0, latencies.getTreatment().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(0, 
latencies.getTreatments().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(0, latencies.getTreatmentsWithConfig().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(0, latencies.getTreatmentWithConfig().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(0, latencies.getTrack().stream().mapToInt(Long::intValue).sum()); //HttpLatencies telemetryStorage.recordSyncLatency(HTTPLatenciesEnum.TELEMETRY, 1500l * 1000); @@ -50,22 +71,22 @@ public void testInMemoryTelemetryStorage() throws Exception { HTTPLatencies httpLatencies = telemetryStorage.popHTTPLatencies(); - Assert.assertEquals(3, httpLatencies.get_splits().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(2, httpLatencies.get_telemetry().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(2, httpLatencies.get_events().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(1, httpLatencies.get_segments().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(1, httpLatencies.get_impressions().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(1, httpLatencies.get_impressionsCount().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(0, httpLatencies.get_token().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(3, httpLatencies.getSplits().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(2, httpLatencies.getTelemetry().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(2, httpLatencies.getEvents().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, httpLatencies.getSegments().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, httpLatencies.getImpressions().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, httpLatencies.getImpressionsCount().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(0, httpLatencies.getToken().stream().mapToInt(Long::intValue).sum()); httpLatencies = 
telemetryStorage.popHTTPLatencies(); - Assert.assertEquals(0, httpLatencies.get_splits().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(0, httpLatencies.get_telemetry().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(0, httpLatencies.get_events().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(0, httpLatencies.get_segments().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(0, httpLatencies.get_impressions().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(0, httpLatencies.get_impressionsCount().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(0, httpLatencies.get_token().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(0, httpLatencies.getSplits().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(0, httpLatencies.getTelemetry().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(0, httpLatencies.getEvents().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(0, httpLatencies.getSegments().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(0, httpLatencies.getImpressions().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(0, httpLatencies.getImpressionsCount().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(0, httpLatencies.getToken().stream().mapToInt(Long::intValue).sum()); //Exceptions @@ -75,21 +96,33 @@ public void testInMemoryTelemetryStorage() throws Exception { telemetryStorage.recordException(MethodEnum.TREATMENTS); telemetryStorage.recordException(MethodEnum.TREATMENT_WITH_CONFIG); telemetryStorage.recordException(MethodEnum.TREATMENTS_WITH_CONFIG); + telemetryStorage.recordException(MethodEnum.TREATMENTS_BY_FLAG_SET); + telemetryStorage.recordException(MethodEnum.TREATMENTS_BY_FLAG_SETS); + telemetryStorage.recordException(MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SET); + telemetryStorage.recordException(MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SETS); MethodExceptions 
methodExceptions = telemetryStorage.popExceptions(); - Assert.assertEquals(2, methodExceptions.get_treatment()); - Assert.assertEquals(2, methodExceptions.get_treatments()); - Assert.assertEquals(1, methodExceptions.get_treatmentsWithConfig()); - Assert.assertEquals(1, methodExceptions.get_treatmentWithConfig()); - Assert.assertEquals(0, methodExceptions.get_track()); + Assert.assertEquals(2, methodExceptions.getTreatment()); + Assert.assertEquals(2, methodExceptions.getTreatments()); + Assert.assertEquals(1, methodExceptions.getTreatmentsWithConfig()); + Assert.assertEquals(1, methodExceptions.getTreatmentWithConfig()); + Assert.assertEquals(1, methodExceptions.getTreatmentByFlagSet()); + Assert.assertEquals(1, methodExceptions.getTreatmentByFlagSets()); + Assert.assertEquals(1, methodExceptions.getTreatmentWithConfigByFlagSet()); + Assert.assertEquals(1, methodExceptions.getTreatmentWithConfigByFlagSets()); + Assert.assertEquals(0, methodExceptions.getTrack()); //Check empty has worked methodExceptions = telemetryStorage.popExceptions(); - Assert.assertEquals(0, methodExceptions.get_treatment()); - Assert.assertEquals(0, methodExceptions.get_treatments()); - Assert.assertEquals(0, methodExceptions.get_treatmentsWithConfig()); - Assert.assertEquals(0, methodExceptions.get_treatmentWithConfig()); - Assert.assertEquals(0, methodExceptions.get_track()); + Assert.assertEquals(0, methodExceptions.getTreatment()); + Assert.assertEquals(0, methodExceptions.getTreatments()); + Assert.assertEquals(0, methodExceptions.getTreatmentsWithConfig()); + Assert.assertEquals(0, methodExceptions.getTreatmentWithConfig()); + Assert.assertEquals(0, methodExceptions.getTreatmentByFlagSet()); + Assert.assertEquals(0, methodExceptions.getTreatmentByFlagSets()); + Assert.assertEquals(0, methodExceptions.getTreatmentWithConfigByFlagSet()); + Assert.assertEquals(0, methodExceptions.getTreatmentWithConfigByFlagSets()); + Assert.assertEquals(0, methodExceptions.getTrack()); //AuthRejections 
telemetryStorage.recordAuthRejections(); @@ -158,13 +191,13 @@ public void testInMemoryTelemetryStorage() throws Exception { telemetryStorage.recordSuccessfulSync(LastSynchronizationRecordsEnum.TOKEN, 129); LastSynchronization lastSynchronization = telemetryStorage.getLastSynchronization(); - Assert.assertEquals(800, lastSynchronization.get_events()); - Assert.assertEquals(129, lastSynchronization.get_token()); - Assert.assertEquals(1580, lastSynchronization.get_segments()); - Assert.assertEquals(0, lastSynchronization.get_splits()); - Assert.assertEquals(10500, lastSynchronization.get_impressions()); - Assert.assertEquals(1500, lastSynchronization.get_impressionsCount()); - Assert.assertEquals(265, lastSynchronization.get_telemetry()); + Assert.assertEquals(800, lastSynchronization.getEvents()); + Assert.assertEquals(129, lastSynchronization.getToken()); + Assert.assertEquals(1580, lastSynchronization.getSegments()); + Assert.assertEquals(0, lastSynchronization.getSplits()); + Assert.assertEquals(10500, lastSynchronization.getImpressions()); + Assert.assertEquals(1500, lastSynchronization.getImpressionsCount()); + Assert.assertEquals(265, lastSynchronization.getTelemetry()); //Session length telemetryStorage.recordSessionLength(91218); @@ -183,21 +216,21 @@ public void testInMemoryTelemetryStorage() throws Exception { telemetryStorage.recordSyncError(ResourceEnum.TOKEN_SYNC, 403); HTTPErrors httpErrors = telemetryStorage.popHTTPErrors(); - Assert.assertEquals(2, httpErrors.get_telemetry().get(400l).intValue()); - Assert.assertEquals(1, httpErrors.get_segments().get(501l).intValue()); - Assert.assertEquals(2, httpErrors.get_impressions().get(403l).intValue()); - Assert.assertEquals(1, httpErrors.get_impressionsCount().get(403l).intValue()); - Assert.assertEquals(1, httpErrors.get_events().get(503l).intValue()); - Assert.assertEquals(1, httpErrors.get_splits().get(403l).intValue()); - Assert.assertEquals(1, httpErrors.get_token().get(403l).intValue()); + 
Assert.assertEquals(2, httpErrors.getTelemetry().get(400l).intValue()); + Assert.assertEquals(1, httpErrors.getSegments().get(501l).intValue()); + Assert.assertEquals(2, httpErrors.getImpressions().get(403l).intValue()); + Assert.assertEquals(1, httpErrors.getImpressionsCount().get(403l).intValue()); + Assert.assertEquals(1, httpErrors.getEvents().get(503l).intValue()); + Assert.assertEquals(1, httpErrors.getSplits().get(403l).intValue()); + Assert.assertEquals(1, httpErrors.getToken().get(403l).intValue()); //Streaming events StreamingEvent streamingEvent = new StreamingEvent(1, 290, 91218); telemetryStorage.recordStreamingEvents(streamingEvent); List streamingEvents = telemetryStorage.popStreamingEvents(); - Assert.assertEquals(290, streamingEvents.get(0).get_data()); - Assert.assertEquals(1, streamingEvents.get(0).get_type()); + Assert.assertEquals(290, streamingEvents.get(0).getData()); + Assert.assertEquals(1, streamingEvents.get(0).getType()); Assert.assertEquals(91218, streamingEvents.get(0).getTimestamp()); //Check list has been cleared @@ -215,5 +248,12 @@ public void testInMemoryTelemetryStorage() throws Exception { tags = telemetryStorage.popTags(); Assert.assertEquals(0, tags.size()); + //UpdatesFromSSE + telemetryStorage.recordUpdatesFromSSE(UpdatesFromSSEEnum.SPLITS); + telemetryStorage.recordUpdatesFromSSE(UpdatesFromSSEEnum.SPLITS); + telemetryStorage.recordUpdatesFromSSE(UpdatesFromSSEEnum.SPLITS); + + UpdatesFromSSE updatesFromSSE = telemetryStorage.popUpdatesFromSSE(); + Assert.assertEquals(3, updatesFromSSE.getSplits()); } -} +} \ No newline at end of file diff --git a/client/src/test/java/io/split/telemetry/synchronizer/TelemetryInMemorySubmitterTest.java b/client/src/test/java/io/split/telemetry/synchronizer/TelemetryInMemorySubmitterTest.java index de9afc6df..39a6be14c 100644 --- a/client/src/test/java/io/split/telemetry/synchronizer/TelemetryInMemorySubmitterTest.java +++ 
b/client/src/test/java/io/split/telemetry/synchronizer/TelemetryInMemorySubmitterTest.java @@ -1,6 +1,11 @@ package io.split.telemetry.synchronizer; import io.split.TestHelper; +import io.split.client.RequestDecorator; +import io.split.client.dtos.UniqueKeys; +import io.split.client.utils.SDKMetadata; +import io.split.service.SplitHttpClient; +import io.split.service.SplitHttpClientImpl; import io.split.storages.SegmentCacheConsumer; import io.split.storages.SplitCacheConsumer; import io.split.client.ApiKeyCounter; @@ -8,7 +13,14 @@ import io.split.telemetry.domain.Config; import io.split.telemetry.domain.Stats; import io.split.telemetry.domain.StreamingEvent; -import io.split.telemetry.domain.enums.*; + +import io.split.telemetry.domain.enums.EventsDataRecordsEnum; +import io.split.telemetry.domain.enums.HTTPLatenciesEnum; +import io.split.telemetry.domain.enums.ImpressionsDataTypeEnum; +import io.split.telemetry.domain.enums.LastSynchronizationRecordsEnum; +import io.split.telemetry.domain.enums.MethodEnum; +import io.split.telemetry.domain.enums.ResourceEnum; +import io.split.telemetry.domain.enums.UpdatesFromSSEEnum; import io.split.telemetry.storage.InMemoryTelemetryStorage; import io.split.telemetry.storage.TelemetryRuntimeProducer; import io.split.telemetry.storage.TelemetryStorage; @@ -26,6 +38,7 @@ import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -35,27 +48,51 @@ public class TelemetryInMemorySubmitterTest { public static final String TELEMETRY_ENDPOINT = "https://telemetry.split.io/api/v1"; @Test - public void testSynchronizeConfig() throws URISyntaxException, NoSuchMethodException, IOException, IllegalAccessException, InvocationTargetException { + public void testSynchronizeConfig() throws URISyntaxException, NoSuchMethodException, IOException, + IllegalAccessException, InvocationTargetException { 
CloseableHttpClient httpClient = TestHelper.mockHttpClient(TELEMETRY_ENDPOINT, HttpStatus.SC_OK); - TelemetrySynchronizer telemetrySynchronizer = getTelemetrySynchronizer(httpClient); + SplitHttpClient splitHttpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); + TelemetrySynchronizer telemetrySynchronizer = getTelemetrySynchronizer(splitHttpClient); SplitClientConfig splitClientConfig = SplitClientConfig.builder().build(); - telemetrySynchronizer.synchronizeConfig(splitClientConfig, 100l, new HashMap(), new ArrayList()); + telemetrySynchronizer.synchronizeConfig(splitClientConfig, 100l, new HashMap(), + new ArrayList()); Mockito.verify(httpClient, Mockito.times(1)).execute(Mockito.any()); } - @Test public void testSynchronizeStats() throws Exception { CloseableHttpClient httpClient = TestHelper.mockHttpClient(TELEMETRY_ENDPOINT, HttpStatus.SC_OK); - TelemetrySynchronizer telemetrySynchronizer = getTelemetrySynchronizer(httpClient); + SplitHttpClient splitHttpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); + TelemetrySynchronizer telemetrySynchronizer = getTelemetrySynchronizer(splitHttpClient); telemetrySynchronizer.synchronizeStats(); Mockito.verify(httpClient, Mockito.times(1)).execute(Mockito.any()); } @Test - public void testConfig() throws InvocationTargetException, NoSuchMethodException, IllegalAccessException, IOException, URISyntaxException, NoSuchFieldException, ClassNotFoundException { + public void testSynchronizeUniqueKeys() throws Exception { + CloseableHttpClient httpClient = TestHelper.mockHttpClient(TELEMETRY_ENDPOINT, HttpStatus.SC_OK); + SplitHttpClient splitHttpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); + TelemetrySynchronizer telemetrySynchronizer = getTelemetrySynchronizer(splitHttpClient); + + List keys = new ArrayList<>(); + keys.add("key-1"); + keys.add("key-2"); + List uniqueKeys = 
new ArrayList<>(); + uniqueKeys.add(new UniqueKeys.UniqueKey("feature-1", keys)); + UniqueKeys imp = new UniqueKeys(uniqueKeys); + + telemetrySynchronizer.synchronizeUniqueKeys(imp); + Mockito.verify(httpClient, Mockito.times(1)).execute(Mockito.any()); + } + + @Test + public void testConfig() throws InvocationTargetException, NoSuchMethodException, IllegalAccessException, + IOException, URISyntaxException, NoSuchFieldException { ApiKeyCounter.getApiKeyCounterInstance().clearApiKeys(); ApiKeyCounter.getApiKeyCounterInstance().add(FIRST_KEY); ApiKeyCounter.getApiKeyCounterInstance().add(FIRST_KEY); @@ -64,86 +101,112 @@ public void testConfig() throws InvocationTargetException, NoSuchMethodException ApiKeyCounter.getApiKeyCounterInstance().add(SECOND_KEY); TelemetryStorage telemetryStorage = new InMemoryTelemetryStorage(); CloseableHttpClient httpClient = TestHelper.mockHttpClient(TELEMETRY_ENDPOINT, HttpStatus.SC_OK); - TelemetryInMemorySubmitter telemetrySynchronizer = getTelemetrySynchronizer(httpClient); - SplitClientConfig splitClientConfig = SplitClientConfig.builder().build(); + SplitHttpClient splitHttpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); + TelemetryInMemorySubmitter telemetrySynchronizer = getTelemetrySynchronizer(splitHttpClient); + SplitClientConfig splitClientConfig = SplitClientConfig.builder() + .flagSetsFilter(Arrays.asList("a", "_b", "a", "a", "c", "d", "_d")).build(); populateConfig(telemetryStorage); - Field teleTelemetryStorageConsumer = TelemetryInMemorySubmitter.class.getDeclaredField("_teleTelemetryStorageConsumer"); - teleTelemetryStorageConsumer.setAccessible(true); + Field telemetryStorageConsumer = TelemetryInMemorySubmitter.class.getDeclaredField("_telemetryStorageConsumer"); + telemetryStorageConsumer.setAccessible(true); Field modifiersField = Field.class.getDeclaredField("modifiers"); modifiersField.setAccessible(true); - modifiersField.setInt(teleTelemetryStorageConsumer, 
teleTelemetryStorageConsumer.getModifiers() & ~Modifier.FINAL); - teleTelemetryStorageConsumer.set(telemetrySynchronizer, telemetryStorage); - Config config = telemetrySynchronizer.generateConfig(splitClientConfig, 100l, ApiKeyCounter.getApiKeyCounterInstance().getFactoryInstances(), new ArrayList<>()); - Assert.assertEquals(3, config.get_redundantFactories()); - Assert.assertEquals(2, config.get_burTimeouts()); - Assert.assertEquals(3, config.get_nonReadyUsages()); + modifiersField.setInt(telemetryStorageConsumer, telemetryStorageConsumer.getModifiers() & ~Modifier.FINAL); + telemetryStorageConsumer.set(telemetrySynchronizer, telemetryStorage); + Config config = telemetrySynchronizer.generateConfig(splitClientConfig, 100l, + ApiKeyCounter.getApiKeyCounterInstance().getFactoryInstances(), new ArrayList<>()); + Assert.assertEquals(3, config.getRedundantFactories()); + Assert.assertEquals(2, config.getBurTimeouts()); + Assert.assertEquals(3, config.getNonReadyUsages()); + Assert.assertEquals(7, config.getFlagSetsTotal()); + Assert.assertEquals(4, config.getFlagSetsInvalid()); } @Test public void testStats() throws Exception { TelemetryStorage telemetryStorage = new InMemoryTelemetryStorage(); CloseableHttpClient httpClient = TestHelper.mockHttpClient(TELEMETRY_ENDPOINT, HttpStatus.SC_OK); - TelemetryInMemorySubmitter telemetrySynchronizer = getTelemetrySynchronizer(httpClient); + SplitHttpClient splitHttpClient = SplitHttpClientImpl.create(httpClient, new RequestDecorator(null), "qwerty", + metadata()); + TelemetryInMemorySubmitter telemetrySynchronizer = getTelemetrySynchronizer(splitHttpClient); populateStats(telemetryStorage); - Field teleTelemetryStorageConsumer = TelemetryInMemorySubmitter.class.getDeclaredField("_teleTelemetryStorageConsumer"); - teleTelemetryStorageConsumer.setAccessible(true); + Field telemetryStorageConsumer = TelemetryInMemorySubmitter.class.getDeclaredField("_telemetryStorageConsumer"); + telemetryStorageConsumer.setAccessible(true); Field 
modifiersField = Field.class.getDeclaredField("modifiers"); modifiersField.setAccessible(true); - modifiersField.setInt(teleTelemetryStorageConsumer, teleTelemetryStorageConsumer.getModifiers() & ~Modifier.FINAL); + modifiersField.setInt(telemetryStorageConsumer, telemetryStorageConsumer.getModifiers() & ~Modifier.FINAL); - teleTelemetryStorageConsumer.set(telemetrySynchronizer, telemetryStorage); + telemetryStorageConsumer.set(telemetrySynchronizer, telemetryStorage); Stats stats = telemetrySynchronizer.generateStats(); - Assert.assertEquals(2, stats.get_methodLatencies().get_treatment().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(2, stats.get_methodLatencies().get_treatments().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(1, stats.get_methodLatencies().get_treatmentsWithConfig().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(1, stats.get_methodLatencies().get_treatmentWithConfig().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(0, stats.get_methodLatencies().get_track().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(3, stats.get_httpLatencies().get_splits().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(2, stats.get_httpLatencies().get_telemetry().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(2, stats.get_httpLatencies().get_events().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(1, stats.get_httpLatencies().get_segments().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(1, stats.get_httpLatencies().get_impressions().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(1, stats.get_httpLatencies().get_impressionsCount().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(0, stats.get_httpLatencies().get_token().stream().mapToInt(Long::intValue).sum()); - Assert.assertEquals(2, stats.get_methodExceptions().get_treatment()); - Assert.assertEquals(2, 
stats.get_methodExceptions().get_treatments()); - Assert.assertEquals(1, stats.get_methodExceptions().get_treatmentsWithConfig()); - Assert.assertEquals(1, stats.get_methodExceptions().get_treatmentWithConfig()); - Assert.assertEquals(0, stats.get_methodExceptions().get_track()); - Assert.assertEquals(1, stats.get_authRejections()); - Assert.assertEquals(2, stats.get_tokenRefreshes()); - Assert.assertEquals(4, stats.get_impressionsDeduped()); - Assert.assertEquals(12, stats.get_impressionsDropped()); - Assert.assertEquals(0, stats.get_impressionsQueued()); - Assert.assertEquals(10, stats.get_eventsDropped()); - Assert.assertEquals(3, stats.get_eventsQueued()); - Assert.assertEquals(800, stats.get_lastSynchronization().get_events()); - Assert.assertEquals(129, stats.get_lastSynchronization().get_token()); - Assert.assertEquals(1580, stats.get_lastSynchronization().get_segments()); - Assert.assertEquals(0, stats.get_lastSynchronization().get_splits()); - Assert.assertEquals(10500, stats.get_lastSynchronization().get_impressions()); - Assert.assertEquals(1500, stats.get_lastSynchronization().get_impressionsCount()); - Assert.assertEquals(265, stats.get_lastSynchronization().get_telemetry()); - Assert.assertEquals(91218, stats.get_sessionLengthMs()); - Assert.assertEquals(2, stats.get_httpErrors().get_telemetry().get(400l).intValue()); - Assert.assertEquals(1, stats.get_httpErrors().get_segments().get(501l).intValue()); - Assert.assertEquals(2, stats.get_httpErrors().get_impressions().get(403l).intValue()); - Assert.assertEquals(1, stats.get_httpErrors().get_impressionsCount().get(403l).intValue()); - Assert.assertEquals(1, stats.get_httpErrors().get_events().get(503l).intValue()); - Assert.assertEquals(1, stats.get_httpErrors().get_splits().get(403l).intValue()); - Assert.assertEquals(1, stats.get_httpErrors().get_token().get(403l).intValue()); - List streamingEvents = stats.get_streamingEvents(); - Assert.assertEquals(290, streamingEvents.get(0).get_data()); - 
Assert.assertEquals(1, streamingEvents.get(0).get_type()); + Assert.assertEquals(2, stats.getMethodLatencies().getTreatment().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(2, stats.getMethodLatencies().getTreatments().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, + stats.getMethodLatencies().getTreatmentsWithConfig().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, + stats.getMethodLatencies().getTreatmentWithConfig().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, + stats.getMethodLatencies().getTreatmentByFlagSet().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, + stats.getMethodLatencies().getTreatmentByFlagSets().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, + stats.getMethodLatencies().getTreatmentWithConfigByFlagSet().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, + stats.getMethodLatencies().getTreatmentWithConfigByFlagSets().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(0, stats.getMethodLatencies().getTrack().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(3, stats.getHttpLatencies().getSplits().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(2, stats.getHttpLatencies().getTelemetry().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(2, stats.getHttpLatencies().getEvents().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, stats.getHttpLatencies().getSegments().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, stats.getHttpLatencies().getImpressions().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(1, stats.getHttpLatencies().getImpressionsCount().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(0, stats.getHttpLatencies().getToken().stream().mapToInt(Long::intValue).sum()); + Assert.assertEquals(2, stats.getMethodExceptions().getTreatment()); + Assert.assertEquals(2, 
stats.getMethodExceptions().getTreatments()); + Assert.assertEquals(1, stats.getMethodExceptions().getTreatmentsWithConfig()); + Assert.assertEquals(1, stats.getMethodExceptions().getTreatmentWithConfig()); + Assert.assertEquals(1, stats.getMethodExceptions().getTreatmentWithConfig()); + Assert.assertEquals(1, stats.getMethodExceptions().getTreatmentByFlagSet()); + Assert.assertEquals(1, stats.getMethodExceptions().getTreatmentByFlagSets()); + Assert.assertEquals(1, stats.getMethodExceptions().getTreatmentWithConfigByFlagSet()); + Assert.assertEquals(1, stats.getMethodExceptions().getTreatmentWithConfigByFlagSets()); + Assert.assertEquals(0, stats.getMethodExceptions().getTrack()); + Assert.assertEquals(1, stats.getAuthRejections()); + Assert.assertEquals(2, stats.getTokenRefreshes()); + Assert.assertEquals(4, stats.getImpressionsDeduped()); + Assert.assertEquals(12, stats.getImpressionsDropped()); + Assert.assertEquals(0, stats.getImpressionsQueued()); + Assert.assertEquals(10, stats.getEventsDropped()); + Assert.assertEquals(3, stats.getEventsQueued()); + Assert.assertEquals(800, stats.getLastSynchronization().getEvents()); + Assert.assertEquals(129, stats.getLastSynchronization().getToken()); + Assert.assertEquals(1580, stats.getLastSynchronization().getSegments()); + Assert.assertEquals(0, stats.getLastSynchronization().getSplits()); + Assert.assertEquals(10500, stats.getLastSynchronization().getImpressions()); + Assert.assertEquals(1500, stats.getLastSynchronization().getImpressionsCount()); + Assert.assertEquals(265, stats.getLastSynchronization().getTelemetry()); + Assert.assertEquals(91218, stats.getSessionLengthMs()); + Assert.assertEquals(2, stats.getHttpErrors().getTelemetry().get(400l).intValue()); + Assert.assertEquals(1, stats.getHttpErrors().getSegments().get(501l).intValue()); + Assert.assertEquals(2, stats.getHttpErrors().getImpressions().get(403l).intValue()); + Assert.assertEquals(1, 
stats.getHttpErrors().getImpressionsCount().get(403l).intValue()); + Assert.assertEquals(1, stats.getHttpErrors().getEvents().get(503l).intValue()); + Assert.assertEquals(1, stats.getHttpErrors().getSplits().get(403l).intValue()); + Assert.assertEquals(1, stats.getHttpErrors().getToken().get(403l).intValue()); + List streamingEvents = stats.getStreamingEvents(); + Assert.assertEquals(290, streamingEvents.get(0).getData()); + Assert.assertEquals(1, streamingEvents.get(0).getType()); Assert.assertEquals(91218, streamingEvents.get(0).getTimestamp()); + Assert.assertEquals(1, stats.getUpdatesFromSSE().getSplits()); } - private TelemetryInMemorySubmitter getTelemetrySynchronizer(CloseableHttpClient httpClient) throws URISyntaxException, InvocationTargetException, NoSuchMethodException, IllegalAccessException, IOException { + private TelemetryInMemorySubmitter getTelemetrySynchronizer(SplitHttpClient httpClient) throws URISyntaxException { TelemetryStorageConsumer consumer = Mockito.mock(InMemoryTelemetryStorage.class); TelemetryRuntimeProducer telemetryRuntimeProducer = Mockito.mock(TelemetryRuntimeProducer.class); SplitCacheConsumer splitCacheConsumer = Mockito.mock(SplitCacheConsumer.class); SegmentCacheConsumer segmentCacheConsumer = Mockito.mock(SegmentCacheConsumer.class); - TelemetryInMemorySubmitter telemetrySynchronizer = new TelemetryInMemorySubmitter(httpClient, URI.create(TELEMETRY_ENDPOINT), consumer, splitCacheConsumer, segmentCacheConsumer, telemetryRuntimeProducer, 0l); + TelemetryInMemorySubmitter telemetrySynchronizer = new TelemetryInMemorySubmitter(httpClient, + URI.create(TELEMETRY_ENDPOINT), consumer, splitCacheConsumer, segmentCacheConsumer, + telemetryRuntimeProducer, 0l); return telemetrySynchronizer; } @@ -154,6 +217,10 @@ private void populateStats(TelemetryStorage telemetryStorage) { telemetryStorage.recordLatency(MethodEnum.TREATMENTS, 500l * 1000); telemetryStorage.recordLatency(MethodEnum.TREATMENT_WITH_CONFIG, 800l * 1000); 
telemetryStorage.recordLatency(MethodEnum.TREATMENTS_WITH_CONFIG, 1000l * 1000); + telemetryStorage.recordLatency(MethodEnum.TREATMENTS_BY_FLAG_SET, 1000l * 1000); + telemetryStorage.recordLatency(MethodEnum.TREATMENTS_BY_FLAG_SETS, 1000l * 1000); + telemetryStorage.recordLatency(MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SET, 1000l * 1000); + telemetryStorage.recordLatency(MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SETS, 1000l * 1000); telemetryStorage.recordSyncLatency(HTTPLatenciesEnum.TELEMETRY, 1500l * 1000); telemetryStorage.recordSyncLatency(HTTPLatenciesEnum.TELEMETRY, 2000l * 1000); @@ -172,6 +239,10 @@ private void populateStats(TelemetryStorage telemetryStorage) { telemetryStorage.recordException(MethodEnum.TREATMENTS); telemetryStorage.recordException(MethodEnum.TREATMENT_WITH_CONFIG); telemetryStorage.recordException(MethodEnum.TREATMENTS_WITH_CONFIG); + telemetryStorage.recordException(MethodEnum.TREATMENTS_BY_FLAG_SET); + telemetryStorage.recordException(MethodEnum.TREATMENTS_BY_FLAG_SETS); + telemetryStorage.recordException(MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SET); + telemetryStorage.recordException(MethodEnum.TREATMENTS_WITH_CONFIG_BY_FLAG_SETS); telemetryStorage.recordAuthRejections(); @@ -211,6 +282,7 @@ private void populateStats(TelemetryStorage telemetryStorage) { StreamingEvent streamingEvent = new StreamingEvent(1, 290, 91218); telemetryStorage.recordStreamingEvents(streamingEvent); + telemetryStorage.recordUpdatesFromSSE(UpdatesFromSSEEnum.SPLITS); } private void populateConfig(TelemetryStorage telemetryStorage) { @@ -221,4 +293,8 @@ private void populateConfig(TelemetryStorage telemetryStorage) { telemetryStorage.recordNonReadyUsage(); } -} \ No newline at end of file + private SDKMetadata metadata() { + return new SDKMetadata("java-1.2.3", "1.2.3.4", "someIP"); + } + +} diff --git a/client/src/test/java/io/split/telemetry/synchronizer/TelemetrySyncTaskTest.java 
b/client/src/test/java/io/split/telemetry/synchronizer/TelemetrySyncTaskTest.java index 781d2dcf4..fbb6470b4 100644 --- a/client/src/test/java/io/split/telemetry/synchronizer/TelemetrySyncTaskTest.java +++ b/client/src/test/java/io/split/telemetry/synchronizer/TelemetrySyncTaskTest.java @@ -9,7 +9,8 @@ public class TelemetrySyncTaskTest { public void testSynchronizationTask() throws Exception { TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetryInMemorySubmitter.class); Mockito.doNothing().when(telemetrySynchronizer).synchronizeStats(); - TelemetrySyncTask telemetrySyncTask = new TelemetrySyncTask(1, telemetrySynchronizer); + TelemetrySyncTask telemetrySyncTask = new TelemetrySyncTask(1, telemetrySynchronizer, null); + telemetrySyncTask.startScheduledTask(); Thread.sleep(2900); Mockito.verify(telemetrySynchronizer, Mockito.times(2)).synchronizeStats(); } @@ -18,12 +19,13 @@ public void testSynchronizationTask() throws Exception { public void testStopSynchronizationTask() throws Exception { TelemetrySynchronizer telemetrySynchronizer = Mockito.mock(TelemetryInMemorySubmitter.class); // Mockito.doNothing().when(telemetrySynchronizer).synchronizeStats(); - TelemetrySyncTask telemetrySyncTask = new TelemetrySyncTask(1, telemetrySynchronizer); + TelemetrySyncTask telemetrySyncTask = new TelemetrySyncTask(1, telemetrySynchronizer, null); + telemetrySyncTask.startScheduledTask(); Thread.sleep(2100); Mockito.verify(telemetrySynchronizer, Mockito.times(2)).synchronizeStats(); - telemetrySyncTask.stopScheduledTask(1l, 1l, 1l); + telemetrySyncTask.stopScheduledTask(); Mockito.verify(telemetrySynchronizer, Mockito.times(2)).synchronizeStats(); - Mockito.verify(telemetrySynchronizer, Mockito.times(1)).finalSynchronization(1l, 1l, 1l); + Mockito.verify(telemetrySynchronizer, Mockito.times(1)).finalSynchronization(); } } \ No newline at end of file diff --git a/client/src/test/resources/keyStore.p12 b/client/src/test/resources/keyStore.p12 new file mode 100644 
index 000000000..ce2b34171 Binary files /dev/null and b/client/src/test/resources/keyStore.p12 differ diff --git a/client/src/test/resources/org/powermock/extensions/configuration.properties b/client/src/test/resources/org/powermock/extensions/configuration.properties new file mode 100644 index 000000000..a8ebaeba3 --- /dev/null +++ b/client/src/test/resources/org/powermock/extensions/configuration.properties @@ -0,0 +1 @@ +powermock.global-ignore=jdk.internal.reflect.*,javax.net.ssl.* \ No newline at end of file diff --git a/client/src/test/resources/rule_base_segments.json b/client/src/test/resources/rule_base_segments.json new file mode 100644 index 000000000..65cd9a5d8 --- /dev/null +++ b/client/src/test/resources/rule_base_segments.json @@ -0,0 +1,61 @@ +{"ff": {"d": [], "t": -1, "s": -1}, +"rbs": {"t": -1, "s": -1, "d": + [{ + "changeNumber": 5, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{"keys":["mauro@split.io","gaston@split.io"],"segments":[]}, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ]}, + { + "changeNumber": 5, + "name": "dependent_rbs", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded": { + "keys": [], + "segments": [] + }, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "IN_RULE_BASED_SEGMENT", + "negate": false, + "userDefinedSegmentMatcherData": { + "segmentName": "sample_rule_based_segment" + } + } + ] + } + } + ] + }] +}} diff --git a/client/src/test/resources/rule_base_segments2.json b/client/src/test/resources/rule_base_segments2.json new file mode 100644 index 000000000..991fa81ba --- /dev/null +++ 
b/client/src/test/resources/rule_base_segments2.json @@ -0,0 +1,63 @@ +{"ff": {"d": [], "t": -1, "s": -1}, +"rbs": {"t": -1, "s": -1, "d": [ + { + "changeNumber": 5, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[{"type":"rule-based", "name":"no_excludes"}] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "@harness.io" + ] + } + } + ] + } + } + ] + }, + { + "changeNumber": 5, + "name": "no_excludes", + "status": "ACTIVE", + "trafficTypeName": "user", + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ] + } +]}} diff --git a/client/src/test/resources/rule_base_segments3.json b/client/src/test/resources/rule_base_segments3.json new file mode 100644 index 000000000..f738f3f77 --- /dev/null +++ b/client/src/test/resources/rule_base_segments3.json @@ -0,0 +1,35 @@ +{"ff": {"d": [], "t": -1, "s": -1}, +"rbs": {"t": -1, "s": -1, "d": [ + { + "changeNumber": 5, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[{"type":"standard", "name":"segment1"}] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ] + } +]}} diff --git 
a/client/src/test/resources/sanitizer/sameInAddedAndRemoved.json b/client/src/test/resources/sanitizer/sameInAddedAndRemoved.json new file mode 100644 index 000000000..5e87f8515 --- /dev/null +++ b/client/src/test/resources/sanitizer/sameInAddedAndRemoved.json @@ -0,0 +1,15 @@ +{ + "name": "segment3", + "added": [ + "testo2222", + "test_string_without_attr", + "Test_Save_1", + "test_in_segment" + ], + "removed": [ + "Test_Save_1", + "test_in_segment" + ], + "since": -1, + "till": 1585948850110 +} diff --git a/client/src/test/resources/sanitizer/segmentChangeSinceTill.json b/client/src/test/resources/sanitizer/segmentChangeSinceTill.json new file mode 100644 index 000000000..e8c9b3fe0 --- /dev/null +++ b/client/src/test/resources/sanitizer/segmentChangeSinceTill.json @@ -0,0 +1,12 @@ +{ + "name": "segment3", + "added": [ + "testo2222", + "test_string_without_attr", + "Test_Save_1", + "test_in_segment" + ], + "removed": [], + "since": -3223, + "till": 0 +} \ No newline at end of file diff --git a/client/src/test/resources/sanitizer/segmentNameNull.json b/client/src/test/resources/sanitizer/segmentNameNull.json new file mode 100644 index 000000000..8db5e86ef --- /dev/null +++ b/client/src/test/resources/sanitizer/segmentNameNull.json @@ -0,0 +1,11 @@ +{ + "added": [ + "testo2222", + "test_string_without_attr", + "Test_Save_1", + "test_in_segment" + ], + "removed": [], + "since": -1, + "till": 1585948850110 +} \ No newline at end of file diff --git a/client/src/test/resources/sanitizer/splitChangeSplitsToSanitize.json b/client/src/test/resources/sanitizer/splitChangeSplitsToSanitize.json new file mode 100644 index 000000000..de35084ed --- /dev/null +++ b/client/src/test/resources/sanitizer/splitChangeSplitsToSanitize.json @@ -0,0 +1,132 @@ +{"ff": { + "d": [ + { + "name": "test1", + "trafficAllocation": 101, + "killed": false, + "changeNumber": 1660326991072, + "algo": 2, + "configurations": {} + }, + { + "trafficTypeName": "user", + "trafficAllocation": 100, + 
"trafficAllocationSeed": -670005248, + "seed": -1297078412, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1650919058695, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "WHITELIST", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": null, + "matcherType": "WHITELIST", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": { + "whitelist": [ + "admin" + ] + }, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "off", + "size": 100 + } + ], + "label": "whitelisted" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + }, + { + "treatment": "V5", + "size": 0 + }, + { + "treatment": "v8", + "size": 0 + } + ], + "label": "default rule" + } + ] + } + ], + "s": -1, + "t": 1660326991072 + }, +"rbs":{ + "d": [ + {"changeNumber":5, + "name":"sample_rule_based_segment", + "status":"ACTIVE", + "trafficTypeName":"user", + "excluded":{"keys":["mauro@split.io"]} + }, + {"changeNumber":5, + "status":"ACTIVE", + "trafficTypeName":"user", + "excluded":{"keys":["mauro@split.io"],"segments":[]}, + "conditions":[ + {"conditionType":"ROLLOUT", + "matcherGroup":{"combiner":"AND", + "matchers":[ + {"keySelector":{"trafficType":"user","attribute":"email"}, + "matcherType":"ENDS_WITH", + "negate":false, + 
"whitelistMatcherData":{"whitelist":["@split.io"]} + } + ] + } + } + ] + } + + ], + "s": -1, + "t": -1} +} \ No newline at end of file diff --git a/client/src/test/resources/sanitizer/splitChangeTillSanitization.json b/client/src/test/resources/sanitizer/splitChangeTillSanitization.json new file mode 100644 index 000000000..5a1f806fc --- /dev/null +++ b/client/src/test/resources/sanitizer/splitChangeTillSanitization.json @@ -0,0 +1,55 @@ +{"ff": { + "d": [ + { + "trafficTypeName": "user", + "name": "test1", + "trafficAllocation": 100, + "trafficAllocationSeed": -1364119282, + "seed": -605938843, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1660326991072, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ], + "label": "default rule" + } + ] + } + ], + "s": 398, + "t": 0 +}, "rbs":{"d": [], "s": -1, "t": 0}} \ No newline at end of file diff --git a/client/src/test/resources/sanitizer/splitChangeWithoutSplits.json b/client/src/test/resources/sanitizer/splitChangeWithoutSplits.json new file mode 100644 index 000000000..29463bffb --- /dev/null +++ b/client/src/test/resources/sanitizer/splitChangeWithoutSplits.json @@ -0,0 +1,4 @@ +{"ff": { + "s": -1, + "t": 2434234234 +}, "rbs":{"s": -1, "t": -1}} \ No newline at end of file diff --git a/client/src/test/resources/sanitizer/splitChangerMatchersNull.json b/client/src/test/resources/sanitizer/splitChangerMatchersNull.json new 
file mode 100644 index 000000000..282f3b548 --- /dev/null +++ b/client/src/test/resources/sanitizer/splitChangerMatchersNull.json @@ -0,0 +1,77 @@ +{"ff": { + "d": [ + { + "name": "test1", + "trafficTypeName": "user", + "trafficAllocation": 100, + "trafficAllocationSeed": -670005248, + "seed": -1297078412, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1650919058695, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "WHITELIST", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": null, + "matcherType": "WHITELIST", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": { + "whitelist": [ + "admin" + ] + }, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "off", + "size": 100 + } + ], + "label": "whitelisted" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND" + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + }, + { + "treatment": "V5", + "size": 0 + }, + { + "treatment": "v8", + "size": 0 + } + ], + "label": "default rule" + } + ] + } + ], + "s": -1, + "t": 1660326991072 +}, "rbs":{"d": [], "s": -1, "t": -1}} \ No newline at end of file diff --git a/client/src/test/resources/segmentFetcher/segment_test.json b/client/src/test/resources/segmentFetcher/segment_test.json new file mode 100644 index 000000000..add131bbb --- /dev/null +++ b/client/src/test/resources/segmentFetcher/segment_test.json @@ -0,0 +1 @@ +{"name":"segment_test","added":["user-1"],"removed":["user-2","user-3"],"since":-1,"till":-1} \ No newline at end of file diff --git a/client/src/test/resources/segment_1.json b/client/src/test/resources/segment_1.json new file mode 100644 index 000000000..5ba630266 --- /dev/null +++ 
b/client/src/test/resources/segment_1.json @@ -0,0 +1,12 @@ +{ + "name": "segment_1", + "added": [ + "user1", + "user2", + "user3", + "user4" + ], + "removed": [], + "since": -1, + "till": 1585948850110 +} \ No newline at end of file diff --git a/client/src/test/resources/segment_2.json b/client/src/test/resources/segment_2.json new file mode 100644 index 000000000..7e8c81e79 --- /dev/null +++ b/client/src/test/resources/segment_2.json @@ -0,0 +1,10 @@ +{ + "name": "segment_2", + "added": [ + "user1", + "user4" + ], + "removed": [], + "since": -1, + "till": 1585948850110 +} \ No newline at end of file diff --git a/client/src/test/resources/segment_test.json b/client/src/test/resources/segment_test.json new file mode 100644 index 000000000..a1458fc42 --- /dev/null +++ b/client/src/test/resources/segment_test.json @@ -0,0 +1,12 @@ +{ + "name": "segment_test", + "added": [ + "user1", + "user2", + "user3", + "user4" + ], + "removed": [], + "since": -1, + "till": 1585948850110 +} \ No newline at end of file diff --git a/client/src/test/resources/semver/between-semver.csv b/client/src/test/resources/semver/between-semver.csv new file mode 100644 index 000000000..71bdf3b24 --- /dev/null +++ b/client/src/test/resources/semver/between-semver.csv @@ -0,0 +1,18 @@ +version1,version2,version3,expected +1.1.1,2.2.2,3.3.3,true +1.1.1-rc.1,1.1.1-rc.2,1.1.1-rc.3,true +1.0.0-alpha,1.0.0-alpha.1,1.0.0-alpha.beta,true +1.0.0-alpha.1,1.0.0-alpha.beta,1.0.0-beta,true +1.0.0-alpha.beta,1.0.0-beta,1.0.0-beta.2,true +1.0.0-beta,1.0.0-beta.2,1.0.0-beta.11,true +1.0.0-beta.2,1.0.0-beta.11,1.0.0-rc.1,true +1.0.0-beta.11,1.0.0-rc.1,1.0.0,true +1.1.2,1.1.3,1.1.4,true +1.2.1,1.3.1,1.4.1,true +2.0.0,3.0.0,4.0.0,true +2.2.2,2.2.3-rc1,2.2.3,true +2.2.2,2.3.2-rc100,2.3.3,true +1.0.0-rc.1+build.1,1.2.3-beta,1.2.3-rc.1+build.123,true +3.3.3,3.3.3-alpha,3.3.4,false +2.2.2-rc.1,2.2.2+metadata,2.2.2-rc.10,false +1.1.1-rc.1,1.1.1-rc.3,1.1.1-rc.2,false \ No newline at end of file diff --git 
a/client/src/test/resources/semver/equal-to-semver.csv b/client/src/test/resources/semver/equal-to-semver.csv new file mode 100644 index 000000000..87d8db5ae --- /dev/null +++ b/client/src/test/resources/semver/equal-to-semver.csv @@ -0,0 +1,7 @@ +version1,version2,equals +1.1.1,1.1.1,true +1.1.1,1.1.1+metadata,false +1.1.1,1.1.1-rc.1,false +88.88.88,88.88.88,true +1.2.3----RC-SNAPSHOT.12.9.1--.12,1.2.3----RC-SNAPSHOT.12.9.1--.12,true +10.2.3-DEV-SNAPSHOT,10.2.3-SNAPSHOT-123,false \ No newline at end of file diff --git a/client/src/test/resources/semver/invalid-semantic-versions.csv b/client/src/test/resources/semver/invalid-semantic-versions.csv new file mode 100644 index 000000000..7a7f9fbcf --- /dev/null +++ b/client/src/test/resources/semver/invalid-semantic-versions.csv @@ -0,0 +1,28 @@ +invalid +1 +1.2 +1.alpha.2 ++invalid +-invalid +-invalid+invalid +-invalid.01 +alpha +alpha.beta +alpha.beta.1 +alpha.1 +alpha+beta +alpha_beta +alpha. +alpha.. +beta +-alpha. +1.2 +1.2.3.DEV +1.2-SNAPSHOT +1.2.31.2.3----RC-SNAPSHOT.12.09.1--..12+788 +1.2-RC-SNAPSHOT +-1.0.3-gamma+b7718 ++justmeta +1.1.1+ +1.1.1- +#99999999999999999999999.999999999999999999.99999999999999999----RC-SNAPSHOT.12.09.1--------------------------------..12 \ No newline at end of file diff --git a/client/src/test/resources/semver/semver-splits.json b/client/src/test/resources/semver/semver-splits.json new file mode 100644 index 000000000..a266c5676 --- /dev/null +++ b/client/src/test/resources/semver/semver-splits.json @@ -0,0 +1,740 @@ +{ "ff": { + "d":[ + { + "trafficTypeName":"user", + "name":"semver_between", + "trafficAllocation":100, + "trafficAllocationSeed":1068038034, + "seed":-1053389887, + "status":"ACTIVE", + "killed":false, + "defaultTreatment":"off", + "changeNumber":1675259356568, + "algo":2, + "configurations":null, + "conditions":[ + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + 
"attribute":"version" + }, + "matcherType":"BETWEEN_SEMVER", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "dependencyMatcherData":null, + "booleanMatcherData":null, + "stringMatcherData":null, + "betweenStringMatcherData":{ + "start":"1.22.9", + "end":"2.1.0" + } + } + ] + }, + "partitions":[ + { + "treatment":"on", + "size":100 + }, + { + "treatment":"off", + "size":0 + } + ], + "label":"between semver" + }, + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":null + }, + "matcherType":"ALL_KEYS", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "booleanMatcherData":null, + "dependencyMatcherData":null, + "stringMatcherData":null + } + ] + }, + "partitions":[ + { + "treatment":"on", + "size":0 + }, + { + "treatment":"off", + "size":100 + } + ], + "label":"default rule" + } + ] + }, + { + "trafficTypeName":"user", + "name":"semver_equalto", + "trafficAllocation":100, + "trafficAllocationSeed":1068038034, + "seed":-1053389887, + "status":"ACTIVE", + "killed":false, + "defaultTreatment":"off", + "changeNumber":1675259356568, + "algo":2, + "configurations":null, + "conditions":[ + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":"version" + }, + "matcherType":"EQUAL_TO_SEMVER", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "dependencyMatcherData":null, + "booleanMatcherData":null, + "stringMatcherData":"1.22.9" + } + ] + }, + "partitions":[ + { + "treatment":"on", + "size":100 + }, + { + "treatment":"off", + "size":0 + } + ], + "label":"equal to semver" + }, + { + 
"conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":null + }, + "matcherType":"ALL_KEYS", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "booleanMatcherData":null, + "dependencyMatcherData":null, + "stringMatcherData":null + } + ] + }, + "partitions":[ + { + "treatment":"on", + "size":0 + }, + { + "treatment":"off", + "size":100 + } + ], + "label":"default rule" + } + ] + }, + { + "trafficTypeName":"user", + "name":"semver_greater_or_equalto", + "trafficAllocation":100, + "trafficAllocationSeed":1068038034, + "seed":-1053389887, + "status":"ACTIVE", + "killed":false, + "defaultTreatment":"off", + "changeNumber":1675259356568, + "algo":2, + "configurations":null, + "conditions":[ + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":"version" + }, + "matcherType":"GREATER_THAN_OR_EQUAL_TO_SEMVER", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "dependencyMatcherData":null, + "booleanMatcherData":null, + "stringMatcherData":"1.22.9" + } + ] + }, + "partitions":[ + { + "treatment":"on", + "size":100 + }, + { + "treatment":"off", + "size":0 + } + ], + "label":"greater than or equal to semver" + }, + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":null + }, + "matcherType":"ALL_KEYS", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "booleanMatcherData":null, + "dependencyMatcherData":null, + "stringMatcherData":null + } + ] + }, + "partitions":[ + { + "treatment":"on", + "size":0 + 
}, + { + "treatment":"off", + "size":100 + } + ], + "label":"default rule" + } + ] + }, + { + "trafficTypeName":"user", + "name":"semver_inlist", + "trafficAllocation":100, + "trafficAllocationSeed":1068038034, + "seed":-1053389887, + "status":"ACTIVE", + "killed":false, + "defaultTreatment":"off", + "changeNumber":1675259356568, + "algo":2, + "configurations":null, + "conditions":[ + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":"version" + }, + "matcherType":"IN_LIST_SEMVER", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":{ + "whitelist":[ + "1.22.9", + "2.1.0" + ] + }, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "dependencyMatcherData":null, + "booleanMatcherData":null, + "stringMatcherData":null, + "betweenStringMatcherData":null + } + ] + }, + "partitions":[ + { + "treatment":"on", + "size":100 + }, + { + "treatment":"off", + "size":0 + } + ], + "label":"in list semver" + }, + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":null + }, + "matcherType":"ALL_KEYS", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "booleanMatcherData":null, + "dependencyMatcherData":null, + "stringMatcherData":null + } + ] + }, + "partitions":[ + { + "treatment":"on", + "size":0 + }, + { + "treatment":"off", + "size":100 + } + ], + "label":"default rule" + } + ] + }, + { + "trafficTypeName":"user", + "name":"semver_less_or_equalto", + "trafficAllocation":100, + "trafficAllocationSeed":1068038034, + "seed":-1053389887, + "status":"ACTIVE", + "killed":false, + "defaultTreatment":"off", + "changeNumber":1675259356568, + "algo":2, + "configurations":null, + "conditions":[ + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + 
"combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":"version" + }, + "matcherType":"LESS_THAN_OR_EQUAL_TO_SEMVER", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "dependencyMatcherData":null, + "booleanMatcherData":null, + "stringMatcherData":"1.22.9" + } + ] + }, + "partitions":[ + { + "treatment":"on", + "size":100 + }, + { + "treatment":"off", + "size":0 + } + ], + "label":"less than or equal to semver" + }, + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":null + }, + "matcherType":"ALL_KEYS", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "booleanMatcherData":null, + "dependencyMatcherData":null, + "stringMatcherData":null + } + ] + }, + "partitions":[ + { + "treatment":"on", + "size":0 + }, + { + "treatment":"off", + "size":100 + } + ], + "label":"default rule" + } + ] + } + ], + "s":-1, + "t":1675259356568}, + "rbs": { + "t": 1675259356568, + "s": -1, + "d": [ + { + "trafficTypeName":"user", + "name":"rbs_semver_between", + "status":"ACTIVE", + "changeNumber":1675259356568, + "conditions":[ + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":"version" + }, + "matcherType":"BETWEEN_SEMVER", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "dependencyMatcherData":null, + "booleanMatcherData":null, + "stringMatcherData":null, + "betweenStringMatcherData":{ + "start":"1.22.9", + "end":"2.1.0" + } + } + ] + }, + "label":"between semver" + }, + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + 
"matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":null + }, + "matcherType":"ALL_KEYS", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "booleanMatcherData":null, + "dependencyMatcherData":null, + "stringMatcherData":null + } + ] + }, + "label":"default rule" + } + ], + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[] + } + }, + { + "name":"rbs_semver_equalto", + "status":"ACTIVE", + "changeNumber":1675259356568, + "conditions":[ + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":"version" + }, + "matcherType":"EQUAL_TO_SEMVER", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "dependencyMatcherData":null, + "booleanMatcherData":null, + "stringMatcherData":"1.22.9" + } + ] + }, + "label":"equal to semver" + }, + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":null + }, + "matcherType":"ALL_KEYS", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "booleanMatcherData":null, + "dependencyMatcherData":null, + "stringMatcherData":null + } + ] + }, + "label":"default rule" + } + ], + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[] + } + }, + { + "name":"rbs_semver_greater_or_equalto", + "status":"ACTIVE", + "defaultTreatment":"off", + "changeNumber":1675259356568, + "conditions":[ + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[{ + "keySelector":{ + "trafficType":"user", + "attribute":"version" + }, + "matcherType":"GREATER_THAN_OR_EQUAL_TO_SEMVER", + 
"negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "dependencyMatcherData":null, + "booleanMatcherData":null, + "stringMatcherData":"1.22.9" + }]}, + "label":"greater than or equal to semver" + }, + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":null + }, + "matcherType":"ALL_KEYS", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "booleanMatcherData":null, + "dependencyMatcherData":null, + "stringMatcherData":null + } + ] + }, + "label":"default rule" + } + ], + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[] + } + }, + { + "trafficTypeName":"user", + "name":"rbs_semver_inlist", + "status":"ACTIVE", + "changeNumber":1675259356568, + "conditions":[ + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":"version" + }, + "matcherType":"IN_LIST_SEMVER", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":{ + "whitelist":[ + "1.22.9", + "2.1.0" + ] + }, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "dependencyMatcherData":null, + "booleanMatcherData":null, + "stringMatcherData":null, + "betweenStringMatcherData":null + }]}, + "label":"in list semver" + }, + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":null + }, + "matcherType":"ALL_KEYS", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "booleanMatcherData":null, + "dependencyMatcherData":null, + "stringMatcherData":null + } + ]}, + "label":"default 
rule" + }], + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[] + } + }, + { + "trafficTypeName":"user", + "name":"rbs_semver_less_or_equalto", + "trafficAllocation":100, + "trafficAllocationSeed":1068038034, + "seed":-1053389887, + "status":"ACTIVE", + "killed":false, + "defaultTreatment":"off", + "changeNumber":1675259356568, + "algo":2, + "configurations":null, + "conditions":[ + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":"version" + }, + "matcherType":"LESS_THAN_OR_EQUAL_TO_SEMVER", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "dependencyMatcherData":null, + "booleanMatcherData":null, + "stringMatcherData":"1.22.9" + }]}, + "label":"less than or equal to semver" + }, + { + "conditionType":"ROLLOUT", + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "keySelector":{ + "trafficType":"user", + "attribute":null + }, + "matcherType":"ALL_KEYS", + "negate":false, + "userDefinedSegmentMatcherData":null, + "whitelistMatcherData":null, + "unaryNumericMatcherData":null, + "betweenMatcherData":null, + "booleanMatcherData":null, + "dependencyMatcherData":null, + "stringMatcherData":null + }]}, + "label":"default rule" + }], + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[] + } + }] + } +} \ No newline at end of file diff --git a/client/src/test/resources/semver/valid-semantic-versions.csv b/client/src/test/resources/semver/valid-semantic-versions.csv new file mode 100644 index 000000000..f491e77f2 --- /dev/null +++ b/client/src/test/resources/semver/valid-semantic-versions.csv @@ -0,0 +1,25 @@ +higher,lower +1.1.2,1.1.1 +1.0.0,1.0.0-rc.1 +1.1.0-rc.1,1.0.0-beta.11 +1.0.0-beta.11,1.0.0-beta.2 +1.0.0-beta.2,1.0.0-beta +1.0.0-beta,1.0.0-alpha.beta +1.0.0-alpha.beta,1.0.0-alpha.1 +1.0.0-alpha.1,1.0.0-alpha 
+2.2.2-rc.2+metadata-lalala,2.2.2-rc.1.2 +1.2.3,0.0.4 +1.1.2+meta,1.1.2-prerelease+meta +1.0.0-beta,1.0.0-alpha +1.0.0-alpha0.valid,1.0.0-alpha.0valid +1.0.0-rc.1+build.1,1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay +10.2.3-DEV-SNAPSHOT,1.2.3-SNAPSHOT-123 +1.1.1-rc2,1.0.0-0A.is.legal +1.2.3----RC-SNAPSHOT.12.9.1--.12+788,1.2.3----R-S.12.9.1--.12+meta +1.2.3----RC-SNAPSHOT.12.9.1--.12.88,1.2.3----RC-SNAPSHOT.12.9.1--.12 +9223372036854775807.9223372036854775807.9223372036854775807,9223372036854775807.9223372036854775807.9223372036854775806 +1.1.1-alpha.beta.rc.build.java.pr.support.10,1.1.1-alpha.beta.rc.build.java.pr.support +1.1.2,1.1.1 +1.2.1,1.1.1 +2.1.1,1.1.1 +1.1.1-rc.1,1.1.1-rc.0 \ No newline at end of file diff --git a/client/src/test/resources/split-change-special-characters.json b/client/src/test/resources/split-change-special-characters.json index 276ab19ee..ae99d7a7f 100644 --- a/client/src/test/resources/split-change-special-characters.json +++ b/client/src/test/resources/split-change-special-characters.json @@ -1,5 +1,5 @@ -{ - "splits": [ +{ "ff": { + "d": [ { "trafficTypeName": "user", "name": "DEMO_MURMUR2", @@ -10,6 +10,10 @@ "killed": false, "defaultTreatment": "of", "changeNumber": 1491244291288, + "sets": [ + "set1", + "set2" + ], "algo": 2, "configurations": { "on": "{\"test\": \"blue\",\"grüne Straße\": 13}", @@ -50,6 +54,7 @@ ] } ], - "since": 1491244291288, - "till": 1491244291288 + "s": 1491244291288, + "t": 1491244291288}, + "rbs": {"d": [], "s": -1, "t": -1} } diff --git a/client/src/test/resources/splitFetcher/test_0.json b/client/src/test/resources/splitFetcher/test_0.json new file mode 100644 index 000000000..82e6bbad5 --- /dev/null +++ b/client/src/test/resources/splitFetcher/test_0.json @@ -0,0 +1,4 @@ +{"ff": {"d": 
+[{"trafficTypeName":"user","name":"SPLIT_1","trafficAllocation":100,"trafficAllocationSeed":-1780071202,"seed":-1442762199,"status":"ACTIVE","killed":false,"defaultTreatment":"off","changeNumber":1675443537882,"algo":2,"configurations":{},"conditions":[{"conditionType":"ROLLOUT","matcherGroup":{"combiner":"AND","matchers":[{"keySelector":{"trafficType":"user","attribute":null},"matcherType":"ALL_KEYS","negate":false,"userDefinedSegmentMatcherData":null,"whitelistMatcherData":null,"unaryNumericMatcherData":null,"betweenMatcherData":null,"booleanMatcherData":null,"dependencyMatcherData":null,"stringMatcherData":null}]},"partitions":[{"treatment":"on","size":0},{"treatment":"off","size":100}],"label":"default rule"}]},{"trafficTypeName":"user","name":"SPLIT_2","trafficAllocation":100,"trafficAllocationSeed":-1780071202,"seed":-1442762199,"status":"ACTIVE","killed":false,"defaultTreatment":"off","changeNumber":1675443537882,"algo":2,"configurations":{},"conditions":[{"conditionType":"ROLLOUT","matcherGroup":{"combiner":"AND","matchers":[{"keySelector":{"trafficType":"user","attribute":null},"matcherType":"ALL_KEYS","negate":false,"userDefinedSegmentMatcherData":null,"whitelistMatcherData":null,"unaryNumericMatcherData":null,"betweenMatcherData":null,"booleanMatcherData":null,"dependencyMatcherData":null,"stringMatcherData":null}]},"partitions":[{"treatment":"on","size":0},{"treatment":"off","size":100}],"label":"default rule"}]}], + "since":-1,"till":-1 +}, "rbs":{"d": [], "s": -1, "t": -1}} \ No newline at end of file diff --git a/client/src/test/resources/split_compact.yml b/client/src/test/resources/split_compact.yml new file mode 100644 index 000000000..bff410b0b --- /dev/null +++ b/client/src/test/resources/split_compact.yml @@ -0,0 +1,3 @@ +- split_1: {keys: user_c, treatment: 'off', config: '{ "size" : 10 }'} +- split_1: {keys: user_d, treatment: 'on'} +- split_2: {keys: user_e, treatment: 'off', config: '{ "size" : 55 }'} \ No newline at end of file diff --git 
a/client/src/test/resources/split_init.json b/client/src/test/resources/split_init.json new file mode 100644 index 000000000..01d57975a --- /dev/null +++ b/client/src/test/resources/split_init.json @@ -0,0 +1,605 @@ +{"ff": { + "d": [ + { + "trafficTypeName": "user", + "name": "split_1", + "trafficAllocation": 100, + "trafficAllocationSeed": -1364119282, + "seed": -605938843, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1660326991072, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 50 + }, + { + "treatment": "off", + "size": 50 + } + ], + "label": "default rule" + } + ] + }, + { + "trafficTypeName": "user", + "name": "split_2", + "trafficAllocation": 100, + "trafficAllocationSeed": -92391491, + "seed": -1769377604, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1651003069855, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "WHITELIST", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": null, + "matcherType": "WHITELIST", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": { + "whitelist": [ + "admin", + "user_1", + "user_2" + ] + }, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "v5", + "size": 100 + } + ], + "label": "whitelisted" + 
}, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "IN_SEGMENT", + "negate": false, + "userDefinedSegmentMatcherData": { + "segmentName": "segment_1" + }, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + }, + { + "treatment": "V4", + "size": 0 + }, + { + "treatment": "v5", + "size": 0 + } + ], + "label": "in segment segment_1" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + }, + { + "treatment": "V4", + "size": 0 + }, + { + "treatment": "v5", + "size": 0 + } + ], + "label": "default rule" + } + ] + }, + { + "trafficTypeName": "user", + "name": "split_3", + "trafficAllocation": 100, + "trafficAllocationSeed": -670005248, + "seed": -1297078412, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1650919058695, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "WHITELIST", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": null, + "matcherType": "WHITELIST", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": { + "whitelist": [ + "admin" + ] + }, + 
"unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "off", + "size": 100 + } + ], + "label": "whitelisted" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + }, + { + "treatment": "V5", + "size": 0 + }, + { + "treatment": "v8", + "size": 0 + } + ], + "label": "default rule" + } + ] + }, + { + "trafficTypeName": "user", + "name": "split_4", + "trafficAllocation": 50, + "trafficAllocationSeed": -1520910077, + "seed": -1785086567, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1647274074042, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ], + "label": "default rule" + } + ] + }, + { + "trafficTypeName": "user", + "name": "split_5", + "trafficAllocation": 100, + "trafficAllocationSeed": -3629915, + "seed": 816031817, + 
"status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1622494310037, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "WHITELIST", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": null, + "matcherType": "WHITELIST", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": { + "whitelist": [ + "seba", + "tincho" + ] + }, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + } + ], + "label": "whitelisted" + }, + { + "conditionType": "WHITELIST", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": null, + "matcherType": "WHITELIST", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": { + "whitelist": [ + "user_3" + ] + }, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "off", + "size": 100 + } + ], + "label": "whitelisted" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "default rule" + } + ] + }, + { + "trafficTypeName": "user", + "name": "split_6", + "trafficAllocation": 100, + "trafficAllocationSeed": -970151859, + "seed": -1258287669, + 
"status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1605020019151, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "WHITELIST", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": null, + "matcherType": "WHITELIST", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": { + "whitelist": [ + "admin" + ] + }, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + } + ], + "label": "whitelisted" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ], + "label": "default rule" + } + ] + }, + { + "trafficTypeName": "user", + "name": "split_7", + "trafficAllocation": 100, + "trafficAllocationSeed": 291807630, + "seed": -134149800, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1603461301902, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + 
"dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "default rule" + } + ] + } + ], + "s": -1, + "t": 1660326991072 +}, "rbs":{"d": [ + { + "changeNumber": 5, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[] + }, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "IN_SEGMENT", + "negate": false, + "userDefinedSegmentMatcherData": { + "segmentName": "segment_2" + }, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + } + } + ] + } +], "s": -1, "t": -1}} \ No newline at end of file diff --git a/client/src/test/resources/split_old_spec.json b/client/src/test/resources/split_old_spec.json new file mode 100644 index 000000000..66a05ca89 --- /dev/null +++ b/client/src/test/resources/split_old_spec.json @@ -0,0 +1,566 @@ +{"splits": [ + { + "trafficTypeName": "user", + "name": "split_1", + "trafficAllocation": 100, + "trafficAllocationSeed": -1364119282, + "seed": -605938843, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1660326991072, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": 
null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 50 + }, + { + "treatment": "off", + "size": 50 + } + ], + "label": "default rule" + } + ] + }, + { + "trafficTypeName": "user", + "name": "split_2", + "trafficAllocation": 100, + "trafficAllocationSeed": -92391491, + "seed": -1769377604, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1651003069855, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "WHITELIST", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": null, + "matcherType": "WHITELIST", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": { + "whitelist": [ + "admin", + "user_1", + "user_2" + ] + }, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "v5", + "size": 100 + } + ], + "label": "whitelisted" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "IN_SEGMENT", + "negate": false, + "userDefinedSegmentMatcherData": { + "segmentName": "segment_1" + }, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + }, + { + "treatment": "V4", + "size": 0 + }, + { + "treatment": "v5", + "size": 0 + } + ], + "label": "in segment segment_1" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + 
"userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + }, + { + "treatment": "V4", + "size": 0 + }, + { + "treatment": "v5", + "size": 0 + } + ], + "label": "default rule" + } + ] + }, + { + "trafficTypeName": "user", + "name": "split_3", + "trafficAllocation": 100, + "trafficAllocationSeed": -670005248, + "seed": -1297078412, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1650919058695, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "WHITELIST", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": null, + "matcherType": "WHITELIST", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": { + "whitelist": [ + "admin" + ] + }, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "off", + "size": 100 + } + ], + "label": "whitelisted" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + }, + { + "treatment": "V5", + "size": 0 + }, + { + "treatment": "v8", + "size": 0 + } + ], + "label": "default rule" + } + ] + }, 
+ { + "trafficTypeName": "user", + "name": "split_4", + "trafficAllocation": 50, + "trafficAllocationSeed": -1520910077, + "seed": -1785086567, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1647274074042, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ], + "label": "default rule" + } + ] + }, + { + "trafficTypeName": "user", + "name": "split_5", + "trafficAllocation": 100, + "trafficAllocationSeed": -3629915, + "seed": 816031817, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1622494310037, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "WHITELIST", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": null, + "matcherType": "WHITELIST", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": { + "whitelist": [ + "seba", + "tincho" + ] + }, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + } + ], + "label": "whitelisted" + }, + { + "conditionType": "WHITELIST", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": null, + "matcherType": "WHITELIST", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": { + 
"whitelist": [ + "user_3" + ] + }, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "off", + "size": 100 + } + ], + "label": "whitelisted" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "default rule" + } + ] + }, + { + "trafficTypeName": "user", + "name": "split_6", + "trafficAllocation": 100, + "trafficAllocationSeed": -970151859, + "seed": -1258287669, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1605020019151, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "WHITELIST", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": null, + "matcherType": "WHITELIST", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": { + "whitelist": [ + "admin" + ] + }, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + } + ], + "label": "whitelisted" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + 
"whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ], + "label": "default rule" + } + ] + }, + { + "trafficTypeName": "user", + "name": "split_7", + "trafficAllocation": 100, + "trafficAllocationSeed": 291807630, + "seed": -134149800, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1603461301902, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "default rule" + } + ] + } + ], + "since": -1, + "till": 1660326991072 +} \ No newline at end of file diff --git a/client/src/test/resources/splits.json b/client/src/test/resources/splits.json index de9696b4e..da2654f1b 100644 --- a/client/src/test/resources/splits.json +++ b/client/src/test/resources/splits.json @@ -1,5 +1,5 @@ -{ - "splits": [ +{"ff": { + "d": [ { "trafficTypeName": "user", "name": "push_test", @@ -224,8 +224,113 @@ "label": "default label" } ] + }, + { + "changeNumber": 10, + "trafficTypeName": "user", + "name": "rbs_flag", + "trafficAllocation": 100, + "trafficAllocationSeed": 1828377380, + "seed": -286617921, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "algo": 2, + "conditions": [ + { + "conditionType": "ROLLOUT", + 
"matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "IN_RULE_BASED_SEGMENT", + "negate": false, + "userDefinedSegmentMatcherData": { + "segmentName": "sample_rule_based_segment" + } + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "in rule based segment sample_rule_based_segment" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "ALL_KEYS", + "negate": false + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ], + "label": "default rule" + } + ], + "configurations": {}, + "sets": [], + "impressionsDisabled": false } ], - "since": -1, - "till": 1585948850109 + "s": -1, + "t": 1585948850109 +}, "rbs":{"d": [ + { + "changeNumber": 5, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ] + }], "s": -1, "t": 1585948850109} } diff --git a/client/src/test/resources/splits2.json b/client/src/test/resources/splits2.json index a01787d4a..afbc92992 100644 --- a/client/src/test/resources/splits2.json +++ b/client/src/test/resources/splits2.json @@ -1,5 +1,5 @@ -{ - "splits": [ +{"ff": { + "d": [ { "trafficTypeName": "user", "name": "push_test", @@ -115,6 +115,6 @@ ] } ], - "since": 1585948850110, - "till": 1585948850111 -} \ No newline at end of file + "s": 1585948850110, + "t": 1585948850111 +}, "rbs":{"d": [], "s": 1585948850110, "t": 
1585948850111}} \ No newline at end of file diff --git a/client/src/test/resources/splits_imp_toggle.json b/client/src/test/resources/splits_imp_toggle.json new file mode 100644 index 000000000..18c5b0aaa --- /dev/null +++ b/client/src/test/resources/splits_imp_toggle.json @@ -0,0 +1,156 @@ +{ + "ff": { + "d": [ + { + "trafficTypeName": "user", + "name": "without_impression_toggle", + "trafficAllocation": 24, + "trafficAllocationSeed": -172559061, + "seed": -906334215, + "status": "ACTIVE", + "killed": true, + "defaultTreatment": "off", + "changeNumber": 1585948717645, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "default rule" + } + ] + }, + { + "trafficTypeName": "user", + "name": "impression_toggle_on", + "trafficAllocation": 24, + "trafficAllocationSeed": -172559061, + "seed": -906334215, + "status": "ACTIVE", + "killed": true, + "defaultTreatment": "off", + "changeNumber": 1585948717645, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, 
+ "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "default rule" + } + ], + "impressionsDisabled": false + }, + { + "trafficTypeName": "user", + "name": "impression_toggle_off", + "trafficAllocation": 24, + "trafficAllocationSeed": -172559061, + "seed": -906334215, + "status": "ACTIVE", + "killed": true, + "defaultTreatment": "off", + "changeNumber": 1585948717645, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "default rule" + } + ], + "impressionsDisabled": true + } + ], + "s": -1, + "t": 1602796638344 + }, "rbs": {"s": -1, "t": -1, "d": []}} diff --git a/client/src/test/resources/splits_killed.json b/client/src/test/resources/splits_killed.json index 13eed1a6c..6924afc67 100644 --- a/client/src/test/resources/splits_killed.json +++ b/client/src/test/resources/splits_killed.json @@ -1,5 +1,5 @@ -{ - "splits": [ +{"ff": { + "d": [ { "trafficTypeName": "user", "name": "push_test", @@ -86,6 +86,6 @@ ] } ], - "since": 1585948850111, - "till": 1585948850112 -} \ No newline at end of file + "s": 1585948850111, + "t": 1585948850112 +}, "rbs":{"d": [], "s": 1585948850111, "t": 1585948850112}} \ No newline at end of file diff --git a/client/src/test/resources/splits_localhost.json b/client/src/test/resources/splits_localhost.json new file mode 100644 index 000000000..6f4abdb29 --- /dev/null +++ 
b/client/src/test/resources/splits_localhost.json @@ -0,0 +1,295 @@ +{"ff": { + "d": [ + { + "trafficTypeName": "user", + "name": "push_test", + "trafficAllocation": 100, + "trafficAllocationSeed": -2092979940, + "seed": 105482719, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "on_default", + "changeNumber": 1585948850109, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "WHITELIST", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": null, + "matcherType": "WHITELIST", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": { + "whitelist": [ + "admin", + "mauro" + ] + }, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on_whitelist", + "size": 100 + } + ], + "label": "whitelisted" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + }, + { + "treatment": "V1", + "size": 0 + } + ], + "label": "default rule" + } + ] + }, + { + "trafficTypeName": "user", + "name": "tinchotest", + "trafficAllocation": 24, + "trafficAllocationSeed": -172559061, + "seed": -906334215, + "status": "ACTIVE", + "killed": true, + "defaultTreatment": "off", + "changeNumber": 1585948717645, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + 
"keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "default rule" + } + ] + }, + { + "trafficTypeName": "user", + "name": "test_split", + "trafficAllocation": 100, + "trafficAllocationSeed": 1582960494, + "seed": 1842944006, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "changeNumber": 1582741588594, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ], + "label": "default rule" + } + ] + }, + { + "changeNumber": 10, + "trafficTypeName": "user", + "name": "rbs_flag", + "trafficAllocation": 100, + "trafficAllocationSeed": 1828377380, + "seed": -286617921, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "algo": 2, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "IN_RULE_BASED_SEGMENT", + "negate": false, + "userDefinedSegmentMatcherData": { + "segmentName": "sample_rule_based_segment" + } + } + ] + }, + "partitions": [ 
+ { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "in rule based segment sample_rule_based_segment" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "ALL_KEYS", + "negate": false + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ], + "label": "default rule" + } + ], + "configurations": {}, + "sets": [], + "impressionsDisabled": false + } + ], + "s": -1, + "t": -1 +}, "rbs":{"d": [ + { + "changeNumber": 5, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[] + }, + "conditions": [ + { + "conditionType": "WHITELIST", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ] + }], "s": -1, "t": -1} +} diff --git a/client/src/test/resources/splits_prereq.json b/client/src/test/resources/splits_prereq.json new file mode 100644 index 000000000..5efa7feda --- /dev/null +++ b/client/src/test/resources/splits_prereq.json @@ -0,0 +1,293 @@ +{"ff": { + "d": [ + { + "trafficTypeName": "user", + "name": "test_prereq", + "prerequisites": [ + { "n": "feature_segment", "ts": ["off", "def_test"] }, + { "n": "rbs_flag", "ts": ["on"] } + ], + "trafficAllocation": 100, + "trafficAllocationSeed": 1582960494, + "seed": 1842944006, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "def_treatment", + "changeNumber": 1582741588594, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + 
"attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "default rule" + } + ] + }, + { + "name":"feature_segment", + "trafficTypeId":"u", + "trafficTypeName":"User", + "trafficAllocation": 100, + "trafficAllocationSeed": 1582960494, + "seed":-1177551240, + "status":"ACTIVE", + "killed":false, + "defaultTreatment":"def_test", + "changeNumber": 1582741588594, + "algo": 2, + "configurations": {}, + "conditions":[ + { + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "matcherType":"IN_SEGMENT", + "negate":false, + "userDefinedSegmentMatcherData":{ + "segmentName":"segment-test" + }, + "whitelistMatcherData":null + } + ] + }, + "partitions":[ + { + "treatment":"on", + "size":100 + }, + { + "treatment":"off", + "size":0 + } + ], + "label": "default label" + } + ] + }, + { + "changeNumber": 10, + "trafficTypeName": "user", + "name": "rbs_flag", + "trafficAllocation": 100, + "trafficAllocationSeed": 1828377380, + "seed": -286617921, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "algo": 2, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "IN_RULE_BASED_SEGMENT", + "negate": false, + "userDefinedSegmentMatcherData": { + "segmentName": "sample_rule_based_segment" + } + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "in rule based segment sample_rule_based_segment" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + 
{ + "keySelector": { + "trafficType": "user" + }, + "matcherType": "ALL_KEYS", + "negate": false + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ], + "label": "default rule" + } + ], + "configurations": {}, + "sets": [], + "impressionsDisabled": false + }, + { + "trafficTypeName": "user", + "name": "prereq_chain", + "prerequisites": [ + { "n": "test_prereq", "ts": ["on"] } + ], + "trafficAllocation": 100, + "trafficAllocationSeed": -2092979940, + "seed": 105482719, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "on_default", + "changeNumber": 1585948850109, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "WHITELIST", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": null, + "matcherType": "WHITELIST", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": { + "whitelist": [ + "bilal@split.io" + ] + }, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on_whitelist", + "size": 100 + } + ], + "label": "whitelisted" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + }, + { + "treatment": "V1", + "size": 0 + } + ], + "label": "default rule" + } + ] + } + ], + "s": -1, + "t": 1585948850109 +}, "rbs":{"d": [ + { + "changeNumber": 5, + "name": 
"sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ] + }], "s": -1, "t": 1585948850109} +} diff --git a/client/src/test/resources/streaming-auth-push-disabled-empty-token.json b/client/src/test/resources/streaming-auth-push-disabled-empty-token.json new file mode 100644 index 000000000..d40fd01c9 --- /dev/null +++ b/client/src/test/resources/streaming-auth-push-disabled-empty-token.json @@ -0,0 +1 @@ +{"pushEnabled":false,"token":""} \ No newline at end of file diff --git a/okhttp-modules/pom.xml b/okhttp-modules/pom.xml new file mode 100644 index 000000000..3753050e0 --- /dev/null +++ b/okhttp-modules/pom.xml @@ -0,0 +1,92 @@ + + + + java-client-parent + io.split.client + 4.18.2 + + 4.0.0 + 4.18.2 + okhttp-modules + jar + http-modules + Alternative Http Modules + + + + release + + + + org.sonatype.central + central-publishing-maven-plugin + 0.8.0 + true + + false + central + false + published + + + + + + + + + com.squareup.okhttp3 + okhttp + 4.12.0 + + + com.squareup.okhttp3 + logging-interceptor + 4.12.0 + + + io.split.client + java-client + 4.14.0-rc1 + compile + + + org.apache.httpcomponents.client5 + httpclient5 + 5.5 + + + + junit + junit + test + + + org.mockito + mockito-core + 1.10.19 + test + + + org.powermock + powermock-module-junit4 + 1.7.4 + test + + + org.powermock + powermock-api-mockito + 1.7.4 + test + + + com.squareup.okhttp3 + mockwebserver + 4.8.0 + test + + + + diff --git a/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/HTTPKerberosAuthInterceptor.java 
b/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/HTTPKerberosAuthInterceptor.java new file mode 100644 index 000000000..26bd23ea5 --- /dev/null +++ b/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/HTTPKerberosAuthInterceptor.java @@ -0,0 +1,273 @@ +package io.split.httpmodules.okhttp; + +import java.io.IOException; +import java.util.Map; +import java.util.Date; +import java.util.Set; +import java.util.Base64; + +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; +import java.security.Principal; +import javax.security.auth.login.LoginContext; +import javax.security.auth.login.LoginException; +import javax.security.auth.Subject; +import javax.security.auth.login.AppConfigurationEntry; +import javax.security.auth.login.Configuration; +import javax.security.auth.kerberos.KerberosTicket; + +import org.ietf.jgss.GSSContext; +import org.ietf.jgss.GSSCredential; +import org.ietf.jgss.GSSException; +import org.ietf.jgss.GSSManager; +import org.ietf.jgss.GSSName; +import org.ietf.jgss.Oid; + +import okhttp3.Request; +import okhttp3.Response; +import okhttp3.Authenticator; +import okhttp3.Route; + +/** + * + * An HTTP Request interceptor that modifies the request headers to enable + * Kerberos authentication. It appends the Kerberos authentication token to the + * 'Authorization' request header for Kerberos authentication + * + * Copyright 2024 MarkLogic Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +public class HTTPKerberosAuthInterceptor implements Authenticator { + String host; + Map krbOptions; + LoginContext loginContext; + public HTTPKerberosAuthInterceptor(String host, Map krbOptions) throws IOException { + this.host = host; + this.krbOptions = krbOptions; + try { + buildSubjectCredentials(); + } catch (LoginException e) { + throw new IOException(e.getMessage(), e); + } + } + + /** + * Class to create Kerberos Configuration object which specifies the Kerberos + * Login Module to be used for authentication. + * + */ + protected static class KerberosLoginConfiguration extends Configuration { + Map krbOptions = null; + + public KerberosLoginConfiguration() {} + + KerberosLoginConfiguration(Map krbOptions) { + + this.krbOptions = krbOptions; + } + @Override + public AppConfigurationEntry[] getAppConfigurationEntry(String name) { + if (krbOptions == null) { + throw new IllegalStateException("Cannot create AppConfigurationEntry without Kerberos Options"); + } + return new AppConfigurationEntry[] { new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule", + AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, krbOptions) }; + } + } + + /** + * This method checks the validity of the TGT in the cache and build the + * Subject inside the LoginContext using Krb5LoginModule and the TGT cached by + * the Kerberos client. It assumes that a valid TGT is already present in the + * kerberos client's cache. + * + * @throws LoginException + */ + protected void buildSubjectCredentials() throws LoginException { + Subject subject = new Subject(); + /** + * We are not getting the TGT from KDC here. 
The actual TGT is got from the + * KDC using kinit or equivalent but we use the cached TGT in order to build + * the LoginContext and populate the TGT inside the Subject using + * Krb5LoginModule + */ + + LoginContext lc = getLoginContext(subject); + lc.login(); + loginContext = lc; + } + + protected LoginContext getLoginContext(Subject subject) throws LoginException { + return new LoginContext("Krb5LoginContext", subject, null, + (krbOptions != null) ? new KerberosLoginConfiguration(krbOptions) : new KerberosLoginConfiguration()); + } + /** + * This method is responsible for getting the client principal name from the + * subject's principal set + * + * @return String the Kerberos principal name populated in the subject + * @throws IllegalStateException if there is more than 0 or more than 1 + * principal is present + */ + protected String getClientPrincipalName() { + final Set principalSet = getContextSubject().getPrincipals(); + if (principalSet.size() != 1) + throw new IllegalStateException( + "Only one principal is expected. Found 0 or more than one principals :" + principalSet); + return principalSet.iterator().next().getName(); + } + + protected Subject getContextSubject() { + Subject subject = loginContext.getSubject(); + if (subject == null) + throw new IllegalStateException("Kerberos login context without subject"); + return subject; + } + + protected CreateAuthorizationHeaderAction getAuthorizationHeaderAction(String clientPrincipal, + String serverPrincipalName) { + return new CreateAuthorizationHeaderAction(clientPrincipal, + serverPrincipalName); + } + + /** + * This method builds the Authorization header for Kerberos. 
It + * generates a request token based on the service ticket, client principal name and + * time-stamp + * + * @param serverPrincipalName + * the name registered with the KDC of the service for which we + * need to authenticate + * @return the HTTP Authorization header token + */ + protected String buildAuthorizationHeader(String serverPrincipalName) throws LoginException, PrivilegedActionException { + /* + * Get the principal from the Subject's private credentials and populate the + * client and server principal name for the GSS API + */ + final String clientPrincipal = getClientPrincipalName(); + final CreateAuthorizationHeaderAction action = getAuthorizationHeaderAction(clientPrincipal, + serverPrincipalName); + + /* + * Check if the TGT in the Subject's private credentials are valid. If + * valid, then we use the TGT in the Subject's private credentials. If not, + * we build the Subject's private credentials again from valid TGT in the + * Kerberos client cache. + */ + Set privateCreds = getContextSubject().getPrivateCredentials(); + for (Object privateCred : privateCreds) { + if (privateCred instanceof KerberosTicket) { + String serverPrincipalTicketName = ((KerberosTicket) privateCred).getServer().getName(); + if ((serverPrincipalTicketName.startsWith("krbtgt")) + && ((KerberosTicket) privateCred).getEndTime().compareTo(new Date()) < 0) { + buildSubjectCredentials(); + break; + } + } + } + + /* + * Subject.doAs takes in the Subject context and the action to be run as + * arguments. This method executes the action as the Subject given in the + * argument. We do this in order to provide the Subject's context so that we + * reuse the service ticket which will be populated in the Subject rather + * than getting the service ticket from the KDC for each request. 
The GSS + * API populates the service ticket in the Subject and reuses it + * + */ + Subject.doAs(loginContext.getSubject(), action); + return action.getNegotiateToken(); + } + + /** + * Creates a privileged action which will be executed as the Subject using + * Subject.doAs() method. We do this in order to create a context of the user + * who has the service ticket and reuse this context for subsequent requests + */ + protected static class CreateAuthorizationHeaderAction implements PrivilegedExceptionAction { + String clientPrincipalName; + String serverPrincipalName; + + private StringBuilder outputToken = new StringBuilder(); + + protected CreateAuthorizationHeaderAction(final String clientPrincipalName, final String serverPrincipalName) { + this.clientPrincipalName = clientPrincipalName; + this.serverPrincipalName = serverPrincipalName; + } + + protected String getNegotiateToken() { + return outputToken.toString(); + } + + /* + * Here GSS API takes care of getting the service ticket from the Subject + * cache or by using the TGT information populated in the subject which is + * done by buildSubjectCredentials method. The service ticket received is + * populated in the subject's private credentials along with the TGT + * information since we will be executing this method as the Subject. For + * subsequent requests, the cached service ticket will be re-used. For this + * to work the System property javax.security.auth.useSubjectCredsOnly must + * be set to true. 
+ */ + @Override + public Object run() throws KerberosAuthException { + try { + Oid krb5Mechanism = new Oid("1.2.840.113554.1.2.2"); + Oid krb5PrincipalNameType = new Oid("1.2.840.113554.1.2.2.1"); + final GSSManager manager = GSSManager.getInstance(); + final GSSName clientName = manager.createName(clientPrincipalName, krb5PrincipalNameType); + final GSSCredential clientCred = manager.createCredential(clientName, 8 * 3600, krb5Mechanism, + GSSCredential.INITIATE_ONLY); + final GSSName serverName = manager.createName(serverPrincipalName, krb5PrincipalNameType); + + final GSSContext context = manager.createContext(serverName, krb5Mechanism, clientCred, + GSSContext.DEFAULT_LIFETIME); + byte[] inToken = new byte[0]; + byte[] outToken = context.initSecContext(inToken, 0, inToken.length); + if (outToken == null) { + throw new IOException("could not initialize the security context"); + } + context.requestMutualAuth(true); + outputToken.append(new String(Base64.getEncoder().encode(outToken))); + context.dispose(); + } catch (GSSException | IOException exception) { + throw new KerberosAuthException(exception.getMessage(), exception); + } + return null; + } + } + + /* + * The server principal name which we pass as an argument to + * buildAuthorizationHeader method would always start with 'HTTP/' because we + * create the principal name for the Marklogic server starting with 'HTTP/' + * followed by the host name as mentioned in the External + * Security Guide. 
+ */ + @Override public Request authenticate(Route route, Response response) throws IOException { + String authValue; + try { + authValue = "Negotiate " + buildAuthorizationHeader("HTTP/" + host); + } catch (Exception e) { + throw new IOException(e.getMessage(), e); + } + + return response.request().newBuilder() + .header("Proxy-authorization", authValue) + .build(); + } +} diff --git a/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/KerberosAuthException.java b/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/KerberosAuthException.java new file mode 100644 index 000000000..06fa2672f --- /dev/null +++ b/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/KerberosAuthException.java @@ -0,0 +1,10 @@ +package io.split.httpmodules.okhttp; + +public class KerberosAuthException extends Exception { + public KerberosAuthException(String message) { + super(message); + } + public KerberosAuthException(String message, Throwable exception) { + super(message, exception); + } +} diff --git a/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/OkHttpClientImpl.java b/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/OkHttpClientImpl.java new file mode 100644 index 000000000..65a59921f --- /dev/null +++ b/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/OkHttpClientImpl.java @@ -0,0 +1,247 @@ +package io.split.httpmodules.okhttp; + +import io.split.client.RequestDecorator; +import io.split.client.dtos.SplitHttpResponse; +import io.split.client.utils.SDKMetadata; +import io.split.engine.common.FetchOptions; +import io.split.service.SplitHttpClient; + +import okhttp3.Authenticator; +import okhttp3.OkHttpClient; +import okhttp3.logging.HttpLoggingInterceptor; +import okhttp3.MediaType; +import okhttp3.Request; +import okhttp3.RequestBody; +import okhttp3.Response; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.net.HttpURLConnection; +import java.net.Proxy; +import java.net.URI; +import 
java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public class OkHttpClientImpl implements SplitHttpClient { + protected OkHttpClient httpClient; + private static final Logger _log = LoggerFactory.getLogger(OkHttpClientImpl.class); + private static final String HEADER_CACHE_CONTROL_NAME = "Cache-Control"; + private static final String HEADER_CACHE_CONTROL_VALUE = "no-cache"; + private static final String HEADER_API_KEY = "Authorization"; + private static final String HEADER_CLIENT_KEY = "SplitSDKClientKey"; + private static final String HEADER_CLIENT_MACHINE_NAME = "SplitSDKMachineName"; + private static final String HEADER_CLIENT_MACHINE_IP = "SplitSDKMachineIP"; + private static final String HEADER_CLIENT_VERSION = "SplitSDKVersion"; + private String _apikey; + protected SDKMetadata _metadata; + private final RequestDecorator _decorator; + + public OkHttpClientImpl(String apiToken, SDKMetadata sdkMetadata, + Proxy proxy, String proxyAuthKerberosPrincipalName, boolean debugEnabled, + int readTimeout, int connectionTimeout, RequestDecorator decorator) throws IOException { + _apikey = apiToken; + _metadata = sdkMetadata; + _decorator = decorator; + setHttpClient(proxy, proxyAuthKerberosPrincipalName, debugEnabled, + readTimeout, connectionTimeout); + + } + protected void setHttpClient(Proxy proxy, String proxyAuthKerberosPrincipalName, boolean debugEnabled, + int readTimeout, int connectionTimeout) throws IOException { + httpClient = initializeClient(proxy, proxyAuthKerberosPrincipalName, debugEnabled, + readTimeout, connectionTimeout); + } + protected OkHttpClient initializeClient(Proxy proxy, String proxyAuthKerberosPrincipalName, boolean debugEnabled, + int readTimeout, int connectionTimeout) throws IOException { + HttpLoggingInterceptor logging = new HttpLoggingInterceptor(); + 
if (debugEnabled) { + logging.setLevel(HttpLoggingInterceptor.Level.HEADERS); + } else { + logging.setLevel(HttpLoggingInterceptor.Level.NONE); + } + + Map kerberosOptions = new HashMap<>(); + kerberosOptions.put("com.sun.security.auth.module.Krb5LoginModule", "required"); + kerberosOptions.put("refreshKrb5Config", "false"); + kerberosOptions.put("doNotPrompt", "false"); + kerberosOptions.put("useTicketCache", "true"); + + Authenticator proxyAuthenticator = getProxyAuthenticator(proxyAuthKerberosPrincipalName, kerberosOptions); + + return new okhttp3.OkHttpClient.Builder() + .proxy(proxy) + .readTimeout(readTimeout, TimeUnit.MILLISECONDS) + .connectTimeout(connectionTimeout, TimeUnit.MILLISECONDS) + .addInterceptor(logging) + .proxyAuthenticator(proxyAuthenticator) + .build(); + } + + public HTTPKerberosAuthInterceptor getProxyAuthenticator(String proxyKerberosPrincipalName, + Map kerberosOptions) throws IOException { + return new HTTPKerberosAuthInterceptor(proxyKerberosPrincipalName, kerberosOptions); + } + + @Override + public SplitHttpResponse get(URI uri, FetchOptions options, Map> additionalHeaders) { + try { + okhttp3.Request.Builder requestBuilder = getRequestBuilder(); + requestBuilder.url(uri.toString()); + Map> headers = mergeHeaders(buildBasicHeaders(), additionalHeaders); + Map> decorateHeaders = OkHttpRequestDecorator.decorate(headers, _decorator); + Map> finalHeaders; + if (decorateHeaders.isEmpty()) { + finalHeaders = headers; + } else { + finalHeaders = decorateHeaders; + } + for (Map.Entry> e : finalHeaders.entrySet()) { + for (String headerValue : e.getValue()) { + requestBuilder.addHeader(e.getKey(), headerValue); + } + } + + if (options.cacheControlHeadersEnabled()) { + requestBuilder.addHeader(HEADER_CACHE_CONTROL_NAME, HEADER_CACHE_CONTROL_VALUE); + } + + Request request = requestBuilder.build(); + _log.debug(String.format("Request Headers: %s", request.headers())); + + Response response = httpClient.newCall(request).execute(); + + int 
responseCode = response.code(); + + _log.debug(String.format("[GET] %s. Status code: %s", + request.url().toString(), + responseCode)); + + String statusMessage = ""; + if (responseCode < HttpURLConnection.HTTP_OK || responseCode >= HttpURLConnection.HTTP_MULT_CHOICE) { + _log.warn(String.format("Response status was: %s. Reason: %s", responseCode, + response.message())); + statusMessage = response.message(); + } + + String responseBody = response.body().string(); + response.close(); + + return new SplitHttpResponse(responseCode, + statusMessage, + responseBody, + getResponseHeaders(response)); + } catch (Exception e) { + throw new IllegalStateException(String.format("Problem in http get operation: %s", e), e); + } + } + + @Override + public SplitHttpResponse post(URI url, String entity, + Map> additionalHeaders) { + try { + okhttp3.Request.Builder requestBuilder = getRequestBuilder(); + requestBuilder.url(url.toString()); + Map> headers = mergeHeaders(buildBasicHeaders(), additionalHeaders); + Map> decorateHeaders = OkHttpRequestDecorator.decorate(headers, _decorator); + Map> finalHeaders; + if (decorateHeaders.isEmpty()) { + finalHeaders = headers; + } else { + finalHeaders = decorateHeaders; + } + for (Map.Entry> e : finalHeaders.entrySet()) { + for (String headerValue : e.getValue()) { + requestBuilder.addHeader(e.getKey(), headerValue); + } + } + requestBuilder.addHeader("Accept-Encoding", "gzip"); + requestBuilder.addHeader("Content-Type", "application/json"); + RequestBody postBody = RequestBody.create(MediaType.parse("application/json; charset=utf-8"), entity); + requestBuilder.post(postBody); + + Request request = requestBuilder.build(); + _log.debug(String.format("Request Headers: %s", request.headers())); + + Response response = httpClient.newCall(request).execute(); + + int responseCode = response.code(); + + _log.debug(String.format("[GET] %s. 
Status code: %s", + request.url().toString(), + responseCode)); + + String statusMessage = ""; + if (responseCode < HttpURLConnection.HTTP_OK || responseCode >= HttpURLConnection.HTTP_MULT_CHOICE) { + _log.warn(String.format("Response status was: %s. Reason: %s", responseCode, + response.message())); + statusMessage = response.message(); + } + response.close(); + + return new SplitHttpResponse(responseCode, statusMessage, "", getResponseHeaders(response)); + } catch (Exception e) { + throw new IllegalStateException(String.format("Problem in http post operation: %s", e), e); + } + } + + protected okhttp3.Request.Builder getRequestBuilder() { + return new okhttp3.Request.Builder(); + } + + protected Request getRequest(okhttp3.Request.Builder requestBuilder) { + return requestBuilder.build(); + } + + protected Map> buildBasicHeaders() { + Map> h = new HashMap<>(); + h.put(HEADER_API_KEY, Collections.singletonList("Bearer " + _apikey)); + h.put(HEADER_CLIENT_VERSION, Collections.singletonList(_metadata.getSdkVersion())); + h.put(HEADER_CLIENT_MACHINE_IP, Collections.singletonList(_metadata.getMachineIp())); + h.put(HEADER_CLIENT_MACHINE_NAME, Collections.singletonList(_metadata.getMachineName())); + h.put(HEADER_CLIENT_KEY, Collections.singletonList(_apikey.length() > 4 + ? 
_apikey.substring(_apikey.length() - 4) + : _apikey)); + return h; + } + + protected Map> mergeHeaders(Map> headers, + Map> toAdd) { + if (toAdd == null || toAdd.size() == 0) { + return headers; + } + + for (Map.Entry> entry : toAdd.entrySet()) { + headers.put(entry.getKey(), entry.getValue()); +// headers.computeIfPresent(entry.getKey(), +// (k, oldValue) -> Stream.concat(oldValue.stream(), entry.getValue().stream()) +// .collect(Collectors.toList())); + } + + return headers; + } + + protected SplitHttpResponse.Header[] getResponseHeaders(Response response) { + List responseHeaders = new ArrayList<>(); + Map> map = response.headers().toMultimap(); + for (Map.Entry> entry : map.entrySet()) { + if (entry.getKey() != null) { + responseHeaders.add(new SplitHttpResponse.Header(entry.getKey(), entry.getValue())); + } + } + return responseHeaders.toArray(new SplitHttpResponse.Header[0]); + } + + @Override + public void close() throws IOException { + httpClient.dispatcher().executorService().shutdown(); + } + +} diff --git a/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/OkHttpModule.java b/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/OkHttpModule.java new file mode 100644 index 000000000..9f512874d --- /dev/null +++ b/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/OkHttpModule.java @@ -0,0 +1,195 @@ +package io.split.httpmodules.okhttp; + +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.Proxy; + +import io.split.client.RequestDecorator; +import io.split.client.utils.SDKMetadata; +import io.split.service.CustomHttpModule; + +public class OkHttpModule implements CustomHttpModule { + private static final int DEFAULT_CONNECTION_TIMEOUT = 15000; + private static final int DEFAULT_READ_TIMEOUT = 15000; + private final Boolean _debugEnabled; + private final Integer _connectionTimeout; + private final Integer _readTimeout; + private final Proxy _proxy; + private final ProxyAuthScheme _proxyAuthScheme; + private 
final String _proxyAuthKerberosPrincipalName; + + public static Builder builder() { + return new Builder(); + } + + private OkHttpModule(ProxyAuthScheme proxyAuthScheme, + String proxyAuthKerberosPrincipalName, + Proxy proxy, + Integer connectionTimeout, + Integer readTimeout, + Boolean debugEnabled) { + _proxyAuthScheme = proxyAuthScheme; + _proxyAuthKerberosPrincipalName = proxyAuthKerberosPrincipalName; + _proxy = proxy; + _connectionTimeout = connectionTimeout; + _readTimeout = readTimeout; + _debugEnabled = debugEnabled; + } + + @Override + public OkHttpClientImpl createClient(String apiToken, SDKMetadata sdkMetadata, RequestDecorator decorator) + throws IOException { + return new OkHttpClientImpl(apiToken, sdkMetadata, + _proxy, _proxyAuthKerberosPrincipalName, _debugEnabled, + _readTimeout, _connectionTimeout, decorator); + } + + public Proxy proxy() { + return _proxy; + } + + public ProxyAuthScheme proxyAuthScheme() { + return _proxyAuthScheme; + } + + public String proxyKerberosPrincipalName() { + return _proxyAuthKerberosPrincipalName; + } + + public Integer connectionTimeout() { + return _connectionTimeout; + } + + public Boolean debugEnabled() { + return _debugEnabled; + } + + public Integer readTimeout() { + return _readTimeout; + } + + public static final class Builder { + private Integer _connectionTimeout = DEFAULT_CONNECTION_TIMEOUT; + private Integer _readTimeout = DEFAULT_READ_TIMEOUT; + private String _proxyHost = "localhost"; + private int _proxyPort = -1; + private ProxyAuthScheme _proxyAuthScheme = null; + private String _proxyAuthKerberosPrincipalName = null; + private Boolean _debugEnabled = false; + + public Builder() { + } + + public Builder debugEnabled() { + _debugEnabled = true; + return this; + } + + /** + * The host location of the proxy. Default is localhost. 
+ * + * @param proxyHost location of the proxy + * @return this builder + */ + public Builder proxyHost(String proxyHost) { + _proxyHost = proxyHost; + return this; + } + + /** + * The port of the proxy. Default is -1. + * + * @param proxyPort port for the proxy + * @return this builder + */ + public Builder proxyPort(int proxyPort) { + _proxyPort = proxyPort; + return this; + } + + Proxy proxy() { + if (_proxyPort != -1) { + return new Proxy(Proxy.Type.HTTP, new InetSocketAddress(_proxyHost, _proxyPort)); + } + // Default is no proxy. + return null; + } + + /** + * Authentication Scheme + * + * @param proxyAuthScheme + * @return this builder + */ + public Builder proxyAuthScheme(ProxyAuthScheme proxyAuthScheme) { + _proxyAuthScheme = proxyAuthScheme; + return this; + } + + /** + * Kerberos Principal Account Name + * + * @param proxyAuthKerberosPrincipalName + * @return this builder + */ + public Builder proxyAuthKerberosPrincipalName(String proxyAuthKerberosPrincipalName) { + _proxyAuthKerberosPrincipalName = proxyAuthKerberosPrincipalName; + return this; + } + + /** + * HTTP Connection Timeout + * + * @param connectionTimeout + * @return this builder + */ + public Builder connectionTimeout(int connectionTimeout) { + _connectionTimeout = connectionTimeout; + return this; + } + + /** + * HTTP Read Timeout + * + * @param readTimeout + * @return this builder + */ + public Builder readTimeout(int readTimeout) { + _readTimeout = readTimeout; + return this; + } + + private void verifyAuthScheme() { + if (_proxyAuthScheme == ProxyAuthScheme.KERBEROS) { + if (proxy() == null) { + throw new IllegalStateException("Kerberos mode require Proxy parameters."); + } + if (_proxyAuthKerberosPrincipalName == null) { + throw new IllegalStateException("Kerberos mode require Kerberos Principal Name."); + } + } + } + + private void verifyTimeouts() { + if (_connectionTimeout <= 0) { + _connectionTimeout = DEFAULT_CONNECTION_TIMEOUT; + } + if (_readTimeout <= 0) { + _readTimeout = 
DEFAULT_READ_TIMEOUT; + } + } + + public OkHttpModule build() { + verifyTimeouts(); + verifyAuthScheme(); + + return new OkHttpModule( + _proxyAuthScheme, + _proxyAuthKerberosPrincipalName, + proxy(), + _connectionTimeout, + _readTimeout, + _debugEnabled); + } + } +} diff --git a/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/OkHttpRequestDecorator.java b/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/OkHttpRequestDecorator.java new file mode 100644 index 000000000..efe9b8077 --- /dev/null +++ b/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/OkHttpRequestDecorator.java @@ -0,0 +1,15 @@ +package io.split.httpmodules.okhttp; + +import java.util.List; +import java.util.Map; + +import io.split.client.RequestDecorator; +import io.split.client.dtos.RequestContext; + +class OkHttpRequestDecorator { + + public static Map> decorate(Map> headers, + RequestDecorator decorator) { + return decorator.decorateHeaders(new RequestContext(headers)).headers(); + } +} diff --git a/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/ProxyAuthScheme.java b/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/ProxyAuthScheme.java new file mode 100644 index 000000000..4340829a2 --- /dev/null +++ b/okhttp-modules/src/main/java/io/split/httpmodules/okhttp/ProxyAuthScheme.java @@ -0,0 +1,5 @@ +package io.split.httpmodules.okhttp; + +public enum ProxyAuthScheme { + KERBEROS +} diff --git a/okhttp-modules/src/test/java/io/split/httpmodules/okhttp/HTTPKerberosAuthIntercepterTest.java b/okhttp-modules/src/test/java/io/split/httpmodules/okhttp/HTTPKerberosAuthIntercepterTest.java new file mode 100644 index 000000000..94fcb85a7 --- /dev/null +++ b/okhttp-modules/src/test/java/io/split/httpmodules/okhttp/HTTPKerberosAuthIntercepterTest.java @@ -0,0 +1,111 @@ +package io.split.httpmodules.okhttp; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.powermock.api.mockito.PowerMockito; +import 
org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; + +import javax.security.auth.Subject; +import javax.security.auth.kerberos.KerberosPrincipal; +import javax.security.auth.login.AppConfigurationEntry; +import javax.security.auth.login.LoginContext; +import javax.security.auth.login.LoginException; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsEqual.equalTo; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.verify; +import static org.mockito.internal.verification.VerificationModeFactory.times; +import static org.powermock.api.mockito.PowerMockito.*; + +import java.security.PrivilegedActionException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; + +@RunWith(PowerMockRunner.class) +@PrepareForTest(HTTPKerberosAuthInterceptor.class) +public class HTTPKerberosAuthIntercepterTest { + @Test + public void testBasicFlow() throws Exception { + System.setProperty("java.security.krb5.conf", "src/test/resources/krb5.conf"); + + HTTPKerberosAuthInterceptor kerberosAuthInterceptor = mock(HTTPKerberosAuthInterceptor.class); + LoginContext loginContext = PowerMockito.mock(LoginContext.class); + when(kerberosAuthInterceptor.getLoginContext(any())).thenReturn((loginContext)); + + doCallRealMethod().when(kerberosAuthInterceptor).buildSubjectCredentials(); + kerberosAuthInterceptor.buildSubjectCredentials(); + verify(loginContext, times(1)).login(); + + Subject subject = new Subject(); + when(loginContext.getSubject()).thenReturn(subject); + doCallRealMethod().when(kerberosAuthInterceptor).getContextSubject(); + kerberosAuthInterceptor.getContextSubject(); + verify(loginContext, times(1)).getSubject(); + + subject.getPrincipals().add(new KerberosPrincipal("bilal")); + subject.getPublicCredentials().add(new KerberosPrincipal("name")); + subject.getPrivateCredentials().add(new 
KerberosPrincipal("name")); + + doCallRealMethod().when(kerberosAuthInterceptor).getClientPrincipalName(); + assertThat(kerberosAuthInterceptor.getClientPrincipalName(), is(equalTo("bilal@ATHENA.MIT.EDU"))) ; + verify(loginContext, times(2)).getSubject(); + + when(kerberosAuthInterceptor.buildAuthorizationHeader(any())).thenReturn("secured-token"); + okhttp3.Request originalRequest = new okhttp3.Request.Builder().url("http://somthing").build(); + okhttp3.Response response = new okhttp3.Response.Builder().code(200).request(originalRequest). + protocol(okhttp3.Protocol.HTTP_1_1).message("ok").build(); + doCallRealMethod().when(kerberosAuthInterceptor).authenticate(null, response); + okhttp3.Request request = kerberosAuthInterceptor.authenticate(null, response); + assertThat(request.headers("Proxy-authorization"), is(equalTo(Arrays.asList("Negotiate secured-token")))); + } +/* + @Test + public void testKerberosLoginConfiguration() { + Map kerberosOptions = new HashMap(); + kerberosOptions.put("com.sun.security.auth.module.Krb5LoginModule", "required"); + kerberosOptions.put("refreshKrb5Config", "false"); + kerberosOptions.put("doNotPrompt", "false"); + kerberosOptions.put("useTicketCache", "true"); + + HTTPKerberosAuthInterceptor.KerberosLoginConfiguration kerberosConfig = new HTTPKerberosAuthInterceptor.KerberosLoginConfiguration(kerberosOptions); + AppConfigurationEntry[] appConfig = kerberosConfig.getAppConfigurationEntry(""); + assertThat("com.sun.security.auth.module.Krb5LoginModule", is(equalTo(appConfig[0].getLoginModuleName()))); + assertThat(AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, is(equalTo(appConfig[0].getControlFlag()))); + } + + @Test(expected = IllegalStateException.class) + public void testKerberosLoginConfigurationException() { + HTTPKerberosAuthInterceptor.KerberosLoginConfiguration kerberosConfig = new HTTPKerberosAuthInterceptor.KerberosLoginConfiguration(); + AppConfigurationEntry[] 
appConfig = kerberosConfig.getAppConfigurationEntry(""); + } +*/ + @Test + public void testBuildAuthorizationHeader() throws LoginException, PrivilegedActionException { + System.setProperty("java.security.krb5.conf", "src/test/resources/krb5.conf"); + + HTTPKerberosAuthInterceptor kerberosAuthInterceptor = mock(HTTPKerberosAuthInterceptor.class); + HTTPKerberosAuthInterceptor.CreateAuthorizationHeaderAction ahh = mock(HTTPKerberosAuthInterceptor.CreateAuthorizationHeaderAction.class); + when(ahh.getNegotiateToken()).thenReturn("secret-token"); + when(kerberosAuthInterceptor.getAuthorizationHeaderAction(any(), any())).thenReturn(ahh); + + LoginContext loginContext = PowerMockito.mock(LoginContext.class); + doCallRealMethod().when(kerberosAuthInterceptor).buildAuthorizationHeader("bilal"); + Subject subject = new Subject(); + when(loginContext.getSubject()).thenReturn(subject); + when(kerberosAuthInterceptor.getContextSubject()).thenReturn(subject); + when(kerberosAuthInterceptor.getLoginContext(subject)).thenReturn((loginContext)); + doCallRealMethod().when(kerberosAuthInterceptor).buildSubjectCredentials(); + kerberosAuthInterceptor.buildSubjectCredentials(); + + subject.getPrincipals().add(new KerberosPrincipal("bilal")); + subject.getPublicCredentials().add(new KerberosPrincipal("name")); + subject.getPrivateCredentials().add(new KerberosPrincipal("name")); + doCallRealMethod().when(kerberosAuthInterceptor).getClientPrincipalName(); + + assertThat("secret-token", is(equalTo(kerberosAuthInterceptor.buildAuthorizationHeader("bilal")))); + } +} diff --git a/okhttp-modules/src/test/java/io/split/httpmodules/okhttp/OkHttpClientImplTest.java b/okhttp-modules/src/test/java/io/split/httpmodules/okhttp/OkHttpClientImplTest.java new file mode 100644 index 000000000..88d93333a --- /dev/null +++ b/okhttp-modules/src/test/java/io/split/httpmodules/okhttp/OkHttpClientImplTest.java @@ -0,0 +1,429 @@ +package io.split.httpmodules.okhttp; + +import 
org.powermock.api.mockito.PowerMockito; +import org.powermock.reflect.Whitebox; + +import io.split.client.CustomHeaderDecorator; +import io.split.client.RequestDecorator; +import io.split.client.dtos.*; +import io.split.client.impressions.Impression; +import io.split.client.utils.Json; +import io.split.client.utils.SDKMetadata; +import io.split.client.dtos.SplitHttpResponse.Header; +import io.split.engine.common.FetchOptions; + +import okhttp3.OkHttpClient; +import okhttp3.OkHttpClient.*; +import okhttp3.HttpUrl; +import okhttp3.Headers; + +import okhttp3.mockwebserver.MockResponse; +import okhttp3.mockwebserver.MockWebServer; +import okhttp3.mockwebserver.RecordedRequest; +import org.junit.Assert; +import org.junit.Test; + +import java.io.*; + +import java.net.URI; +import java.net.URISyntaxException; +import java.net.HttpURLConnection; +import java.util.*; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsEqual.equalTo; +import static org.mockito.Matchers.any; +import static org.powermock.api.mockito.PowerMockito.mock; + +public class OkHttpClientImplTest { + + @Test + public void testGetWithSpecialCharacters() throws IOException, InterruptedException { + MockWebServer server = new MockWebServer(); + BufferedReader br = new BufferedReader(new FileReader("src/test/resources/split-change-special-characters.json")); + String body; + try { + StringBuilder sb = new StringBuilder(); + String line = br.readLine(); + + while (line != null) { + sb.append(line); + sb.append(System.lineSeparator()); + line = br.readLine(); + } + body = sb.toString(); + } finally { + br.close(); + } + + server.enqueue(new MockResponse().setBody(body).addHeader("via", "HTTP/1.1 s_proxy_rio1")); + server.start(); + HttpUrl baseUrl = server.url("/v1/"); + URI rootTarget = baseUrl.uri(); + + OkHttpClientImpl okHttpClientImpl = mock(OkHttpClientImpl.class); + OkHttpClient client = new OkHttpClient.Builder().build(); 
+ PowerMockito.doReturn(client).when(okHttpClientImpl).initializeClient(null, "bilal", false, + 0, 0); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).setHttpClient(null, "bilal", false, + 0, 0); + okHttpClientImpl.setHttpClient(null, "bilal", false, + 0, 0); + + Map> additionalHeaders = Collections.singletonMap("AdditionalHeader", + Collections.singletonList("add")); + FetchOptions fetchOptions = new FetchOptions.Builder().cacheControlHeaders(true).build(); + RequestDecorator requestDecorator = new RequestDecorator(null); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).get(rootTarget, fetchOptions, additionalHeaders); + okhttp3.Request.Builder requestBuilder = new okhttp3.Request.Builder(); + requestBuilder.url(rootTarget.toString()); + PowerMockito.doReturn(requestBuilder).when(okHttpClientImpl).getRequestBuilder(); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).buildBasicHeaders(); + Whitebox.setInternalState(okHttpClientImpl, "_metadata", metadata()); + Whitebox.setInternalState(okHttpClientImpl, "_apikey", "qwerty"); + Whitebox.setInternalState(okHttpClientImpl, "_decorator", requestDecorator); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).mergeHeaders(buildBasicHeaders(), additionalHeaders); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).getResponseHeaders(any()); + PowerMockito.doReturn(requestBuilder.build()).when(okHttpClientImpl).getRequest(requestBuilder); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).getRequest(requestBuilder); + + SplitHttpResponse splitHttpResponse = okHttpClientImpl.get(rootTarget, fetchOptions, additionalHeaders); + + RecordedRequest request = server.takeRequest(); + server.shutdown(); + Headers requestHeaders = request.getHeaders(); + + assertThat(splitHttpResponse.statusCode(), is(equalTo(HttpURLConnection.HTTP_OK))); + Assert.assertEquals("/v1/", request.getPath()); + assertThat(requestHeaders.get("Authorization"), is(equalTo("Bearer qwerty"))) ; + 
assertThat(requestHeaders.get("SplitSDKClientKey"), is(equalTo("erty"))); + assertThat(requestHeaders.get("SplitSDKVersion"), is(equalTo("java-1.2.3"))); + assertThat(requestHeaders.get("SplitSDKMachineIP"), is(equalTo("1.2.3.4"))); + assertThat(requestHeaders.get("SplitSDKMachineName"), is(equalTo("someIP"))); + assertThat(requestHeaders.get("AdditionalHeader"), is(equalTo("add"))); + + SplitChange change = Json.fromJson(splitHttpResponse.body(), SplitChange.class); + + Header[] headers = splitHttpResponse.responseHeaders(); + assertThat(headers[1].getName(), is(equalTo("via"))); + assertThat(headers[1].getValues().get(0), is(equalTo("HTTP/1.1 s_proxy_rio1"))); + assertThat(splitHttpResponse.statusCode(), is(equalTo(200))); + Assert.assertNotNull(change); + Assert.assertEquals(1, change.splits.size()); + Assert.assertNotNull(change.splits.get(0)); + + Split split = change.splits.get(0); + Map configs = split.configurations; + Assert.assertEquals(2, configs.size()); + Assert.assertEquals("{\"test\": \"blue\",\"grüne Straße\": 13}", configs.get("on")); + Assert.assertEquals("{\"test\": \"blue\",\"size\": 15}", configs.get("off")); + Assert.assertEquals(2, split.sets.size()); + okHttpClientImpl.close(); + } + + @Test + public void testGetErrors() throws IOException, InterruptedException { + MockWebServer server = new MockWebServer(); + server.enqueue(new MockResponse().setBody("").setResponseCode(HttpURLConnection.HTTP_INTERNAL_ERROR)); + server.start(); + HttpUrl baseUrl = server.url("/v1/"); + URI rootTarget = baseUrl.uri(); + + OkHttpClientImpl okHttpClientImpl = mock(OkHttpClientImpl.class); + OkHttpClient client = new OkHttpClient.Builder().build(); + PowerMockito.doReturn(client).when(okHttpClientImpl).initializeClient(null, "bilal", false, + 0, 0); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).setHttpClient(null, "bilal", false, + 0, 0); + okHttpClientImpl.setHttpClient(null, "bilal", false, + 0, 0); + + Map> additionalHeaders = 
Collections.singletonMap("AdditionalHeader", + Collections.singletonList("add")); + FetchOptions fetchOptions = new FetchOptions.Builder().cacheControlHeaders(true).build(); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).get(rootTarget, fetchOptions, additionalHeaders); + okhttp3.Request.Builder requestBuilder = new okhttp3.Request.Builder(); + requestBuilder.url(rootTarget.toString()); + PowerMockito.doReturn(requestBuilder).when(okHttpClientImpl).getRequestBuilder(); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).buildBasicHeaders(); + Whitebox.setInternalState(okHttpClientImpl, "_metadata", metadata()); + Whitebox.setInternalState(okHttpClientImpl, "_apikey", "qwerty"); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).mergeHeaders(buildBasicHeaders(), additionalHeaders); + RequestDecorator requestDecorator = new RequestDecorator(null); + Whitebox.setInternalState(okHttpClientImpl, "_decorator", requestDecorator); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).getResponseHeaders(any()); + + SplitHttpResponse splitHttpResponse = okHttpClientImpl.get(rootTarget, + fetchOptions, additionalHeaders); + + RecordedRequest request = server.takeRequest(); + server.shutdown(); + assertThat(splitHttpResponse.statusCode(), is(equalTo(HttpURLConnection.HTTP_INTERNAL_ERROR))); + okHttpClientImpl.close(); + } + + @Test + public void testGetParameters() throws IOException, InterruptedException { + class MyCustomHeaders implements CustomHeaderDecorator { + public MyCustomHeaders() {} + @Override + public Map> getHeaderOverrides(RequestContext context) { + Map> additionalHeaders = context.headers(); + additionalHeaders.put("first", Arrays.asList("1")); + additionalHeaders.put("second", Arrays.asList("2.1", "2.2")); + additionalHeaders.put("third", Arrays.asList("3")); + return additionalHeaders; + } + } + MockWebServer server = new MockWebServer(); + BufferedReader br = new BufferedReader(new 
FileReader("src/test/resources/split-change-special-characters.json")); + String body; + try { + StringBuilder sb = new StringBuilder(); + String line = br.readLine(); + + while (line != null) { + sb.append(line); + sb.append(System.lineSeparator()); + line = br.readLine(); + } + body = sb.toString(); + } finally { + br.close(); + } + + server.enqueue(new MockResponse().setBody(body).addHeader("via", "HTTP/1.1 s_proxy_rio1")); + server.start(); + HttpUrl baseUrl = server.url("/splitChanges?since=1234567"); + URI rootTarget = baseUrl.uri(); + RequestDecorator requestDecorator = new RequestDecorator(new MyCustomHeaders()); + + OkHttpClientImpl okHttpClientImpl = mock(OkHttpClientImpl.class); + OkHttpClient client = new OkHttpClient.Builder().build(); + PowerMockito.doReturn(client).when(okHttpClientImpl).initializeClient(null, "bilal", false, + 0, 0); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).setHttpClient(null, "bilal", false, + 0, 0); + okHttpClientImpl.setHttpClient(null, "bilal", false, + 0, 0); + + FetchOptions fetchOptions = new FetchOptions.Builder().cacheControlHeaders(true).build(); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).get(rootTarget, fetchOptions, null); + okhttp3.Request.Builder requestBuilder = new okhttp3.Request.Builder(); + requestBuilder.url(rootTarget.toString()); + PowerMockito.doReturn(requestBuilder).when(okHttpClientImpl).getRequestBuilder(); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).buildBasicHeaders(); + Whitebox.setInternalState(okHttpClientImpl, "_metadata", metadata()); + Whitebox.setInternalState(okHttpClientImpl, "_apikey", "qwerty"); + Whitebox.setInternalState(okHttpClientImpl, "_decorator", requestDecorator); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).mergeHeaders(buildBasicHeaders(), null); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).getResponseHeaders(any()); + FetchOptions options = new FetchOptions.Builder().cacheControlHeaders(true).build(); + + 
SplitHttpResponse splitHttpResponse = okHttpClientImpl.get(rootTarget, options, null); + + RecordedRequest request = server.takeRequest(); + server.shutdown(); + Headers requestHeaders = request.getHeaders(); + + assertThat(requestHeaders.get("Cache-Control"), is(equalTo("no-cache"))); + assertThat(requestHeaders.get("first"), is(equalTo("1"))); + assertThat(requestHeaders.values("second"), is(equalTo(Arrays.asList("2.1","2.2")))); + assertThat(requestHeaders.get("third"), is(equalTo("3"))); + Assert.assertEquals("/splitChanges?since=1234567", request.getPath()); + assertThat(request.getMethod(), is(equalTo("GET"))); + } + + @Test(expected = IllegalStateException.class) + public void testException() throws URISyntaxException, IOException { + URI rootTarget = new URI("https://api.split.io/splitChanges?since=1234567"); + RequestDecorator requestDecorator = null; + + OkHttpClientImpl okHttpClientImpl = mock(OkHttpClientImpl.class); + OkHttpClient client = new OkHttpClient.Builder().build(); + PowerMockito.doReturn(client).when(okHttpClientImpl).initializeClient(null, "bilal", false, + 0, 0); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).setHttpClient(null, "bilal", false, + 0, 0); + okHttpClientImpl.setHttpClient(null, "bilal", false, + 0, 0); + + FetchOptions fetchOptions = new FetchOptions.Builder().cacheControlHeaders(true).build(); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).get(rootTarget, fetchOptions, null); + okhttp3.Request.Builder requestBuilder = new okhttp3.Request.Builder(); + requestBuilder.url(rootTarget.toString()); + PowerMockito.doReturn(requestBuilder).when(okHttpClientImpl).getRequestBuilder(); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).buildBasicHeaders(); + Whitebox.setInternalState(okHttpClientImpl, "_metadata", metadata()); + Whitebox.setInternalState(okHttpClientImpl, "_apikey", "qwerty"); + Whitebox.setInternalState(okHttpClientImpl, "_decorator", 
requestDecorator); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).mergeHeaders(buildBasicHeaders(), null); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).getResponseHeaders(any()); + FetchOptions options = new FetchOptions.Builder().cacheControlHeaders(true).build(); + + SplitHttpResponse splitHttpResponse = okHttpClientImpl.get(rootTarget, + new FetchOptions.Builder().cacheControlHeaders(true).build(), null); + } + + + + @Test + public void testPost() throws IOException, InterruptedException { + MockWebServer server = new MockWebServer(); + + server.enqueue(new MockResponse().addHeader("via", "HTTP/1.1 s_proxy_rio1")); + server.start(); + HttpUrl baseUrl = server.url("/impressions"); + URI rootTarget = baseUrl.uri(); + RequestDecorator requestDecorator = new RequestDecorator(null); + + OkHttpClientImpl okHttpClientImpl = mock(OkHttpClientImpl.class); + OkHttpClient client = new OkHttpClient.Builder().build(); + PowerMockito.doReturn(client).when(okHttpClientImpl).initializeClient(null, "bilal", false, + 0, 0); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).setHttpClient(null, "bilal", false, + 0, 0); + okHttpClientImpl.setHttpClient(null, "bilal", false, + 0, 0); + Map> additionalHeaders = Collections.singletonMap("SplitSDKImpressionsMode", + Collections.singletonList("OPTIMIZED")); + + okhttp3.Request.Builder requestBuilder = new okhttp3.Request.Builder(); + requestBuilder.url(rootTarget.toString()); + PowerMockito.doReturn(requestBuilder).when(okHttpClientImpl).getRequestBuilder(); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).buildBasicHeaders(); + Whitebox.setInternalState(okHttpClientImpl, "_metadata", metadata()); + Whitebox.setInternalState(okHttpClientImpl, "_apikey", "qwerty"); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).mergeHeaders(buildBasicHeaders(), additionalHeaders); + Whitebox.setInternalState(okHttpClientImpl, "_decorator", requestDecorator); + 
PowerMockito.doCallRealMethod().when(okHttpClientImpl).getResponseHeaders(any()); + // Send impressions + List toSend = Arrays.asList(new TestImpressions("t1", Arrays.asList( + KeyImpression.fromImpression(new Impression("k1", null, "t1", "on", 123L, "r1", 456L, null)), + KeyImpression.fromImpression(new Impression("k2", null, "t1", "on", 123L, "r1", 456L, null)), + KeyImpression.fromImpression(new Impression("k3", null, "t1", "on", 123L, "r1", 456L, null)))), + new TestImpressions("t2", Arrays.asList( + KeyImpression.fromImpression(new Impression("k1", null, "t2", "on", 123L, "r1", 456L, null)), + KeyImpression.fromImpression(new Impression("k2", null, "t2", "on", 123L, "r1", 456L, null)), + KeyImpression.fromImpression(new Impression("k3", null, "t2", "on", 123L, "r1", 456L, null))))); + String data = Json.toJson(toSend); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).post(rootTarget, data, + additionalHeaders); + + SplitHttpResponse splitHttpResponse = okHttpClientImpl.post(rootTarget, data, + additionalHeaders); + + RecordedRequest request = server.takeRequest(); + server.shutdown(); + Headers requestHeaders = request.getHeaders(); + + Assert.assertEquals("POST /impressions HTTP/1.1", request.getRequestLine()); + Assert.assertEquals(data, request.getBody().readUtf8()); + assertThat(requestHeaders.get("Authorization"), is(equalTo("Bearer qwerty"))) ; + assertThat(requestHeaders.get("SplitSDKClientKey"), is(equalTo("erty"))); + assertThat(requestHeaders.get("SplitSDKVersion"), is(equalTo("java-1.2.3"))); + assertThat(requestHeaders.get("SplitSDKMachineIP"), is(equalTo("1.2.3.4"))); + assertThat(requestHeaders.get("SplitSDKMachineName"), is(equalTo("someIP"))); + assertThat(requestHeaders.get("SplitSDKImpressionsMode"), is(equalTo("OPTIMIZED"))); + + Header[] headers = splitHttpResponse.responseHeaders(); + assertThat(headers[1].getName(), is(equalTo("via"))); + assertThat(headers[1].getValues().get(0), is(equalTo("HTTP/1.1 s_proxy_rio1"))); + 
assertThat(splitHttpResponse.statusCode(), is(equalTo(200))); + } + + @Test + public void testPostErrors() throws IOException, InterruptedException { + MockWebServer server = new MockWebServer(); + server.enqueue(new MockResponse().setBody("").setResponseCode(HttpURLConnection.HTTP_INTERNAL_ERROR)); + server.start(); + HttpUrl baseUrl = server.url("/v1/"); + URI rootTarget = baseUrl.uri(); + RequestDecorator requestDecorator = new RequestDecorator(null); + + OkHttpClientImpl okHttpClientImpl = mock(OkHttpClientImpl.class); + OkHttpClient client = new OkHttpClient.Builder().build(); + PowerMockito.doReturn(client).when(okHttpClientImpl).initializeClient(null, "bilal", false, + 0, 0); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).setHttpClient(null, "bilal", false, + 0, 0); + okHttpClientImpl.setHttpClient(null, "bilal", false, + 0, 0); + Map> additionalHeaders = Collections.singletonMap("SplitSDKImpressionsMode", + Collections.singletonList("OPTIMIZED")); + + okhttp3.Request.Builder requestBuilder = new okhttp3.Request.Builder(); + requestBuilder.url(rootTarget.toString()); + PowerMockito.doReturn(requestBuilder).when(okHttpClientImpl).getRequestBuilder(); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).buildBasicHeaders(); + Whitebox.setInternalState(okHttpClientImpl, "_metadata", metadata()); + Whitebox.setInternalState(okHttpClientImpl, "_apikey", "qwerty"); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).mergeHeaders(buildBasicHeaders(), additionalHeaders); + Whitebox.setInternalState(okHttpClientImpl, "_decorator", requestDecorator); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).getResponseHeaders(any()); + + String data = Json.toJson("<>"); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).post(rootTarget, data, + additionalHeaders); + + SplitHttpResponse splitHttpResponse = okHttpClientImpl.post(rootTarget, data, + additionalHeaders); + + RecordedRequest request = server.takeRequest(); + server.shutdown(); + 
assertThat(splitHttpResponse.statusCode(), is(equalTo(HttpURLConnection.HTTP_INTERNAL_ERROR))); + okHttpClientImpl.close(); + } + + @Test(expected = IllegalStateException.class) + public void testPosttException() throws URISyntaxException, IOException { + URI rootTarget = new URI("https://kubernetesturl.com/split/api/testImpressions/bulk"); + + OkHttpClientImpl okHttpClientImpl = mock(OkHttpClientImpl.class); + OkHttpClient client = new OkHttpClient.Builder().build(); + PowerMockito.doReturn(client).when(okHttpClientImpl).initializeClient(null, "bilal", false, + 0, 0); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).setHttpClient(null, "bilal", false, + 0, 0); + okHttpClientImpl.setHttpClient(null, "bilal", false, + 0, 0); + Map> additionalHeaders = Collections.singletonMap("SplitSDKImpressionsMode", + Collections.singletonList("OPTIMIZED")); + + okhttp3.Request.Builder requestBuilder = new okhttp3.Request.Builder(); + RequestDecorator requestDecorator = new RequestDecorator(null); + requestBuilder.url(rootTarget.toString()); + PowerMockito.doReturn(requestBuilder).when(okHttpClientImpl).getRequestBuilder(); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).buildBasicHeaders(); + Whitebox.setInternalState(okHttpClientImpl, "_metadata", metadata()); + Whitebox.setInternalState(okHttpClientImpl, "_apikey", "qwerty"); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).mergeHeaders(buildBasicHeaders(), additionalHeaders); + Whitebox.setInternalState(okHttpClientImpl, "_decorator", requestDecorator); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).getResponseHeaders(any()); + + String data = Json.toJson("<>"); + PowerMockito.doCallRealMethod().when(okHttpClientImpl).post(rootTarget, data, + additionalHeaders); + + SplitHttpResponse splitHttpResponse = okHttpClientImpl.post(rootTarget, data, + additionalHeaders); + } + + private SDKMetadata metadata() { + return new SDKMetadata("java-1.2.3", 
"1.2.3.4", "someIP"); + } + + private Map> buildBasicHeaders() { + Map> h = new HashMap<>(); + h.put("Authorization", Collections.singletonList("Bearer qwerty")); + h.put("SplitSDKVersion", Collections.singletonList(metadata().getSdkVersion())); + h.put("SplitSDKMachineIP", Collections.singletonList(metadata().getMachineIp())); + h.put("SplitSDKMachineName", Collections.singletonList(metadata().getMachineName())); + h.put("SplitSDKClientKey", Collections.singletonList("qwerty".length() > 4 + ? "qwerty".substring("qwerty".length() - 4) + : "qwerty")); + return h; + } + +} diff --git a/okhttp-modules/src/test/java/io/split/httpmodules/okhttp/OkHttpModuleTests.java b/okhttp-modules/src/test/java/io/split/httpmodules/okhttp/OkHttpModuleTests.java new file mode 100644 index 000000000..d8c5b5242 --- /dev/null +++ b/okhttp-modules/src/test/java/io/split/httpmodules/okhttp/OkHttpModuleTests.java @@ -0,0 +1,111 @@ +package io.split.httpmodules.okhttp; + +import io.split.client.RequestDecorator; +import io.split.client.utils.SDKMetadata; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; + +import org.mockito.stubbing.Answer; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; + +import java.net.InetSocketAddress; +import java.net.Proxy; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsEqual.equalTo; +import static org.mockito.Mockito.verify; +import static org.powermock.api.mockito.PowerMockito.mock; +import static org.powermock.api.mockito.PowerMockito.whenNew; + +@RunWith(PowerMockRunner.class) +@PrepareForTest(OkHttpModule.class) +public class OkHttpModuleTests { + @Test + public void checkProxySettings() { + OkHttpModule module = OkHttpModule.builder() + .proxyAuthScheme(ProxyAuthScheme.KERBEROS) + 
.proxyAuthKerberosPrincipalName("bilal@bilal") + .proxyHost("some-proxy") + .proxyPort(3128) + .build(); + Assert.assertEquals(ProxyAuthScheme.KERBEROS, module.proxyAuthScheme()); + Assert.assertEquals("bilal@bilal", module.proxyKerberosPrincipalName()); + Assert.assertEquals("HTTP @ some-proxy:3128", module.proxy().toString()); + } + + @Test + public void checkDebugLog() { + OkHttpModule module = OkHttpModule.builder() + .debugEnabled() + .build(); + Assert.assertEquals(true, module.debugEnabled()); + + module = OkHttpModule.builder() + .build(); + Assert.assertEquals(false, module.debugEnabled()); + } + + @Test + public void checkTimeouts() { + OkHttpModule module = OkHttpModule.builder() + .build(); + Assert.assertEquals(15000, (int) module.connectionTimeout()); + Assert.assertEquals(15000, (int) module.readTimeout()); + + module = OkHttpModule.builder() + .connectionTimeout(13000) + .readTimeout(14000) + .build(); + Assert.assertEquals(13000, (int) module.connectionTimeout()); + Assert.assertEquals(14000, (int) module.readTimeout()); + + module = OkHttpModule.builder() + .connectionTimeout(-1) + .readTimeout(-10) + .build(); + Assert.assertEquals(15000, (int) module.connectionTimeout()); + Assert.assertEquals(15000, (int) module.readTimeout()); + } + + @Test + public void testCreateClient() throws Exception { + OkHttpClientImpl mockclient = mock(OkHttpClientImpl.class); + AtomicBoolean argsCaptured = new AtomicBoolean(false); + + Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress("some-proxy", 3128)); + String apiToken = "qwerty"; + SDKMetadata sdkMetadata = new SDKMetadata("1.1.1", "ip", "name"); + RequestDecorator requestDecorator = new RequestDecorator(null); + + whenNew(OkHttpClientImpl.class).withAnyArguments() + .then((Answer) invocationOnMock -> { + assertThat("qwerty", is(equalTo((String) invocationOnMock.getArguments()[0]))); + assertThat(sdkMetadata, is(equalTo((SDKMetadata) invocationOnMock.getArguments()[1]))); + assertThat(proxy, 
is(equalTo((Proxy) invocationOnMock.getArguments()[2]))); + assertThat("bilal@bilal", is(equalTo((String) invocationOnMock.getArguments()[3]))); + assertThat(false, is(equalTo((Boolean) invocationOnMock.getArguments()[4]))); + assertThat(11000, is(equalTo((Integer) invocationOnMock.getArguments()[5]))); + assertThat(12000, is(equalTo((Integer) invocationOnMock.getArguments()[6]))); + assertThat(requestDecorator, is(equalTo((RequestDecorator) invocationOnMock.getArguments()[7]))); + argsCaptured.set(true); + return mockclient; + } + ); + + OkHttpModule module = OkHttpModule.builder() + .proxyAuthScheme(ProxyAuthScheme.KERBEROS) + .proxyAuthKerberosPrincipalName("bilal@bilal") + .proxyHost("some-proxy") + .proxyPort(3128) + .connectionTimeout(12000) + .readTimeout(11000) + .build(); + + module.createClient(apiToken, sdkMetadata, requestDecorator); + assertThat(true, is(equalTo(argsCaptured.get()))); + } +} diff --git a/okhttp-modules/src/test/java/io/split/httpmodules/okhttp/SplitConfigTests.java b/okhttp-modules/src/test/java/io/split/httpmodules/okhttp/SplitConfigTests.java new file mode 100644 index 000000000..20feddb38 --- /dev/null +++ b/okhttp-modules/src/test/java/io/split/httpmodules/okhttp/SplitConfigTests.java @@ -0,0 +1,45 @@ +package io.split.httpmodules.okhttp; + +import io.split.client.SplitClientConfig; +import org.junit.Assert; +import org.junit.Test; + +public class SplitConfigTests { + + @Test + public void checkExpectedAuthScheme() { + SplitClientConfig cfg = SplitClientConfig.builder() + .alternativeHTTPModule(OkHttpModule.builder() + .proxyAuthScheme(ProxyAuthScheme.KERBEROS) + .proxyAuthKerberosPrincipalName("bilal@bilal") + .proxyHost("some-proxy") + .proxyPort(3128) + .debugEnabled() + .build() + ) + .streamingEnabled(false) + .build(); + OkHttpModule module = (OkHttpModule) cfg.alternativeHTTPModule(); + Assert.assertEquals(ProxyAuthScheme.KERBEROS, module.proxyAuthScheme()); + Assert.assertEquals("bilal@bilal", 
module.proxyKerberosPrincipalName()); + Assert.assertEquals("HTTP @ some-proxy:3128", module.proxy().toString()); + + cfg = SplitClientConfig.builder() + .build(); + Assert.assertEquals(null, cfg.alternativeHTTPModule()); + } + + @Test(expected = IllegalArgumentException.class) + public void checkStreamingEnabled() { + SplitClientConfig cfg = SplitClientConfig.builder() + .alternativeHTTPModule(OkHttpModule.builder() + .proxyAuthScheme(ProxyAuthScheme.KERBEROS) + .proxyAuthKerberosPrincipalName("bilal@bilal") + .proxyHost("some-proxy") + .proxyPort(3128) + .debugEnabled() + .build()) + .streamingEnabled(true) + .build(); + } +} diff --git a/okhttp-modules/src/test/java/io/split/httpmodules/okhttp/SplitFactoryTests.java b/okhttp-modules/src/test/java/io/split/httpmodules/okhttp/SplitFactoryTests.java new file mode 100644 index 000000000..23cf3cb53 --- /dev/null +++ b/okhttp-modules/src/test/java/io/split/httpmodules/okhttp/SplitFactoryTests.java @@ -0,0 +1,68 @@ +package io.split.httpmodules.okhttp; + +import io.split.client.*; +import io.split.client.utils.SDKMetadata; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.stubbing.Answer; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; + +import java.net.InetSocketAddress; +import java.net.Proxy; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsEqual.equalTo; +import static org.powermock.api.mockito.PowerMockito.mock; +import static org.powermock.api.mockito.PowerMockito.whenNew; + +@RunWith(PowerMockRunner.class) +@PrepareForTest(OkHttpModule.class) +public class SplitFactoryTests { + @Test + public void testFactoryCreatingClient() throws Exception { + OkHttpClientImpl mockclient = 
mock(OkHttpClientImpl.class); + AtomicBoolean argsCaptured = new AtomicBoolean(false); + + Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress("some-proxy", 3128)); + String apiToken = "qwerty"; + + whenNew(OkHttpClientImpl.class).withAnyArguments() + .then((Answer) invocationOnMock -> { + assertThat("qwerty", is(equalTo((String) invocationOnMock.getArguments()[0]))); + assertThat((SDKMetadata) invocationOnMock.getArguments()[1], instanceOf(SDKMetadata.class)); + assertThat(proxy, is(equalTo((Proxy) invocationOnMock.getArguments()[2]))); + assertThat("bilal@bilal", is(equalTo((String) invocationOnMock.getArguments()[3]))); + assertThat(false, is(equalTo((Boolean) invocationOnMock.getArguments()[4]))); + assertThat(11000, is(equalTo((Integer) invocationOnMock.getArguments()[5]))); + assertThat(12000, is(equalTo((Integer) invocationOnMock.getArguments()[6]))); + assertThat((RequestDecorator) invocationOnMock.getArguments()[7], instanceOf(RequestDecorator.class)); + argsCaptured.set(true); + return mockclient; + } + ); + + OkHttpModule module = OkHttpModule.builder() + .proxyAuthScheme(ProxyAuthScheme.KERBEROS) + .proxyAuthKerberosPrincipalName("bilal@bilal") + .proxyHost("some-proxy") + .proxyPort(3128) + .connectionTimeout(12000) + .readTimeout(11000) + .build(); + + SplitClientConfig cfg = SplitClientConfig.builder() + .alternativeHTTPModule(module) + .streamingEnabled(false) + .build(); + + SplitFactoryImpl factory = (SplitFactoryImpl) SplitFactoryBuilder.build(apiToken, cfg); + +// module.createClient(apiToken, sdkMetadata, requestDecorator); + assertThat(true, is(equalTo(argsCaptured.get()))); + } +} diff --git a/okhttp-modules/src/test/resources/krb5.conf b/okhttp-modules/src/test/resources/krb5.conf new file mode 100644 index 000000000..78d63ba8f --- /dev/null +++ b/okhttp-modules/src/test/resources/krb5.conf @@ -0,0 +1,37 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +[libdefaults] + kdc_realm = ATHENA.MIT.EDU + default_realm = ATHENA.MIT.EDU + kdc_tcp_port = 88 + kdc_udp_port = 88 + dns_lookup_realm = false + dns_lookup_kdc = false + udp_preference_limit = 1 + +[logging] + default = FILE:/var/logs/krb5kdc.log + +[realms] + ATHENA.MIT.EDU = { +# kdc = 10.12.4.76:88 +# kdc = tcp/10.12.4.76:88 +# kdc = tcp/192.168.1.19:88 + kdc = 192.168.1.19:88 + } \ No newline at end of file diff --git a/okhttp-modules/src/test/resources/org/powermock/extensions/configuration.properties b/okhttp-modules/src/test/resources/org/powermock/extensions/configuration.properties new file mode 100644 index 000000000..a8ebaeba3 --- /dev/null +++ b/okhttp-modules/src/test/resources/org/powermock/extensions/configuration.properties @@ -0,0 +1 @@ +powermock.global-ignore=jdk.internal.reflect.*,javax.net.ssl.* \ No newline at end of file diff --git a/okhttp-modules/src/test/resources/split-change-special-characters.json b/okhttp-modules/src/test/resources/split-change-special-characters.json new file mode 100644 index 000000000..9fd55904e --- /dev/null +++ b/okhttp-modules/src/test/resources/split-change-special-characters.json @@ -0,0 +1,56 @@ +{ + "splits": [ + { + "trafficTypeName": "user", + "name": "DEMO_MURMUR2", + 
"trafficAllocation": 100, + "trafficAllocationSeed": 1314112417, + "seed": -2059033614, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "of", + "changeNumber": 1491244291288, + "sets": [ "set1", "set2" ], + "algo": 2, + "configurations": { + "on": "{\"test\": \"blue\",\"grüne Straße\": 13}", + "off": "{\"test\": \"blue\",\"size\": 15}" + }, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "of", + "size": 100 + } + ], + "label": "in segment all" + } + ] + } + ], + "since": 1491244291288, + "till": 1491244291288 +} diff --git a/pluggable-storage/pom.xml b/pluggable-storage/pom.xml index 52e57ee0e..c6d3abb70 100644 --- a/pluggable-storage/pom.xml +++ b/pluggable-storage/pom.xml @@ -6,28 +6,35 @@ java-client-parent io.split.client - 4.4.2 + 4.18.2 - 1.0.0 + 2.1.0 pluggable-storage jar Package for Pluggable Storage Wrapper interface to implement Pluggable Storage - - - - org.sonatype.plugins - nexus-staging-maven-plugin - 1.6.3 - true - - false - - - - - + + + release + + + + org.sonatype.central + central-publishing-maven-plugin + 0.8.0 + true + + true + central + false + published + + + + + + diff --git a/pluggable-storage/src/main/java/pluggable/CustomStorageWrapper.java b/pluggable-storage/src/main/java/pluggable/CustomStorageWrapper.java index 47e531b39..67f81d7a5 100644 --- a/pluggable-storage/src/main/java/pluggable/CustomStorageWrapper.java +++ b/pluggable-storage/src/main/java/pluggable/CustomStorageWrapper.java @@ -8,6 +8,7 @@ public interface CustomStorageWrapper { String get(String key) throws Exception; List getMany(List keys) throws 
Exception; void set(String key, String item) throws Exception; + void hSet(String key, String field, String item) throws Exception; void delete(List keys) throws Exception; String getAndSet(String key, String item) throws Exception; Set getKeysByPrefix(String prefix) throws Exception; @@ -15,6 +16,7 @@ public interface CustomStorageWrapper { // integer operations long increment(String key, long value) throws Exception; long decrement(String key, long value) throws Exception; + long hIncrement(String key, String field, long value) throws Exception; // queue operations long pushItems(String key, List items) throws Exception; @@ -26,6 +28,8 @@ public interface CustomStorageWrapper { void addItems(String key, List items) throws Exception; void removeItems(String key, List items) throws Exception; List getItems(List keys) throws Exception; + Set getMembers(String key) throws Exception; boolean connect() throws Exception; boolean disconnect() throws Exception; -} + Pipeline pipeline() throws Exception; +} \ No newline at end of file diff --git a/pluggable-storage/src/main/java/pluggable/HasPipelineSupport.java b/pluggable-storage/src/main/java/pluggable/HasPipelineSupport.java new file mode 100644 index 000000000..ad162deb6 --- /dev/null +++ b/pluggable-storage/src/main/java/pluggable/HasPipelineSupport.java @@ -0,0 +1,5 @@ +package pluggable; + +public interface HasPipelineSupport { + Pipeline pipeline() throws Exception; +} diff --git a/pluggable-storage/src/main/java/pluggable/NotPipelinedImpl.java b/pluggable-storage/src/main/java/pluggable/NotPipelinedImpl.java new file mode 100644 index 000000000..8c7bd0a6f --- /dev/null +++ b/pluggable-storage/src/main/java/pluggable/NotPipelinedImpl.java @@ -0,0 +1,35 @@ +package pluggable; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.Callable; + +public class NotPipelinedImpl implements Pipeline { + + private final List> _methods; + private final CustomStorageWrapper _storage; + + public 
NotPipelinedImpl(CustomStorageWrapper storage) { + _methods = new ArrayList<>(); + _storage = storage; + } + + @Override + public List exec() throws Exception { + List result = new ArrayList<>(); + for (Callable method : _methods) { + result.add(new Result(method.call())); + } + return result; + } + + @Override + public void hIncrement(String key, String field, long value) { + _methods.add(() -> { return _storage.hIncrement(key, field, value);}); + } + + @Override + public void getMembers(String key) throws Exception { + _methods.add(() -> { return _storage.getMembers(key);}); + } +} \ No newline at end of file diff --git a/pluggable-storage/src/main/java/pluggable/Pipeline.java b/pluggable-storage/src/main/java/pluggable/Pipeline.java new file mode 100644 index 000000000..09b40bbfb --- /dev/null +++ b/pluggable-storage/src/main/java/pluggable/Pipeline.java @@ -0,0 +1,9 @@ +package pluggable; + +import java.util.List; + +public interface Pipeline { + List exec() throws Exception; + void hIncrement(String key, String field, long value); + void getMembers(String key) throws Exception; +} diff --git a/pluggable-storage/src/main/java/pluggable/Result.java b/pluggable-storage/src/main/java/pluggable/Result.java new file mode 100644 index 000000000..26b5d6d3a --- /dev/null +++ b/pluggable-storage/src/main/java/pluggable/Result.java @@ -0,0 +1,33 @@ +package pluggable; + +import java.util.HashSet; +import java.util.Optional; + +public class Result { + private final Object _item; + + public Result(Object item) { + _item = item; + } + + public Optional asString() { + if (_item instanceof String) { + return Optional.ofNullable((String)_item); + } + return Optional.empty(); + } + + public Optional> asHash() { + if (_item instanceof HashSet) { + return Optional.ofNullable((HashSet) _item); + } + return Optional.empty(); + } + + public Optional asLong() { + if (_item instanceof Long) { + return Optional.ofNullable((Long)_item); + } + return Optional.empty(); + } +} diff --git 
a/pom.xml b/pom.xml index 982f85648..d0d321ed7 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 io.split.client java-client-parent - 4.4.2 + 4.18.2 @@ -12,11 +12,6 @@ junit 4.13.1 - - org.slf4j - slf4j-api - 1.7.25 - pom @@ -52,16 +47,15 @@ scm:git@github.com:splitio/java-client.git git@github.com:splitio/java-client.git - - - ossrh - https://oss.sonatype.org/content/repositories/snapshots - + + - sonatype releases + ossrh https://oss.sonatype.org/content/repositories/releases - + + + UTF-8 UTF-8 @@ -70,10 +64,11 @@ 1.8 - testing - client pluggable-storage redis-wrapper + testing + okhttp-modules + client @@ -82,20 +77,8 @@ maven-compiler-plugin 3.3 - 1.8 - 1.8 - - - - - org.sonatype.plugins - nexus-staging-maven-plugin - 1.6.3 - true - - ossrh - https://oss.sonatype.org/ - true + ${maven.compiler.source} + ${maven.compiler.target} @@ -130,11 +113,55 @@ + + org.apache.maven.plugins + maven-checkstyle-plugin + 3.3.0 + + + checkstyle + validate + + check + + + + + .github/linter/google-java-style.xml + warning + true + + + + test + + + + org.apache.maven.plugins + maven-javadoc-plugin + + + attach-javadocs + + jar + + + -Xdoclint:none + 1.8 + + + + + + + + release + @@ -167,6 +194,18 @@ + + + org.sonatype.central + central-publishing-maven-plugin + 0.8.0 + true + + central + false + published + + diff --git a/redis-wrapper/pom.xml b/redis-wrapper/pom.xml index ab52f2b85..486d5dc8f 100644 --- a/redis-wrapper/pom.xml +++ b/redis-wrapper/pom.xml @@ -6,11 +6,10 @@ java-client-parent io.split.client - 4.4.2 + 4.18.2 - redis-wrapper - 1.0.0 + 3.1.2 jar Package for Redis Wrapper Implementation Implements Redis Pluggable Storage @@ -23,42 +22,91 @@ io.split.client pluggable-storage - 1.0.0 + 2.1.0 compile redis.clients jedis - 3.7.0 + 4.4.8 - junit junit test - - com.google.guava - guava - 30.0-jre - compile - + + + release + + + + 
org.sonatype.central + central-publishing-maven-plugin + 0.8.0 + true + + true + central + false + published + + + + + + - org.sonatype.plugins - nexus-staging-maven-plugin - 1.6.3 - true - - false + org.apache.maven.plugins + maven-shade-plugin + 3.2.4 + + + package + + shade + + + false + true + + + redis.clients:* + + + + + + + + + redis.clients.jedis + redis.clients.jedis + + + + + *:* + + META-INF/license/** + META-INF/* + META-INF/maven/** + META-INF/services/** + LICENSE + NOTICE + /*.txt + build.properties + + + + + - - - diff --git a/redis-wrapper/src/main/java/redis/RedisCluster.java b/redis-wrapper/src/main/java/redis/RedisCluster.java new file mode 100644 index 000000000..c63f1b87e --- /dev/null +++ b/redis-wrapper/src/main/java/redis/RedisCluster.java @@ -0,0 +1,270 @@ +package redis; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import pluggable.CustomStorageWrapper; +import pluggable.Pipeline; +import redis.clients.jedis.JedisCluster; +import redis.common.CommonRedis; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +class RedisCluster implements CustomStorageWrapper { + private final CommonRedis _commonRedis; + private final JedisCluster jedis; + + public static final String DEFAULT_HASHTAG = "{SPLITIO}" ; + + private static final Logger _log = LoggerFactory.getLogger(RedisCluster.class); + + private String validateHashtag(String hashtag) { + if (hashtag == null) { + _log.warn("The hashtag wasn't set. It's going to use the default hashtag"); + return DEFAULT_HASHTAG; + } + if (hashtag.length() <= 2) { + _log.warn("The hashtag is too short. It's going to use the default hashtag"); + return DEFAULT_HASHTAG; + } + if (!hashtag.startsWith("{")) { + _log.warn("The hashtag doesn't have '{'. It's going to use the default hashtag"); + return DEFAULT_HASHTAG; + } + if (!hashtag.endsWith("}")) { + _log.warn("The hashtag doesn't have '}'. 
It's going to use the default hashtag"); + return DEFAULT_HASHTAG; + } + + return hashtag; + } + + public RedisCluster(JedisCluster jedisCluster, String prefix, String hashtag) { + this.jedis = jedisCluster; + _commonRedis = CommonRedis.create(validateHashtag(hashtag) + prefix); + } + + @Override + public String get(String key) throws Exception { + try { + return jedis.get(_commonRedis.buildKeyWithPrefix(key)); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public List getMany(List keys) throws Exception { + if(keys == null || keys.isEmpty()){ + return new ArrayList<>(); + } + try { + keys = keys.stream().map(key -> _commonRedis.buildKeyWithPrefix(key)).collect(Collectors.toList()); + + return jedis.mget(keys.toArray(new String[keys.size()])); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public void set(String key, String item) throws Exception { + try { + if(key.contains(CommonRedis.TELEMETRY_INIT)) { + String[] splittedKey = key.split("::"); + jedis.hset(_commonRedis.buildKeyWithPrefix(splittedKey[0]), splittedKey[1], item); + return; + } + jedis.set(_commonRedis.buildKeyWithPrefix(key), item); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public void hSet(String key, String field, String item) throws Exception { + try { + jedis.hset(_commonRedis.buildKeyWithPrefix(key), field, item); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public void delete(List keys) throws Exception { + if(keys == null || keys.isEmpty()){ + return ; + } + try { + keys = keys.stream().map(key -> _commonRedis.buildKeyWithPrefix(key)).collect(Collectors.toList()); + + jedis.del(keys.toArray(new String[keys.size()])); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public String getAndSet(String key, String item) throws Exception { + //Todo if 
this method isn't used we should deprecated + try { + return jedis.getSet(_commonRedis.buildKeyWithPrefix(key), item); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public Set getKeysByPrefix(String prefix) throws Exception { + try { + Set keysWithPrefix = jedis.keys(_commonRedis.buildKeyWithPrefix(prefix)); + keysWithPrefix = keysWithPrefix.stream().map(key -> key.replace(_commonRedis.getPrefix() + ".", "")).collect(Collectors.toSet()); + return keysWithPrefix; + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public long increment(String key, long value) throws Exception { + try { + return jedis.incrBy(_commonRedis.buildKeyWithPrefix(key), value); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public long hIncrement(String key, String field, long value) throws RedisException { + try { + return jedis.hincrBy(_commonRedis.buildKeyWithPrefix(key), field, value); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public long decrement(String key, long value) throws Exception { + try { + return jedis.decrBy(_commonRedis.buildKeyWithPrefix(key), value); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public long pushItems(String key, List items) throws Exception { + try { + long addedItems = jedis.rpush(_commonRedis.buildKeyWithPrefix(key), items.toArray(new String[items.size()])); + if((CommonRedis.EVENTS_KEY.equals(key) || CommonRedis.IMPRESSIONS_KEY.equals(key)) && addedItems == items.size()) { + jedis.pexpire(key, CommonRedis.IMPRESSIONS_OR_EVENTS_DEFAULT_TTL); + } + return addedItems; + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public List popItems(String key, long count) throws Exception { + try { + String keyWithPrefix = _commonRedis.buildKeyWithPrefix(key); + List items = 
jedis.lrange(keyWithPrefix, 0, count-1); + int fetchedCount = items.size(); + jedis.ltrim(keyWithPrefix, fetchedCount, -1); + return items; + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + // Return length of redis set. + @Override + public long getItemsCount(String key) throws Exception { + try { + return jedis.scard(_commonRedis.buildKeyWithPrefix(key)); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public boolean itemContains(String key, String item) throws Exception { + try { + return jedis.sismember(_commonRedis.buildKeyWithPrefix(key), item); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public void addItems(String key, List items) throws Exception { + try { + jedis.sadd(_commonRedis.buildKeyWithPrefix(key), items.toArray(new String[items.size()])); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public void removeItems(String key, List items) throws Exception { + try { + jedis.srem(_commonRedis.buildKeyWithPrefix(key), items.toArray(new String[items.size()])); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public List getItems(List keys) throws Exception { + if(keys == null || keys.isEmpty()){ + return new ArrayList<>(); + } + try { + keys = keys.stream().map(key -> _commonRedis.buildKeyWithPrefix(key)).collect(Collectors.toList()); + + return jedis.mget(keys.toArray(new String[keys.size()])); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public Set getMembers(String key) throws Exception { + Set items; + try { + items = jedis.smembers(_commonRedis.buildKeyWithPrefix(key)); + return new HashSet<>(items); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public boolean connect() throws Exception { + try { + return 
jedis.getClusterNodes().entrySet().stream().findFirst().map(e -> e.getValue().getResource().isConnected()).orElse(false); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public boolean disconnect() throws Exception { + try { + jedis.close(); + + return true; + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public Pipeline pipeline() throws Exception { + return null; + } +} \ No newline at end of file diff --git a/redis-wrapper/src/main/java/redis/RedisInstance.java b/redis-wrapper/src/main/java/redis/RedisInstance.java index 0ef7e8a10..bc3eb2786 100644 --- a/redis-wrapper/src/main/java/redis/RedisInstance.java +++ b/redis-wrapper/src/main/java/redis/RedisInstance.java @@ -2,6 +2,7 @@ import org.apache.commons.pool2.impl.GenericObjectPoolConfig; import pluggable.CustomStorageWrapper; +import redis.clients.jedis.JedisCluster; import redis.clients.jedis.JedisPool; import redis.clients.jedis.JedisPoolConfig; @@ -9,19 +10,28 @@ public class RedisInstance { private static final int TIMEOUT = 1000; + private RedisInstance() { + throw new IllegalStateException("Utility class"); + } + public static Builder builder() { return new Builder(); } - private static CustomStorageWrapper getRedisInstance(String host, int port, int timeout, String user, String password, int database, String prefix, int maxTotal) { + private static CustomStorageWrapper getRedisInstance(String host, int port, int timeout, String password, int database, + String prefix, int maxTotal) { JedisPoolConfig poolConfig = new JedisPoolConfig(); poolConfig.setMaxTotal(maxTotal); JedisPool jedisPool = new JedisPool(poolConfig, host, port, timeout, password, database); - return new RedisImp(jedisPool, prefix); + return new RedisSingle(jedisPool, prefix); } private static CustomStorageWrapper getRedisInstance(JedisPool jedisPool, String prefix) { - return new RedisImp(jedisPool, prefix); + return new 
RedisSingle(jedisPool, prefix); + } + + private static CustomStorageWrapper getRedisInstance(JedisCluster jedisCluster, String prefix, String hashtag) { + return new RedisCluster(jedisCluster, prefix, hashtag); } public static final class Builder { @@ -32,7 +42,9 @@ public static final class Builder { private String _password = null; private int _database = 0; private String _prefix = ""; + private String _hashtag = ""; private JedisPool _jedisPool = null; + private JedisCluster _jedisCluster = null; private int _maxTotal = GenericObjectPoolConfig.DEFAULT_MAX_TOTAL; public Builder timeout(int timeout) { @@ -70,13 +82,23 @@ public Builder prefix(String prefix) { return this; } + public Builder hashtag(String hashtag) { + _hashtag = hashtag; + return this; + } + public Builder jedisPool(JedisPool jedisPool) { _jedisPool = jedisPool; return this; } - public Builder maxTotal(int _maxTotal) { - _maxTotal = _maxTotal; + public Builder jedisCluster(JedisCluster jedisCluster) { + _jedisCluster = jedisCluster; + return this; + } + + public Builder maxTotal(int maxTotal) { + _maxTotal = maxTotal; return this; } @@ -84,7 +106,10 @@ public CustomStorageWrapper build() { if(_jedisPool != null) { return RedisInstance.getRedisInstance(_jedisPool, _prefix); } - return RedisInstance.getRedisInstance(_host, _port, _timeout, _user, _password, _database, _prefix, _maxTotal); + if(_jedisCluster != null) { + return RedisInstance.getRedisInstance(_jedisCluster, _prefix, _hashtag); + } + return RedisInstance.getRedisInstance(_host, _port, _timeout, _password, _database, _prefix, _maxTotal); } } -} +} \ No newline at end of file diff --git a/redis-wrapper/src/main/java/redis/RedisPipeline.java b/redis-wrapper/src/main/java/redis/RedisPipeline.java new file mode 100644 index 000000000..e58dfbfa4 --- /dev/null +++ b/redis-wrapper/src/main/java/redis/RedisPipeline.java @@ -0,0 +1,59 @@ +package redis; + +import pluggable.Result; +import redis.clients.jedis.Jedis; +import 
redis.clients.jedis.JedisPool; +import redis.clients.jedis.Pipeline; +import redis.common.CommonRedis; + +import java.util.List; +import java.util.stream.Collectors; + +public class RedisPipeline implements pluggable.Pipeline { + private Pipeline _pipelined; + private final JedisPool _jedisPool; + private final CommonRedis _commonRedis; + + public RedisPipeline(JedisPool jedisPool, String prefix) throws RedisException { + _jedisPool = jedisPool; + _commonRedis = CommonRedis.create(prefix); + try (Jedis jedis = _jedisPool.getResource()) { + _pipelined = jedis.pipelined(); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public void hIncrement(String key, String field, long value) { + _pipelined.hincrBy(_commonRedis.buildKeyWithPrefix(key), field, value); + } + + @Override + public void getMembers(String key) { + _pipelined.smembers(_commonRedis.buildKeyWithPrefix(key)); + } + + public void delete(List keys) throws RedisException { + if(keys == null || keys.isEmpty()){ + return ; + } + try (Jedis jedis = _jedisPool.getResource()) { + keys = keys.stream().map(key -> _commonRedis.buildKeyWithPrefix(key)).collect(Collectors.toList()); + + jedis.del(keys.toArray(new String[keys.size()])); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public List exec() throws Exception { + try{ + List executionResult = _pipelined.syncAndReturnAll(); + return executionResult.stream().map(i -> new Result(i)).collect(Collectors.toList()); + } catch (Exception e) { + throw new RedisException(e.getMessage()); + } + } +} \ No newline at end of file diff --git a/redis-wrapper/src/main/java/redis/RedisImp.java b/redis-wrapper/src/main/java/redis/RedisSingle.java similarity index 59% rename from redis-wrapper/src/main/java/redis/RedisImp.java rename to redis-wrapper/src/main/java/redis/RedisSingle.java index 2061d6328..97fdbbf98 100644 --- a/redis-wrapper/src/main/java/redis/RedisImp.java +++ 
b/redis-wrapper/src/main/java/redis/RedisSingle.java @@ -1,33 +1,31 @@ package redis; -import com.google.common.annotations.VisibleForTesting; import pluggable.CustomStorageWrapper; +import pluggable.HasPipelineSupport; +import pluggable.Pipeline; import redis.clients.jedis.Jedis; import redis.clients.jedis.JedisPool; +import redis.common.CommonRedis; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; -class RedisImp implements CustomStorageWrapper { - private static final String TELEMETRY_INIT = "SPLITIO.telemetry.init" ; - private static final String EVENTS_KEY = "SPLITIO.events" ; - private static final String IMPRESSIONS_KEY = "SPLITIO.impressions" ; - private static final long IMPRESSIONS_OR_EVENTS_DEFAULT_TTL = 3600000L; - +class RedisSingle implements CustomStorageWrapper, HasPipelineSupport { + private final CommonRedis _commonRedis; private final JedisPool jedisPool; - private final String prefix; - public RedisImp(JedisPool jedisPool, String prefix) { + public RedisSingle(JedisPool jedisPool, String prefix) { this.jedisPool = jedisPool; - this.prefix = prefix; + _commonRedis = CommonRedis.create(prefix); } @Override public String get(String key) throws Exception { try (Jedis jedis = this.jedisPool.getResource()) { - return jedis.get(buildKeyWithPrefix(key)); + return jedis.get(_commonRedis.buildKeyWithPrefix(key)); } catch (Exception ex) { throw new RedisException(ex.getMessage()); } @@ -39,7 +37,7 @@ public List getMany(List keys) throws Exception { return new ArrayList<>(); } try (Jedis jedis = this.jedisPool.getResource()) { - keys = keys.stream().map(key -> buildKeyWithPrefix(key)).collect(Collectors.toList()); + keys = keys.stream().map(key -> _commonRedis.buildKeyWithPrefix(key)).collect(Collectors.toList()); return jedis.mget(keys.toArray(new String[keys.size()])); } catch (Exception ex) { @@ -50,12 +48,21 @@ public List getMany(List keys) throws Exception { 
@Override public void set(String key, String item) throws Exception { try (Jedis jedis = this.jedisPool.getResource()) { - if(key.contains(TELEMETRY_INIT)) { + if(key.contains(CommonRedis.TELEMETRY_INIT)) { String[] splittedKey = key.split("::"); - jedis.hset(buildKeyWithPrefix(splittedKey[0]), splittedKey[1], item); + jedis.hset(_commonRedis.buildKeyWithPrefix(splittedKey[0]), splittedKey[1], item); return; } - jedis.set(buildKeyWithPrefix(key), item); + jedis.set(_commonRedis.buildKeyWithPrefix(key), item); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public void hSet(String key, String field, String item) throws Exception { + try (Jedis jedis = this.jedisPool.getResource()) { + jedis.hset(_commonRedis.buildKeyWithPrefix(key), field, item); } catch (Exception ex) { throw new RedisException(ex.getMessage()); } @@ -67,8 +74,7 @@ public void delete(List keys) throws Exception { return ; } try (Jedis jedis = this.jedisPool.getResource()) { - keys = keys.stream().map(key -> buildKeyWithPrefix(key)).collect(Collectors.toList()); - + keys = keys.stream().map(key -> _commonRedis.buildKeyWithPrefix(key)).collect(Collectors.toList()); jedis.del(keys.toArray(new String[keys.size()])); } catch (Exception ex) { throw new RedisException(ex.getMessage()); @@ -77,8 +83,9 @@ public void delete(List keys) throws Exception { @Override public String getAndSet(String key, String item) throws Exception { + //Todo if this method isn't used we should deprecated try (Jedis jedis = this.jedisPool.getResource()) { - return jedis.getSet(buildKeyWithPrefix(key), item); + return jedis.getSet(_commonRedis.buildKeyWithPrefix(key), item); } catch (Exception ex) { throw new RedisException(ex.getMessage()); } @@ -87,7 +94,11 @@ public String getAndSet(String key, String item) throws Exception { @Override public Set getKeysByPrefix(String prefix) throws Exception { try (Jedis jedis = this.jedisPool.getResource()) { - return 
jedis.keys(buildKeyWithPrefix(prefix)); + Set keysWithPrefix = jedis.keys(_commonRedis.buildKeyWithPrefix(prefix)); + if (!_commonRedis.getPrefix().isEmpty()) { + keysWithPrefix = keysWithPrefix.stream().map(key -> key.replace(_commonRedis.getPrefix() + ".", "")).collect(Collectors.toSet()); + } + return keysWithPrefix; } catch (Exception ex) { throw new RedisException(ex.getMessage()); } @@ -96,7 +107,16 @@ public Set getKeysByPrefix(String prefix) throws Exception { @Override public long increment(String key, long value) throws Exception { try (Jedis jedis = this.jedisPool.getResource()) { - return jedis.incrBy(buildKeyWithPrefix(key), value); + return jedis.incrBy(_commonRedis.buildKeyWithPrefix(key), value); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + + @Override + public long hIncrement(String key, String field, long value) throws RedisException { + try (Jedis jedis = this.jedisPool.getResource()) { + return jedis.hincrBy(_commonRedis.buildKeyWithPrefix(key), field, value); } catch (Exception ex) { throw new RedisException(ex.getMessage()); } @@ -105,7 +125,7 @@ public long increment(String key, long value) throws Exception { @Override public long decrement(String key, long value) throws Exception { try (Jedis jedis = this.jedisPool.getResource()) { - return jedis.decrBy(buildKeyWithPrefix(key), value); + return jedis.decrBy(_commonRedis.buildKeyWithPrefix(key), value); } catch (Exception ex) { throw new RedisException(ex.getMessage()); } @@ -114,10 +134,10 @@ public long decrement(String key, long value) throws Exception { @Override public long pushItems(String key, List items) throws Exception { try (Jedis jedis = this.jedisPool.getResource()) { - long addedItems = jedis.rpush(buildKeyWithPrefix(key), items.toArray(new String[items.size()])); - if(EVENTS_KEY.equals(key) || IMPRESSIONS_KEY.equals(key)) { + long addedItems = jedis.rpush(_commonRedis.buildKeyWithPrefix(key), items.toArray(new String[items.size()])); + 
if(CommonRedis.EVENTS_KEY.equals(key) || CommonRedis.IMPRESSIONS_KEY.equals(key)) { if(addedItems == items.size()) { - jedis.pexpire(key, IMPRESSIONS_OR_EVENTS_DEFAULT_TTL); + jedis.pexpire(key, CommonRedis.IMPRESSIONS_OR_EVENTS_DEFAULT_TTL); } } return addedItems; @@ -129,7 +149,7 @@ public long pushItems(String key, List items) throws Exception { @Override public List popItems(String key, long count) throws Exception { try (Jedis jedis = this.jedisPool.getResource()) { - String keyWithPrefix = buildKeyWithPrefix(key); + String keyWithPrefix = _commonRedis.buildKeyWithPrefix(key); List items = jedis.lrange(keyWithPrefix, 0, count-1); int fetchedCount = items.size(); jedis.ltrim(keyWithPrefix, fetchedCount, -1); @@ -143,7 +163,7 @@ public List popItems(String key, long count) throws Exception { @Override public long getItemsCount(String key) throws Exception { try (Jedis jedis = this.jedisPool.getResource()) { - return jedis.scard(buildKeyWithPrefix(key)); + return jedis.scard(_commonRedis.buildKeyWithPrefix(key)); } catch (Exception ex) { throw new RedisException(ex.getMessage()); } @@ -152,7 +172,7 @@ public long getItemsCount(String key) throws Exception { @Override public boolean itemContains(String key, String item) throws Exception { try (Jedis jedis = this.jedisPool.getResource()) { - return jedis.sismember(buildKeyWithPrefix(key), item); + return jedis.sismember(_commonRedis.buildKeyWithPrefix(key), item); } catch (Exception ex) { throw new RedisException(ex.getMessage()); } @@ -161,7 +181,7 @@ public boolean itemContains(String key, String item) throws Exception { @Override public void addItems(String key, List items) throws Exception { try (Jedis jedis = this.jedisPool.getResource()) { - jedis.sadd(buildKeyWithPrefix(key), items.toArray(new String[items.size()])); + jedis.sadd(_commonRedis.buildKeyWithPrefix(key), items.toArray(new String[items.size()])); } catch (Exception ex) { throw new RedisException(ex.getMessage()); } @@ -170,7 +190,7 @@ public void 
addItems(String key, List items) throws Exception { @Override public void removeItems(String key, List items) throws Exception { try (Jedis jedis = this.jedisPool.getResource()) { - jedis.srem(buildKeyWithPrefix(key), items.toArray(new String[items.size()])); + jedis.srem(_commonRedis.buildKeyWithPrefix(key), items.toArray(new String[items.size()])); } catch (Exception ex) { throw new RedisException(ex.getMessage()); } @@ -182,7 +202,7 @@ public List getItems(List keys) throws Exception { return new ArrayList<>(); } try (Jedis jedis = this.jedisPool.getResource()) { - keys = keys.stream().map(key -> buildKeyWithPrefix(key)).collect(Collectors.toList()); + keys = keys.stream().map(key -> _commonRedis.buildKeyWithPrefix(key)).collect(Collectors.toList()); return jedis.mget(keys.toArray(new String[keys.size()])); } catch (Exception ex) { @@ -190,6 +210,17 @@ public List getItems(List keys) throws Exception { } } + @Override + public Set getMembers(String key) throws Exception { + Set items; + try (Jedis jedis = this.jedisPool.getResource()) { + items = jedis.smembers(_commonRedis.buildKeyWithPrefix(key)); + return new HashSet<>(items); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } + } + @Override public boolean connect() throws Exception { try (Jedis jedis = this.jedisPool.getResource()) { @@ -203,20 +234,18 @@ public boolean connect() throws Exception { public boolean disconnect() throws Exception { try { jedisPool.close(); - return true; } catch (Exception ex) { throw new RedisException(ex.getMessage()); } } - @VisibleForTesting - String buildKeyWithPrefix(String key) { - if (!key.startsWith(this.prefix)) { - key = String.format("%s.%s", prefix, key); + @Override + public Pipeline pipeline() throws Exception { + try { + return new RedisPipeline(this.jedisPool, _commonRedis.getPrefix()); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); } - - return key; } -} - +} \ No newline at end of file diff --git 
a/redis-wrapper/src/main/java/redis/common/CommonRedis.java b/redis-wrapper/src/main/java/redis/common/CommonRedis.java new file mode 100644 index 000000000..895205b96 --- /dev/null +++ b/redis-wrapper/src/main/java/redis/common/CommonRedis.java @@ -0,0 +1,29 @@ +package redis.common; + +public class CommonRedis { + + public static final String TELEMETRY_INIT = "SPLITIO.telemetry.init" ; + public static final String EVENTS_KEY = "SPLITIO.events" ; + public static final String IMPRESSIONS_KEY = "SPLITIO.impressions" ; + public static final long IMPRESSIONS_OR_EVENTS_DEFAULT_TTL = 3600000L; + + private final String _prefix; + + private CommonRedis (String prefix){ + _prefix = prefix; + } + public static CommonRedis create(String prefix) { + return new CommonRedis(prefix); + } + + public String buildKeyWithPrefix(String key) { + if (_prefix.isEmpty()) { + return key; + } + return String.format("%s.%s", _prefix, key); + } + + public String getPrefix() { + return _prefix; + } +} \ No newline at end of file diff --git a/redis-wrapper/src/test/java/redis/RedisPipelineTest.java b/redis-wrapper/src/test/java/redis/RedisPipelineTest.java new file mode 100644 index 000000000..b93f80a21 --- /dev/null +++ b/redis-wrapper/src/test/java/redis/RedisPipelineTest.java @@ -0,0 +1,53 @@ +package redis; + +import org.junit.Assert; +import org.junit.Test; +import pluggable.Result; +import redis.clients.jedis.Jedis; +import redis.clients.jedis.JedisPool; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Optional; + +public class RedisPipelineTest { + + @Test + public void testHincrement() throws Exception { + RedisPipeline redisPipeline = new RedisPipeline(new JedisPool(), "test-prefix:."); + redisPipeline.hIncrement("test", "key1", 1L); + redisPipeline.hIncrement("test", "key1", 1L); + + List results = redisPipeline.exec(); + Assert.assertEquals(Optional.of(1L), results.get(0).asLong()); + Assert.assertEquals(Optional.of(2L), 
results.get(1).asLong()); + List keys = new ArrayList<>(); + + keys.add("test"); + redisPipeline.delete(keys); + } + + @Test + public void testGetMembers() throws Exception { + JedisPool jedisPool = new JedisPool(); + RedisPipeline redisPipeline = new RedisPipeline(jedisPool, ""); + try (Jedis jedis = jedisPool.getResource()) { + jedis.sadd("set1", "flag1", "flag2", "flag3"); + jedis.sadd("set2", "flag6", "flag5"); + + redisPipeline.getMembers("set1"); + redisPipeline.getMembers("set2"); + + List results = redisPipeline.exec(); + + Assert.assertEquals(3, results.get(0).asHash().get().size()); + Assert.assertEquals(2, results.get(1).asHash().get().size()); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } finally { + redisPipeline.delete(new ArrayList<>(Arrays.asList("set1", "set2"))); + } + } + +} \ No newline at end of file diff --git a/redis-wrapper/src/test/java/redis/RedisImpTest.java b/redis-wrapper/src/test/java/redis/RedisSingleTest.java similarity index 59% rename from redis-wrapper/src/test/java/redis/RedisImpTest.java rename to redis-wrapper/src/test/java/redis/RedisSingleTest.java index cc484abf0..8f70c3240 100644 --- a/redis-wrapper/src/test/java/redis/RedisImpTest.java +++ b/redis-wrapper/src/test/java/redis/RedisSingleTest.java @@ -1,21 +1,22 @@ package redis; import org.junit.Assert; -import org.junit.Ignore; import org.junit.Test; import pluggable.CustomStorageWrapper; +import redis.clients.jedis.Jedis; import redis.clients.jedis.JedisPool; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; -public class RedisImpTest { +public class RedisSingleTest { @Test public void testSetAndGet() throws Exception { @@ -25,7 +26,7 @@ public void testSetAndGet() throws Exception { map.put("test-7", "7"); map.put("test-8", "8"); - CustomStorageWrapper 
storageWrapper = new RedisImp(new JedisPool(), "test-prefix:."); + CustomStorageWrapper storageWrapper = new RedisSingle(new JedisPool(), "test-prefix:"); for (Map.Entry entry : map.entrySet()) { storageWrapper.set(entry.getKey(), entry.getValue()); @@ -46,7 +47,7 @@ public void testSetAndGetMany() throws Exception { map.put("test-7", "7"); map.put("test-8", "8"); - CustomStorageWrapper storageWrapper = new RedisImp(new JedisPool(), "test-prefix:."); + CustomStorageWrapper storageWrapper = new RedisSingle(new JedisPool(), "test-prefix:"); for (Map.Entry entry : map.entrySet()) { storageWrapper.set(entry.getKey(), entry.getValue()); @@ -70,7 +71,7 @@ public void testGetSet() throws Exception { Map map = new HashMap<>(); map.put(key, "5"); - CustomStorageWrapper storageWrapper = new RedisImp(new JedisPool(), "test-prefix:."); + CustomStorageWrapper storageWrapper = new RedisSingle(new JedisPool(), "test-prefix:"); storageWrapper.set(key, "5"); String result = storageWrapper.getAndSet(key, "7"); Assert.assertEquals("5", result); @@ -86,7 +87,7 @@ public void testGetKeysByPrefix() throws Exception { map.put("item-2", "2"); map.put("item-3", "3"); map.put("i-4", "4"); - RedisImp storageWrapper = new RedisImp(new JedisPool(), "test-prefix"); + RedisSingle storageWrapper = new RedisSingle(new JedisPool(), "test-prefix:"); try { for (Map.Entry entry : map.entrySet()) { storageWrapper.set(entry.getKey(), entry.getValue()); @@ -95,11 +96,34 @@ public void testGetKeysByPrefix() throws Exception { Set result = storageWrapper.getKeysByPrefix("item*"); Assert.assertEquals(3, result.size()); - Assert.assertTrue(result.contains(storageWrapper.buildKeyWithPrefix("item-1"))); - Assert.assertTrue(result.contains(storageWrapper.buildKeyWithPrefix("item-2"))); - Assert.assertTrue(result.contains(storageWrapper.buildKeyWithPrefix("item-3"))); + Assert.assertTrue(result.contains("item-1")); + Assert.assertTrue(result.contains("item-2")); + Assert.assertTrue(result.contains("item-3")); + 
} finally { + storageWrapper.delete(new ArrayList<>(map.keySet())); } - finally { + } + + @Test + public void testGetKeysByPrefixWithoutCustomPrefix() throws Exception { + Map map = new HashMap<>(); + map.put("SPLITIO.item-1", "1"); + map.put("SPLITIO.item-2", "2"); + map.put("SPLITIO.item-3", "3"); + map.put("SPLITIO.i-4", "4"); + RedisSingle storageWrapper = new RedisSingle(new JedisPool(), ""); + try { + for (Map.Entry entry : map.entrySet()) { + storageWrapper.set(entry.getKey(), entry.getValue()); + } + + Set result = storageWrapper.getKeysByPrefix("SPLITIO.item*"); + + Assert.assertEquals(3, result.size()); + Assert.assertTrue(result.contains("SPLITIO.item-1")); + Assert.assertTrue(result.contains("SPLITIO.item-2")); + Assert.assertTrue(result.contains("SPLITIO.item-3")); + } finally { storageWrapper.delete(new ArrayList<>(map.keySet())); } } @@ -108,7 +132,7 @@ public void testGetKeysByPrefix() throws Exception { public void testIncrementAndDecrement() throws Exception { Map map = new HashMap<>(); map.put("item-1", "2"); - RedisImp storageWrapper = new RedisImp(new JedisPool(), "test-prefix"); + RedisSingle storageWrapper = new RedisSingle(new JedisPool(), "test-prefix:"); try { for (Map.Entry entry : map.entrySet()) { storageWrapper.set(entry.getKey(), entry.getValue()); @@ -119,8 +143,22 @@ public void testIncrementAndDecrement() throws Exception { result = storageWrapper.decrement("item-1", 3L); Assert.assertEquals(1L, result); + } finally { + storageWrapper.delete(new ArrayList<>(map.keySet())); } - finally { + } + + @Test + public void testHIncrement() throws Exception { + RedisSingle storageWrapper = new RedisSingle(new JedisPool(), "test-prefix"); + Map map = new HashMap<>(); + map.put("count", "test::12232"); + try { + long result = storageWrapper.hIncrement("count", "test::12232", 2L); + Assert.assertEquals(2L, result); + result = storageWrapper.hIncrement("count", "test::12232", 1L); + Assert.assertEquals(3L, result); + } finally { 
storageWrapper.delete(new ArrayList<>(map.keySet())); } } @@ -129,7 +167,7 @@ public void testIncrementAndDecrement() throws Exception { public void testPushAndPopItems() throws Exception { Map map = new HashMap<>(); map.put("item-1", "1"); - RedisImp storageWrapper = new RedisImp(new JedisPool(), "test-prefix"); + RedisSingle storageWrapper = new RedisSingle(new JedisPool(), "test-prefix"); try { long push = storageWrapper.pushItems("item-1", Arrays.asList("1", "2", "3", "4")); Assert.assertEquals(4L, push); @@ -140,8 +178,7 @@ public void testPushAndPopItems() throws Exception { push = storageWrapper.pushItems("item-1", Arrays.asList("5")); Assert.assertEquals(2L, push); - } - finally { + } finally { storageWrapper.delete(new ArrayList<>(map.keySet())); } } @@ -150,14 +187,13 @@ public void testPushAndPopItems() throws Exception { public void testGetItemsCount() throws Exception { Map map = new HashMap<>(); map.put("item-1", "1"); - RedisImp storageWrapper = new RedisImp(new JedisPool(), "test-prefix"); + RedisSingle storageWrapper = new RedisSingle(new JedisPool(), "test-prefix"); try { storageWrapper.addItems("item-1", Arrays.asList("1", "2", "3", "4")); long result = storageWrapper.getItemsCount("item-1"); Assert.assertEquals(4L, result); - } - finally { + } finally { storageWrapper.delete(new ArrayList<>(map.keySet())); } } @@ -166,14 +202,13 @@ public void testGetItemsCount() throws Exception { public void testItemContains() throws Exception { Map map = new HashMap<>(); map.put("item-1", "1"); - RedisImp storageWrapper = new RedisImp(new JedisPool(), "test-prefix"); + RedisSingle storageWrapper = new RedisSingle(new JedisPool(), "test-prefix"); try { storageWrapper.addItems("item-1", Arrays.asList("1", "2", "3", "4")); boolean result = storageWrapper.itemContains("item-1", "2"); Assert.assertTrue(result); - } - finally { + } finally { storageWrapper.delete(new ArrayList<>(map.keySet())); } } @@ -182,7 +217,7 @@ public void testItemContains() throws Exception 
{ public void testRemoveItems() throws Exception { Map map = new HashMap<>(); map.put("item-1", "1"); - RedisImp storageWrapper = new RedisImp(new JedisPool(), "test-prefix"); + RedisSingle storageWrapper = new RedisSingle(new JedisPool(), "test-prefix"); try { storageWrapper.addItems("item-1", Arrays.asList("1", "2", "3", "4")); boolean result = storageWrapper.itemContains("item-1", "2"); @@ -193,8 +228,7 @@ public void testRemoveItems() throws Exception { Assert.assertFalse(result); result = storageWrapper.itemContains("item-1", "4"); Assert.assertFalse(result); - } - finally { + } finally { storageWrapper.delete(new ArrayList<>(map.keySet())); } } @@ -206,7 +240,7 @@ public void testGetItems() throws Exception { map.put("item-2", "2"); map.put("item-3", "3"); map.put("i-4", "4"); - RedisImp storageWrapper = new RedisImp(new JedisPool(), "test-prefix"); + RedisSingle storageWrapper = new RedisSingle(new JedisPool(), "test-prefix:"); try { for (Map.Entry entry : map.entrySet()) { storageWrapper.set(entry.getKey(), entry.getValue()); @@ -215,27 +249,66 @@ public void testGetItems() throws Exception { Set result = storageWrapper.getKeysByPrefix("item*"); Assert.assertEquals(3, result.size()); - List items = storageWrapper.getItems(new ArrayList<>(result)); + List keys = new ArrayList<>(); + keys.add("item-1"); + keys.add("item-2"); + keys.add("item-3"); + List items = storageWrapper.getItems(new ArrayList<>(keys)); Assert.assertEquals(3, items.size()); Assert.assertTrue(items.containsAll(Arrays.asList("1", "2", "3"))); - } - finally { + } finally { storageWrapper.delete(new ArrayList<>(map.keySet())); } } @Test public void testConnect() throws Exception { - RedisImp storageWrapper = new RedisImp(new JedisPool(), "test-prefix"); + RedisSingle storageWrapper = new RedisSingle(new JedisPool(), "test-prefix"); Assert.assertTrue(storageWrapper.connect()); } - - @Test public void testDisconnect() throws Exception { - RedisImp storageWrapper = new RedisImp(new JedisPool(), 
"test-prefix"); + RedisSingle storageWrapper = new RedisSingle(new JedisPool(), "test-prefix"); Assert.assertTrue(storageWrapper.disconnect()); } -} + @Test + public void testWithoutPrefix() throws Exception { + Map map = new HashMap<>(); + map.put("item-1", "1"); + map.put("item-2", "2"); + map.put("item-3", "3"); + map.put("i-4", "4"); + RedisSingle storageWrapper = new RedisSingle(new JedisPool(), ""); + try { + for (Map.Entry entry : map.entrySet()) { + storageWrapper.set(entry.getKey(), entry.getValue()); + } + + Set result = storageWrapper.getKeysByPrefix("item*"); + + Assert.assertEquals(3, result.size()); + Assert.assertTrue(result.contains("item-1")); + Assert.assertTrue(result.contains("item-2")); + Assert.assertTrue(result.contains("item-3")); + } finally { + storageWrapper.delete(new ArrayList<>(map.keySet())); + } + } + + @Test + public void testGetMembers() throws Exception { + JedisPool jedisPool = new JedisPool(); + RedisSingle storageWrapper = new RedisSingle(new JedisPool(), ""); + try (Jedis jedis = jedisPool.getResource()) { + jedis.sadd("set1", "flag1", "flag2", "flag3"); + Set flags = storageWrapper.getMembers("set1"); + Assert.assertEquals(3, flags.size()); + } catch (Exception ex) { + throw new RedisException(ex.getMessage()); + } finally { + storageWrapper.delete(new ArrayList<>(Arrays.asList("set1"))); + } + } +} \ No newline at end of file diff --git a/redis-wrapper/src/test/java/redis/common/CommonRedisTest.java b/redis-wrapper/src/test/java/redis/common/CommonRedisTest.java new file mode 100644 index 000000000..4b36b0e97 --- /dev/null +++ b/redis-wrapper/src/test/java/redis/common/CommonRedisTest.java @@ -0,0 +1,17 @@ +package redis.common; + +import org.junit.Assert; +import org.junit.Test; + + +public class CommonRedisTest { + + @Test + public void testBuildKey(){ + CommonRedis commonRedisWithPrefix = CommonRedis.create("testing:"); + Assert.assertEquals("testing:.feature_flag1", 
commonRedisWithPrefix.buildKeyWithPrefix("feature_flag1")); + + CommonRedis commonRedisWithoutPrefix = CommonRedis.create(""); + Assert.assertEquals("feature_flag2", commonRedisWithoutPrefix.buildKeyWithPrefix("feature_flag2")); + } +} \ No newline at end of file diff --git a/testing/pom.xml b/testing/pom.xml index b5d0cd2f4..d101f697b 100644 --- a/testing/pom.xml +++ b/testing/pom.xml @@ -2,15 +2,14 @@ 4.0.0 - io.split.client java-client-parent - 4.4.2 + 4.18.2 - java-client-testing jar + 4.18.2 Java Client For Testing Testing suite for Java SDK for Split @@ -22,20 +21,29 @@ junit junit + provided - - - - org.sonatype.plugins - nexus-staging-maven-plugin - 1.6.3 - true - - false - - - - + + + release + + + + org.sonatype.central + central-publishing-maven-plugin + 0.8.0 + true + + central + false + published + false + + + + + + diff --git a/testing/src/main/java/io/split/client/testing/SplitClientForTest.java b/testing/src/main/java/io/split/client/testing/SplitClientForTest.java index 3bc95718a..9ee4b8c3e 100644 --- a/testing/src/main/java/io/split/client/testing/SplitClientForTest.java +++ b/testing/src/main/java/io/split/client/testing/SplitClientForTest.java @@ -3,11 +3,10 @@ import io.split.client.SplitClient; import io.split.client.api.Key; import io.split.client.api.SplitResult; +import io.split.client.dtos.EvaluationOptions; import io.split.grammar.Treatments; -import java.util.HashMap; -import java.util.Map; -import java.util.List; +import java.util.*; import java.util.concurrent.TimeoutException; public class SplitClientForTest implements SplitClient { @@ -33,76 +32,76 @@ public void registerTreatment(String feature, String treatment) { _tests.put(feature, treatment); } - public String getTreatment(String key, String split) { - return _tests.containsKey(split) - ? _tests.get(split) + public String getTreatment(String key, String featureFlagName) { + return _tests.containsKey(featureFlagName) + ? 
_tests.get(featureFlagName) : Treatments.CONTROL; } - public String getTreatment(String key, String split, Map attributes) { - return _tests.containsKey(split) - ? _tests.get(split) + public String getTreatment(String key, String featureFlagName, Map attributes) { + return _tests.containsKey(featureFlagName) + ? _tests.get(featureFlagName) : Treatments.CONTROL; } - public String getTreatment(Key key, String split, Map attributes) { - return _tests.containsKey(split) - ? _tests.get(split) + public String getTreatment(Key key, String featureFlagName, Map attributes) { + return _tests.containsKey(featureFlagName) + ? _tests.get(featureFlagName) : Treatments.CONTROL; } @Override - public SplitResult getTreatmentWithConfig(String key, String split) { - return new SplitResult(_tests.containsKey(split) - ? _tests.get(split) + public SplitResult getTreatmentWithConfig(String key, String featureFlagName) { + return new SplitResult(_tests.containsKey(featureFlagName) + ? _tests.get(featureFlagName) : Treatments.CONTROL, null); } @Override - public SplitResult getTreatmentWithConfig(String key, String split, Map attributes) { - return new SplitResult(_tests.containsKey(split) - ? _tests.get(split) + public SplitResult getTreatmentWithConfig(String key, String featureFlagName, Map attributes) { + return new SplitResult(_tests.containsKey(featureFlagName) + ? _tests.get(featureFlagName) : Treatments.CONTROL, null); } @Override - public SplitResult getTreatmentWithConfig(Key key, String split, Map attributes) { - return new SplitResult(_tests.containsKey(split) - ? _tests.get(split) + public SplitResult getTreatmentWithConfig(Key key, String featureFlagName, Map attributes) { + return new SplitResult(_tests.containsKey(featureFlagName) + ? 
_tests.get(featureFlagName) : Treatments.CONTROL, null); } @Override - public Map getTreatments(String key, List splits) { + public Map getTreatments(String key, List featureFlagNames) { Map treatments = new HashMap<>(); - for (String split : splits) { + for (String split : featureFlagNames) { treatments.put(split, _tests.containsKey(split) ? _tests.get(split) : Treatments.CONTROL); } return treatments; } @Override - public Map getTreatments(String key, List splits, Map attributes){ + public Map getTreatments(String key, List featureFlagNames, Map attributes){ Map treatments = new HashMap<>(); - for (String split : splits) { + for (String split : featureFlagNames) { treatments.put(split, _tests.containsKey(split) ? _tests.get(split) : Treatments.CONTROL); } return treatments; } @Override - public Map getTreatments(Key key, List splits, Map attributes) { + public Map getTreatments(Key key, List featureFlagNames, Map attributes) { Map treatments = new HashMap<>(); - for (String split : splits) { + for (String split : featureFlagNames) { treatments.put(split, _tests.containsKey(split) ? _tests.get(split) : Treatments.CONTROL); } return treatments; } @Override - public Map getTreatmentsWithConfig(String key, List splits) { + public Map getTreatmentsWithConfig(String key, List featureFlagNames) { Map treatments = new HashMap<>(); - for (String split : splits) { + for (String split : featureFlagNames) { treatments.put(split, new SplitResult(_tests.containsKey(split) ? _tests.get(split) : Treatments.CONTROL, null)); @@ -111,9 +110,9 @@ public Map getTreatmentsWithConfig(String key, List } @Override - public Map getTreatmentsWithConfig(String key, List splits, Map attributes) { + public Map getTreatmentsWithConfig(String key, List featureFlagNames, Map attributes) { Map treatments = new HashMap<>(); - for (String split : splits) { + for (String split : featureFlagNames) { treatments.put(split, new SplitResult(_tests.containsKey(split) ? 
_tests.get(split) : Treatments.CONTROL, null)); @@ -122,9 +121,9 @@ public Map getTreatmentsWithConfig(String key, List } @Override - public Map getTreatmentsWithConfig(Key key, List splits, Map attributes) { + public Map getTreatmentsWithConfig(Key key, List featureFlagNames, Map attributes) { Map treatments = new HashMap<>(); - for (String split : splits) { + for (String split : featureFlagNames) { treatments.put(split, new SplitResult(_tests.containsKey(split) ? _tests.get(split) : Treatments.CONTROL, null)); @@ -132,6 +131,200 @@ public Map getTreatmentsWithConfig(Key key, List sp return treatments; } + @Override + public Map getTreatmentsByFlagSet(String key, String flagSet) { + return null; + } + + @Override + public Map getTreatmentsByFlagSet(String key, String flagSet, Map attributes) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsByFlagSet(Key key, String flagSet, Map attributes) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsByFlagSets(String key, List flagSets) { + return null; + } + + @Override + public Map getTreatmentsByFlagSets(String key, List flagSets, Map attributes) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsByFlagSets(Key key, List flagSets, Map attributes) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsWithConfigByFlagSet(String key, String flagSet) { + return null; + } + + @Override + public Map getTreatmentsWithConfigByFlagSet(String key, String flagSet, Map attributes) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsWithConfigByFlagSet(Key key, String flagSet, Map attributes) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsWithConfigByFlagSets(String key, List flagSets) { + return null; + } + + @Override + public Map getTreatmentsWithConfigByFlagSets(String key, List flagSets, Map attributes) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsWithConfigByFlagSets(Key key, 
List flagSets, Map attributes) { + return new HashMap<>(); + } + + @Override + public String getTreatment(String key, String featureFlagName, EvaluationOptions evaluationOptions) { + return null; + } + + @Override + public String getTreatment(String key, String featureFlagName, Map attributes, EvaluationOptions evaluationOptions) { + return null; + } + + @Override + public String getTreatment(Key key, String featureFlagName, Map attributes, EvaluationOptions evaluationOptions) { + return null; + } + + @Override + public Map getTreatments(String key, List featureFlagNames, EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + + @Override + public Map getTreatments(String key, List featureFlagNames, Map attributes, + EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + + @Override + public Map getTreatments(Key key, List featureFlagNames, Map attributes, + EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + + @Override + public SplitResult getTreatmentWithConfig(String key, String featureFlagName, EvaluationOptions evaluationOptions) { + return null; + } + + @Override + public SplitResult getTreatmentWithConfig(Key key, String featureFlagName, Map attributes, + EvaluationOptions evaluationOptions) { + return null; + } + + @Override + public SplitResult getTreatmentWithConfig(String key, String featureFlagName, Map attributes, + EvaluationOptions evaluationOptions) { + return null; + } + + @Override + public Map getTreatmentsWithConfig(String key, List featureFlagNames, Map attributes, + EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsWithConfig(String key, List featureFlagNames, EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsByFlagSet(String key, String flagSet, Map attributes, + EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + + @Override + public Map 
getTreatmentsByFlagSets(String key, List flagSets, EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsByFlagSets(String key, List flagSets, Map attributes, + EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsWithConfigByFlagSet(String key, String flagSet, EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsWithConfigByFlagSet(String key, String flagSet, Map attributes, + EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsWithConfigByFlagSets(String key, List flagSets, EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsWithConfigByFlagSets(String key, List flagSets, Map attributes, + EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsByFlagSet(String key, String flagSet, EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsWithConfig(Key key, List featureFlagNames, Map attributes, + EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsByFlagSet(Key key, String flagSet, Map attributes, + EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsByFlagSets(Key key, List flagSets, Map attributes, + EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsWithConfigByFlagSet(Key key, String flagSet, Map attributes, + EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + + @Override + public Map getTreatmentsWithConfigByFlagSets(Key key, List flagSets, Map attributes, + EvaluationOptions evaluationOptions) { + return new HashMap<>(); + } + @Override public void destroy() { diff --git 
a/testing/src/test/java/io/split/client/testing/SplitScenarioAnnotationTest.java b/testing/src/test/java/io/split/client/testing/SplitScenarioAnnotationTest.java index 27da6c06c..84a772ceb 100644 --- a/testing/src/test/java/io/split/client/testing/SplitScenarioAnnotationTest.java +++ b/testing/src/test/java/io/split/client/testing/SplitScenarioAnnotationTest.java @@ -1,5 +1,7 @@ package io.split.client.testing; +import io.split.client.api.Key; +import io.split.client.dtos.EvaluationOptions; import io.split.client.testing.annotations.SplitScenario; import io.split.client.testing.annotations.SplitSuite; import io.split.client.testing.annotations.SplitTest; @@ -10,6 +12,9 @@ import org.junit.Test; import org.junit.runner.RunWith; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; import java.util.Objects; /** @@ -58,8 +63,57 @@ public void testDefaultScenario() { Assert.assertEquals(3, splitClient.tests().size()); Assert.assertEquals(ON_TREATMENT, splitClient.getTreatment(ARBITRARY_KEY, DEFAULT_PARENT_FEATURE)); Assert.assertEquals(ON_TREATMENT, splitClient.getTreatment(ARBITRARY_KEY, DEFAULT_CLIENT_FEATURE)); + Assert.assertEquals(ON_TREATMENT, splitClient.getTreatment(ARBITRARY_KEY, DEFAULT_CLIENT_FEATURE, new HashMap<>())); + Assert.assertEquals(ON_TREATMENT, splitClient.getTreatment(new Key(ARBITRARY_KEY, ARBITRARY_KEY), DEFAULT_CLIENT_FEATURE, new HashMap<>())); + Assert.assertEquals(ON_TREATMENT, splitClient.getTreatmentWithConfig(ARBITRARY_KEY, DEFAULT_CLIENT_FEATURE, new HashMap<>()).treatment()); + Assert.assertEquals(ON_TREATMENT, splitClient.getTreatmentWithConfig(ARBITRARY_KEY, DEFAULT_CLIENT_FEATURE).treatment()); + Assert.assertEquals(ON_TREATMENT, splitClient.getTreatmentWithConfig(new Key(ARBITRARY_KEY, ARBITRARY_KEY), DEFAULT_CLIENT_FEATURE, new HashMap<>()).treatment()); + Assert.assertEquals(ON_TREATMENT, splitClient.getTreatments(ARBITRARY_KEY, Arrays.asList(DEFAULT_CLIENT_FEATURE)).get(DEFAULT_CLIENT_FEATURE)); + 
Assert.assertEquals(ON_TREATMENT, splitClient.getTreatments(ARBITRARY_KEY, Arrays.asList(DEFAULT_CLIENT_FEATURE), new HashMap<>()).get(DEFAULT_CLIENT_FEATURE)); + Assert.assertEquals(ON_TREATMENT, splitClient.getTreatments(new Key(ARBITRARY_KEY, ARBITRARY_KEY), Arrays.asList(DEFAULT_CLIENT_FEATURE), new HashMap<>()).get(DEFAULT_CLIENT_FEATURE)); + Assert.assertEquals(ON_TREATMENT, splitClient.getTreatmentsWithConfig(ARBITRARY_KEY, Arrays.asList(DEFAULT_CLIENT_FEATURE)).get(DEFAULT_CLIENT_FEATURE).treatment()); + Assert.assertEquals(ON_TREATMENT, splitClient.getTreatmentsWithConfig(ARBITRARY_KEY, Arrays.asList(DEFAULT_CLIENT_FEATURE), new HashMap<>()).get(DEFAULT_CLIENT_FEATURE).treatment()); + Assert.assertEquals(ON_TREATMENT, splitClient.getTreatmentsWithConfig(new Key(ARBITRARY_KEY, ARBITRARY_KEY), Arrays.asList(DEFAULT_CLIENT_FEATURE), new HashMap<>()).get(DEFAULT_CLIENT_FEATURE).treatment()); Assert.assertEquals(OFF_TREATMENT, splitClient.getTreatment(ARBITRARY_KEY, OVERRIDDEN_PARENT_FEATURE)); Assert.assertEquals(CONTROL_TREATMENT, splitClient.getTreatment(ARBITRARY_KEY, CONTROL_FEATURE)); + + Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsByFlagSet(ARBITRARY_KEY, "flagset", new HashMap<>())); + Assert.assertEquals(null, splitClient.getTreatmentsByFlagSet(ARBITRARY_KEY, "flagset")); + Assert.assertEquals(null, splitClient.getTreatmentsByFlagSet(ARBITRARY_KEY, "flagset")); + Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsByFlagSets(ARBITRARY_KEY, Arrays.asList("flagset"), new HashMap<>())); + Assert.assertEquals(null, splitClient.getTreatmentsByFlagSets(ARBITRARY_KEY, Arrays.asList("flagset"))); + Assert.assertEquals(null, splitClient.getTreatmentsByFlagSets(ARBITRARY_KEY, Arrays.asList("flagset"))); + Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsWithConfigByFlagSet(ARBITRARY_KEY, "flagset", new HashMap<>())); + Assert.assertEquals(null, splitClient.getTreatmentsWithConfigByFlagSet(ARBITRARY_KEY, "flagset")); + 
Assert.assertEquals(null, splitClient.getTreatmentsWithConfigByFlagSet(ARBITRARY_KEY, "flagset")); + Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsWithConfigByFlagSets(ARBITRARY_KEY, Arrays.asList("flagset"), new HashMap<>())); + Assert.assertEquals(null, splitClient.getTreatmentsWithConfigByFlagSets(ARBITRARY_KEY, Arrays.asList("flagset"))); + Assert.assertEquals(null, splitClient.getTreatmentsWithConfigByFlagSets(ARBITRARY_KEY, Arrays.asList("flagset"))); + + Assert.assertEquals(null, splitClient.getTreatment(ARBITRARY_KEY, DEFAULT_CLIENT_FEATURE, new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(null, splitClient.getTreatment(ARBITRARY_KEY, DEFAULT_CLIENT_FEATURE, new HashMap<>(), new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(null, splitClient.getTreatment(new Key(ARBITRARY_KEY, ARBITRARY_KEY), DEFAULT_CLIENT_FEATURE, new HashMap<>(), new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(null, splitClient.getTreatmentWithConfig(ARBITRARY_KEY, DEFAULT_CLIENT_FEATURE, new HashMap<>(), new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(null, splitClient.getTreatmentWithConfig(ARBITRARY_KEY, DEFAULT_CLIENT_FEATURE, new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(null, splitClient.getTreatmentWithConfig(new Key(ARBITRARY_KEY, ARBITRARY_KEY), DEFAULT_CLIENT_FEATURE, new HashMap<>(), new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(new HashMap<>(), splitClient.getTreatments(ARBITRARY_KEY, Arrays.asList(DEFAULT_CLIENT_FEATURE), new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(new HashMap<>(), splitClient.getTreatments(ARBITRARY_KEY, Arrays.asList(DEFAULT_CLIENT_FEATURE), new HashMap<>(), new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsWithConfig(ARBITRARY_KEY, Arrays.asList(DEFAULT_CLIENT_FEATURE), new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(null, 
splitClient.getTreatmentsWithConfig(ARBITRARY_KEY, Arrays.asList(DEFAULT_CLIENT_FEATURE), new HashMap<>(), new EvaluationOptions(new HashMap<>())).get(DEFAULT_CLIENT_FEATURE)); + Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsWithConfig(new Key(ARBITRARY_KEY, ARBITRARY_KEY), Arrays.asList(DEFAULT_CLIENT_FEATURE), new HashMap<>(), new EvaluationOptions(new HashMap<>()))); + + Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsByFlagSet(ARBITRARY_KEY, "flagset", new HashMap<>(), new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsByFlagSet(ARBITRARY_KEY, "flagset", new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsByFlagSet(ARBITRARY_KEY, "flagset", new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsByFlagSets(ARBITRARY_KEY, Arrays.asList("flagset"), new HashMap<>(), new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsByFlagSets(ARBITRARY_KEY, Arrays.asList("flagset"), new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsByFlagSets(ARBITRARY_KEY, Arrays.asList("flagset"), new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsWithConfigByFlagSet(ARBITRARY_KEY, "flagset", new HashMap<>(), new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsWithConfigByFlagSet(ARBITRARY_KEY, "flagset", new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsWithConfigByFlagSet(ARBITRARY_KEY, "flagset", new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsWithConfigByFlagSets(ARBITRARY_KEY, Arrays.asList("flagset"), new HashMap<>(), new EvaluationOptions(new HashMap<>()))); + 
Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsWithConfigByFlagSets(ARBITRARY_KEY, Arrays.asList("flagset"), new EvaluationOptions(new HashMap<>()))); + Assert.assertEquals(new HashMap<>(), splitClient.getTreatmentsWithConfigByFlagSets(ARBITRARY_KEY, Arrays.asList("flagset"), new EvaluationOptions(new HashMap<>()))); } /** diff --git a/update_maven_settings.sh b/update_maven_settings.sh new file mode 100755 index 000000000..8bea975fb --- /dev/null +++ b/update_maven_settings.sh @@ -0,0 +1,161 @@ +#!/bin/bash + +# Script to update Maven settings.xml with Central Repository credentials using xmlstarlet + +# ANSI color codes +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +BLUE='\033[0;34m' +RED='\033[0;31m' +NC='\033[0m' # No Color + +# Check if xmlstarlet is installed +if ! command -v xmlstarlet &> /dev/null; then + echo -e "${RED}Error: xmlstarlet is not installed.${NC}" + echo "Please install xmlstarlet first:" + echo " macOS: brew install xmlstarlet" + echo " Debian/Ubuntu: sudo apt-get install xmlstarlet" + echo " RHEL/CentOS/Fedora: sudo yum install xmlstarlet" + echo "Then run this script again." + exit 1 +fi + +# Default values +DEFAULT_SETTINGS_PATH="$HOME/.m2/settings.xml" +DEFAULT_SERVER_ID="central" + +echo -e "${BLUE}Maven Settings.xml Update Script${NC}" +echo "This script will update your Maven settings.xml with Central Repository credentials." 
echo

# Ask for settings.xml path or use default.
# -r keeps backslashes literal in user input (ShellCheck SC2162) — matters for
# copy-pasted Windows-style paths.
read -r -p "Path to settings.xml [$DEFAULT_SETTINGS_PATH]: " SETTINGS_PATH
SETTINGS_PATH=${SETTINGS_PATH:-$DEFAULT_SETTINGS_PATH}

# Variables to store existing values
EXISTING_USERNAME=""
EXISTING_PASSWORD=""

# Extract existing values if settings.xml exists
if [ -f "$SETTINGS_PATH" ] && command -v xmlstarlet &> /dev/null; then
  # Only trust the file's contents when it parses as valid XML
  if xmlstarlet val "$SETTINGS_PATH" &> /dev/null; then
    echo -e "${YELLOW}Reading existing settings from ${SETTINGS_PATH}...${NC}"

    # Extract existing server ID (fall back to the built-in default when absent)
    DEFAULT_SERVER_ID=$(xmlstarlet sel -t -v "/settings/servers/server[1]/id" "$SETTINGS_PATH" 2>/dev/null || echo "$DEFAULT_SERVER_ID")

    # Extract existing username and password for that server
    EXISTING_USERNAME=$(xmlstarlet sel -t -v "/settings/servers/server[id='$DEFAULT_SERVER_ID']/username" "$SETTINGS_PATH" 2>/dev/null || echo "")
    EXISTING_PASSWORD=$(xmlstarlet sel -t -v "/settings/servers/server[id='$DEFAULT_SERVER_ID']/password" "$SETTINGS_PATH" 2>/dev/null || echo "")
  fi
fi

# Ask for server ID or use default/existing
read -r -p "Server ID [$DEFAULT_SERVER_ID]: " SERVER_ID
SERVER_ID=${SERVER_ID:-$DEFAULT_SERVER_ID}

# Ask for username (show existing if available)
USERNAME_PROMPT="Username"
if [ -n "$EXISTING_USERNAME" ]; then
  USERNAME_PROMPT="Username (current: $EXISTING_USERNAME)"
fi
read -r -p "$USERNAME_PROMPT: " USERNAME
USERNAME=${USERNAME:-$EXISTING_USERNAME}

# Ask for password (-s suppresses terminal echo; indicate if one already exists)
PASSWORD_PROMPT="Password"
if [ -n "$EXISTING_PASSWORD" ]; then
  PASSWORD_PROMPT="Password (leave empty to keep current)"
fi
read -r -s -p "$PASSWORD_PROMPT: " PASSWORD
echo
# Only use existing password if the user didn't enter a new one
if [ -z "$PASSWORD" ] && [ -n "$EXISTING_PASSWORD" ]; then
  PASSWORD="$EXISTING_PASSWORD"
fi

# Create .m2 directory if it doesn't exist
M2_DIR=$(dirname "$SETTINGS_PATH")
mkdir -p "$M2_DIR"

# No GPG configuration needed

# Function to create a new settings.xml file from scratch.
# NOTE(review): the element tags below were reconstructed from the XPath
# expressions this script queries (/settings/servers/server/{id,username,password});
# the original chunk arrived with its markup stripped. The root element is kept
# namespace-free on purpose so those plain (un-prefixed) XPath lookups keep
# matching — confirm against the project's canonical settings.xml template.
create_new_settings() {
  echo -e "${YELLOW}Creating new settings.xml file...${NC}"
  cat > "$SETTINGS_PATH" << EOF
<?xml version="1.0" encoding="UTF-8"?>
<settings>
  <servers>
    <server>
      <id>$SERVER_ID</id>
      <username>$USERNAME</username>
      <password>$PASSWORD</password>
    </server>
  </servers>
</settings>
EOF
}

# Check if settings.xml exists
if [ -f "$SETTINGS_PATH" ]; then
  echo -e "${YELLOW}Existing settings.xml found. Backing up to ${SETTINGS_PATH}.bak${NC}"
  cp "$SETTINGS_PATH" "${SETTINGS_PATH}.bak"

  # Check if the file is valid XML
  if ! xmlstarlet val "$SETTINGS_PATH" &> /dev/null; then
    echo -e "${RED}Warning: The existing settings.xml is not valid XML.${NC}"
    read -r -p "Do you want to create a new settings.xml file? (y/n): " CREATE_NEW
    if [[ $CREATE_NEW =~ ^[Yy]$ ]]; then
      create_new_settings
    else
      echo -e "${RED}Exiting without making changes.${NC}"
      exit 1
    fi
  else
    # Check if servers element exists (sel exits non-zero when the XPath has no match)
    if ! xmlstarlet sel -t -v "/settings/servers" "$SETTINGS_PATH" &> /dev/null; then
      echo -e "${YELLOW}No servers section found. Adding servers section...${NC}"
      xmlstarlet ed --inplace \
        -s "/settings" -t elem -n "servers" \
        -s "/settings/servers" -t elem -n "server" \
        -s "/settings/servers/server" -t elem -n "id" -v "$SERVER_ID" \
        -s "/settings/servers/server" -t elem -n "username" -v "$USERNAME" \
        -s "/settings/servers/server" -t elem -n "password" -v "$PASSWORD" \
        "$SETTINGS_PATH"
    else
      # Check if server with this ID already exists
      if xmlstarlet sel -t -v "/settings/servers/server[id='$SERVER_ID']" "$SETTINGS_PATH" &> /dev/null; then
        echo -e "${YELLOW}Server with ID '$SERVER_ID' already exists. Updating credentials...${NC}"
        # Update existing server credentials in place
        xmlstarlet ed --inplace \
          -u "/settings/servers/server[id='$SERVER_ID']/username" -v "$USERNAME" \
          -u "/settings/servers/server[id='$SERVER_ID']/password" -v "$PASSWORD" \
          "$SETTINGS_PATH"
      else
        echo -e "${YELLOW}Adding new server with ID '$SERVER_ID'...${NC}"
        # Add new server to existing servers section; [last()] targets the
        # element just inserted by the preceding -s step
        xmlstarlet ed --inplace \
          -s "/settings/servers" -t elem -n "server" \
          -s "/settings/servers/server[last()]" -t elem -n "id" -v "$SERVER_ID" \
          -s "/settings/servers/server[last()]" -t elem -n "username" -v "$USERNAME" \
          -s "/settings/servers/server[last()]" -t elem -n "password" -v "$PASSWORD" \
          "$SETTINGS_PATH"
      fi
    fi
  fi
else
  create_new_settings
fi

# Restrict permissions — settings.xml now holds plaintext credentials
chmod 600 "$SETTINGS_PATH"

echo -e "${GREEN}Maven settings.xml updated successfully at $SETTINGS_PATH${NC}"
echo -e "${GREEN}Server ID: $SERVER_ID${NC}"
echo -e "${GREEN}Username: $USERNAME${NC}"
echo -e "${GREEN}Password: ********${NC}"