diff --git a/.github/workflows/integration_test.yml b/.github/workflows/integration_test.yml
new file mode 100644
index 0000000..46740e8
--- /dev/null
+++ b/.github/workflows/integration_test.yml
@@ -0,0 +1,152 @@
+name: Integration Test
+
+on:
+  pull_request:
+    branches: [ "main" ]
+
+jobs:
+  e2e-test:
+    runs-on: macos-13-xl
+    permissions:
+      contents: write
+      checks: write
+      pull-requests: write
+      id-token: write
+    env:
+      iam_role_to_assume: ${{ secrets.ROLE_ARN }}
+      device_farm_project_arn: ${{ secrets.DEVICE_FARM_PROJECT_ARN }}
+      device_farm_pool_arn: ${{ secrets.DEVICE_FARM_POOL_ARN }}
+      device_farm_test_spec_arn: ${{ secrets.DEVICE_FARM_TEST_SPEC_ARN }}
+      clickstream_app_id: ${{ secrets.CLICKSTREAM_APP_ID }}
+      clickstream_endpoint: ${{ secrets.CLICKSTREAM_ENDPOINT }}
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          token: ${{ secrets.PROJECT_TOKEN }}
+      - name: Modify SDK for integration test
+        run: |
+          sed -i '' -e "s#private(set) var bundleSequenceId: Int#private(set) var bundleSequenceId: Int\n var allEventJson: String = \"\"#g" Sources/Clickstream/Dependency/Clickstream/Analytics/EventRecorder.swift
+          sed -i '' -e "s#toPrettierJsonString())\")#toPrettierJsonString())\")\n allEventJson.append(\"Saved event \\\(event.eventType):\\\(eventObject.toJsonString())\\\n\")\n UIPasteboard.general.string = allEventJson#g" Sources/Clickstream/Dependency/Clickstream/Analytics/EventRecorder.swift
+          sed -i '' -e "s#batchEvent.eventCount) events\")#batchEvent.eventCount) events\")\n allEventJson.append(\"Send \\\(batchEvent.eventCount) events\\\n\")\n UIPasteboard.general.string = allEventJson#g" Sources/Clickstream/Dependency/Clickstream/Analytics/EventRecorder.swift
+          git diff
+      - name: Prepare sample iOS app
+        run: |
+          cd ..
+          git clone https://github.com/aws-samples/clickstream-sdk-samples
+          cd clickstream-sdk-samples/ios
+          sed -i '' -e "s#\"appId\": \"your appId\"#\"appId\": \"${{ env.clickstream_app_id }}\"#g" ModerneShopping/amplifyconfiguration.json
+          sed -i '' -e "s#\"endpoint\": \"your endpoint\"#\"endpoint\": \"${{ env.clickstream_endpoint }}\"#g" ModerneShopping/amplifyconfiguration.json
+          sed -i '' -e "s#if index==0 || index==1 {}#if index==0 || index==1 {\n cart.addToCart(addedProduct: product, quantity: 1)\n }#g" ModerneShopping/Views/ProductViews/ProductList.swift
+          sed -i '' -e "s#3A1001882A1DDF4300DF72CB /\* XCRemoteSwiftPackageReference \"clickstream-swift\" \*\/,#3A67FCB62B6F26C60098082A /\* XCLocalSwiftPackageReference \"../../clickstream-swift\" \*\/,#g" ModerneShopping.xcodeproj/project.pbxproj
+          perl -0777 -pi -e 's#/\* Begin XCRemoteSwiftPackageReference section \*/\n(\t+)3A1001882A1DDF4300DF72CB /\* XCRemoteSwiftPackageReference \"clickstream-swift\" \*/ = {\n(\t+)isa = XCRemoteSwiftPackageReference;\n(\t+)repositoryURL = \"https://github.com/awslabs/clickstream-swift.git\";\n(\t+)requirement = {\n(\t+)branch = main;\n(\t+)kind = branch;\n(\t+)};\n(\t+)};#/\* Begin XCLocalSwiftPackageReference section \*/\n\t\t3A67FCB62B6F26C60098082A /\* XCLocalSwiftPackageReference \"../../clickstream-swift\" \*/ = {\n$2isa = XCLocalSwiftPackageReference;\n$3relativePath = \"../../clickstream-swift\";\n$1};\n/\* End XCLocalSwiftPackageReference section \*/\n\n/* Begin XCRemoteSwiftPackageReference section \*/#' ModerneShopping.xcodeproj/project.pbxproj
+          perl -0777 -pi -e 's#isa = XCSwiftPackageProductDependency;\n(\t+)package = 3A1001882A1DDF4300DF72CB /\* XCRemoteSwiftPackageReference \"clickstream-swift\" \*/;#isa = XCSwiftPackageProductDependency;#' ModerneShopping.xcodeproj/project.pbxproj
+          perl -0777 -pi -e 's#\n {\n "identity" : "clickstream-swift",\n "kind" : "remoteSourceControl",\n "location" : "https://github.com/awslabs/clickstream-swift.git",\n "state" : {\n "branch" : "main",\n "revision" : "2bd29626068a6dff158f41f9d81295a6eaa59be5"\n }\n },##' ModerneShopping.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
+          perl -0777 -pi -e 's#objectVersion = 54;#objectVersion = 60;#' ModerneShopping.xcodeproj/project.pbxproj
+          git diff
+      - name: Generate export options
+        run: |
+          echo '${{ vars.EXPORT_OPTIONS }}' >> ExportOptions.plist
+          cat ExportOptions.plist
+          ls
+      - name: Install the Apple certificate and provisioning profile
+        env:
+          BUILD_CERTIFICATE_BASE64: ${{ secrets.P12_BASE64 }}
+          P12_PASSWORD: ${{ secrets.CERTIFICATE_PASSWORD }}
+          BUILD_PROVISION_PROFILE_BASE64: ${{ secrets.MOBILEPROVISION_BASE64 }}
+          KEYCHAIN_PASSWORD: ${{ secrets.KEYCHAIN_PASSWORD }}
+        run: |
+          CERTIFICATE_PATH=$RUNNER_TEMP/build_certificate.p12
+          PP_PATH=$RUNNER_TEMP/shoppingmacdev.mobileprovision
+          KEYCHAIN_PATH=$RUNNER_TEMP/app-signing.keychain-db
+          # import certificate and provisioning profile from secrets
+          echo -n "$BUILD_CERTIFICATE_BASE64" | base64 --decode -o $CERTIFICATE_PATH
+          echo -n "$BUILD_PROVISION_PROFILE_BASE64" | base64 --decode -o $PP_PATH
+          # create temporary keychain
+          security create-keychain -p "$KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
+          security set-keychain-settings -lut 21600 $KEYCHAIN_PATH
+          security unlock-keychain -p "$KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
+          # import certificate to keychain
+          security import $CERTIFICATE_PATH -P "$P12_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
+          security list-keychain -d user -s $KEYCHAIN_PATH
+          # apply provisioning profile
+          mkdir -p ~/Library/MobileDevice/Provisioning\ Profiles
+          cp $PP_PATH ~/Library/MobileDevice/Provisioning\ Profiles
+      - name: Build archive
+        run: |
+          sudo xcode-select -s '/Applications/Xcode_15.2.app/Contents/Developer'
+          xcode-select --print-path
+          cd ../clickstream-sdk-samples/ios/
+          xcodebuild -resolvePackageDependencies
+          echo "start build app"
+          xcodebuild -scheme "ModerneShopping" \
+          -archivePath $RUNNER_TEMP/ModerneShopping.xcarchive \
+          -sdk iphoneos \
+          -configuration Release \
+          -destination generic/platform=iOS \
+          clean archive | xcpretty
+      - name: Export ipa
+        run: |
+          EXPORT_OPTS_PATH=ExportOptions.plist
+          xcodebuild -exportArchive -archivePath $RUNNER_TEMP/ModerneShopping.xcarchive -exportOptionsPlist $EXPORT_OPTS_PATH -exportPath output
+          cd output
+          ls
+      - name: Build Device Farm test file
+        run: |
+          cd IntegrationTest
+          pip install virtualenv
+          virtualenv --help
+          virtualenv workspace
+          cd workspace
+          source bin/activate
+          pip install -r ../requirements.txt
+          mkdir tests
+          cp ../appium/shopping_test.py tests/
+          find tests/
+          py.test --collect-only tests/
+          cd tests/
+          find . -name '__pycache__' -type d -exec rm -r {} +
+          find . -name '*.pyc' -exec rm -f {} +
+          find . -name '*.pyo' -exec rm -f {} +
+          find . -name '*~' -exec rm -f {} +
+          cd ..
+          pip freeze > requirements.txt
+          zip -r test_bundle.zip tests/ requirements.txt
+          ls
+          cd ..
+      - name: Configure AWS Credentials
+        if: ${{ env.iam_role_to_assume != '' }}
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          role-to-assume: ${{ env.iam_role_to_assume }}
+          aws-region: us-west-2
+      - name: Execute device farm test
+        run: |
+          cd IntegrationTest
+          pip install -r requirements.txt
+          cd devicefarm
+          cp ../../output/ModerneShopping.ipa ./
+          cp ../workspace/test_bundle.zip ./
+          ls
+          python -u -c "from automate_device_farm import upload_and_test_ios; upload_and_test_ios('ModerneShopping.ipa', 'test_bundle.zip', '${{ env.device_farm_project_arn }}', '${{ env.device_farm_test_spec_arn }}', '${{ env.device_farm_pool_arn }}')"
+      - name: Execute logcat test
+        run: |
+          cd IntegrationTest/devicefarm
+          pytest logcat_test.py -s --junitxml=report/logcat_test_report.xml --html=report/logcat_test_report.html
+      - name: Publish Test Report
+        uses: mikepenz/action-junit-report@v4
+        if: success() || failure()
+        with:
+          report_paths: 'IntegrationTest/devicefarm/report/*.xml'
+          require_tests: true
+          detailed_summary: true
+          include_passed: true
+          fail_on_failure: true
+          job_name: integration test
+      - name: Upload test result
+        uses: actions/upload-artifact@v4
+        if: success() || failure()
+        with:
+          name: test-result
+          path: |
+            IntegrationTest/devicefarm/report/
+            IntegrationTest/devicefarm/MyiOSAppTest-*/**
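The "Execute device farm test" step drives the whole Device Farm run through a one-line `python -c` call into `automate_device_farm.py` (added later in this diff). For local debugging, the same entry point can be invoked as a small standalone script. A minimal sketch, assuming AWS credentials are available to boto3 and the three ARNs are supplied via environment variables (placeholders, not values shipped with this change):

```python
# Local sketch of what the "Execute device farm test" step runs in CI.
import os

from automate_device_farm import upload_and_test_ios

if __name__ == '__main__':
    upload_and_test_ios(
        app_file_path='ModerneShopping.ipa',   # produced by the "Export ipa" step
        test_package='test_bundle.zip',        # produced by the "Build Device Farm test file" step
        project_arn=os.environ['DEVICE_FARM_PROJECT_ARN'],
        test_spec_arn=os.environ['DEVICE_FARM_TEST_SPEC_ARN'],
        pool_arn=os.environ['DEVICE_FARM_POOL_ARN'],
    )
```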
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 6968dd4..21dba0a 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -12,7 +12,7 @@ jobs:
   release:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         with:
           ref: main
           fetch-depth: 0
@@ -22,17 +22,33 @@
           chmod +x release.sh
           ./release.sh ${{ env.NEW_VERSION }}
           git diff
-          git config user.name '${{ vars.USER_NAME }}'
-          git config user.email '${{ vars.USER_EMAIL }}'
-          git add .
-          git commit -m 'release: clickstream Swift ${{ env.NEW_VERSION }}'
-          git push
-          git tag ${{ env.NEW_VERSION }}
-          git push origin ${{ env.NEW_VERSION }}
-      - name: Create GitHub release
-        uses: softprops/action-gh-release@v1
+          git config user.name "github-actions"
+          git config user.email "github-actions@github.com"
+      - name: Create Pull Request
+        id: create-pr
+        uses: peter-evans/create-pull-request@v5
         with:
-          name: "Clickstream Swift ${{ env.NEW_VERSION }}"
-          tag_name: "${{ env.NEW_VERSION }}"
-          prerelease: true
-          generate_release_notes: true
+          token: ${{ secrets.PROJECT_TOKEN }}
+          commit-message: 'release: clickstream Swift ${{ env.NEW_VERSION }}'
+          title: 'release: clickstream Swift ${{ env.NEW_VERSION }}'
+          author: github-actions
+          committer: github-actions
+          signoff: true
+          body: |
+            ## Description
+            1. release: clickstream Swift ${{ env.NEW_VERSION }}
+
+            ## General Checklist
+
+
+            - [x] Added new tests to cover change, if needed
+            - [x] Build succeeds using Swift Package Manager
+            - [x] All unit tests pass
+            - [x] Documentation update for the change if required
+            - [x] PR title conforms to conventional commit style
+            - [x] If breaking change, documentation/changelog update with migration instructions
+
+            By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
+
+          labels: release
+          branch: release_${{ env.NEW_VERSION }}
diff --git a/.github/workflows/tag_and_release.yml b/.github/workflows/tag_and_release.yml
new file mode 100644
index 0000000..9151623
--- /dev/null
+++ b/.github/workflows/tag_and_release.yml
@@ -0,0 +1,36 @@
+name: Create Tag And Release
+on:
+  push:
+    branches: [ "main" ]
+
+jobs:
+  release:
+    if: ${{ startsWith(github.event.head_commit.message, 'release:') }}
+    runs-on: ubuntu-latest
+    env:
+      COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
+    permissions:
+      contents: write
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: main
+          fetch-depth: 0
+          token: ${{ secrets.PROJECT_TOKEN }}
+      - name: Create new tag
+        run: |
+          echo "${{ env.COMMIT_MESSAGE }}"
+          version=$(echo "${{ env.COMMIT_MESSAGE }}" | grep -oP 'Swift \K\d+\.\d+\.\d+')
+          echo "release_version=$version" >> "$GITHUB_ENV"
+          echo $version
+          git config user.name '${{ vars.USER_NAME }}'
+          git config user.email '${{ vars.USER_EMAIL }}'
+          git tag v$version
+          git push origin v$version
+      - name: Create GitHub release
+        uses: softprops/action-gh-release@v1
+        with:
+          name: "Clickstream Swift ${{ env.release_version }}"
+          tag_name: "${{ env.release_version }}"
+          prerelease: true
+          generate_release_notes: true
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 3b29812..3876a5b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,3 +7,4 @@ DerivedData/
 .swiftpm/config/registries.json
 .swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
 .netrc
+.idea
\ No newline at end of file
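tag_and_release.yml above derives the version from the merge commit message with `grep -oP 'Swift \K\d+\.\d+\.\d+'`, which only works because release.yml commits the release PR as "release: clickstream Swift <major>.<minor>.<patch>". A rough Python equivalent of that extraction, shown only to illustrate the expected message format (the workflow itself uses grep):

```python
import re

# Mirrors the grep -oP 'Swift \K\d+\.\d+\.\d+' extraction in tag_and_release.yml.
def extract_version(commit_message):
    match = re.search(r'Swift (\d+\.\d+\.\d+)', commit_message)
    return match.group(1) if match else None

assert extract_version('release: clickstream Swift 0.12.0') == '0.12.0'
```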
diff --git a/IntegrationTest/appium/shopping_test.py b/IntegrationTest/appium/shopping_test.py
new file mode 100644
index 0000000..e654ef5
--- /dev/null
+++ b/IntegrationTest/appium/shopping_test.py
@@ -0,0 +1,74 @@
+"""
+Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
+with the License. A copy of the License is located at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES
+OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions
+and limitations under the License.
+"""
+import pytest
+from time import sleep
+
+from appium import webdriver
+from appium.options.ios import XCUITestOptions
+from appium.webdriver.common.appiumby import AppiumBy
+from selenium.common.exceptions import NoSuchElementException
+
+capabilities = dict(
+    platformName='ios',
+    automationName='xcuitest',
+    deviceName='iPhone',
+    bundleId='software.aws.solution.ModerneShopping',
+    language='en',
+    locale='US',
+)
+
+appium_server_url = 'http://0.0.0.0:4723/wd/hub'
+
+
+class TestShopping:
+    def setup_method(self):
+        self.driver = webdriver.Remote(appium_server_url, options=XCUITestOptions().load_capabilities(capabilities))
+        self.driver.implicitly_wait(10)
+
+    def teardown_method(self):
+        if self.driver:
+            self.driver.quit()
+
+    @pytest.mark.parametrize("test_suite", [
+        "test suite 1",
+        "test suite 2"
+    ])
+    def test_shopping(self, test_suite):
+        sleep(3)
+        self.perform_click_element('Profile')
+        self.perform_click_element('sign_in')
+        sleep(3)
+        self.perform_click_element('Cart')
+        self.perform_click_element('check_out')
+        self.perform_click_element('purchase')
+        self.perform_click_element('Profile')
+        self.perform_click_element("sign_out")
+        self.driver.execute_script('mobile: backgroundApp', {"seconds": 5})
+        sleep(1)
+        self.perform_click_element("show_log_text")
+        event_log = self.driver.find_element(by=AppiumBy.ID, value="event_log")
+        self.driver.log_event("app_event_log", event_log.text)
+        print(event_log.text)
+        sleep(1)
+
+    def perform_click_element(self, element_id):
+        try:
+            element = self.driver.find_element(by=AppiumBy.ID, value=element_id)
+            element.click()
+            sleep(2)
+        except NoSuchElementException:
+            pytest.skip(f"Element with ID '{element_id}' not found. Skipping the test")
+
+
+if __name__ == '__main__':
+    pytest.main(['-s', __file__])
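shopping_test.py is written to run unchanged inside the Device Farm test bundle, but it can also be pointed at a local Appium server and a booted simulator while developing. A minimal, hypothetical local-run helper (not part of this change), assuming an Appium server is listening on the `appium_server_url` defined in shopping_test.py; the `deviceName` below is a placeholder:

```python
# Hypothetical local-run helper; not part of the Device Farm test bundle.
import pytest

import shopping_test

if __name__ == '__main__':
    # Point the suite at a specific local simulator before pytest collects it;
    # setup_method reads the module-level capabilities dict at run time.
    shopping_test.capabilities['deviceName'] = 'iPhone 15'  # placeholder simulator name
    pytest.main(['-s', 'shopping_test.py'])
```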
diff --git a/IntegrationTest/devicefarm/automate_device_farm.py b/IntegrationTest/devicefarm/automate_device_farm.py
new file mode 100644
index 0000000..b0792f8
--- /dev/null
+++ b/IntegrationTest/devicefarm/automate_device_farm.py
@@ -0,0 +1,189 @@
+"""
+Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
+with the License. A copy of the License is located at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES
+OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions
+and limitations under the License.
+"""
+import datetime
+import os
+import random
+import string
+import time
+
+import boto3
+import requests
+import yaml
+import zipfile
+import shutil
+import re
+
+# The following script runs a test through Device Farm
+client = boto3.client('devicefarm')
+
+
+def get_config(app_file_path, test_package, project_arn, test_spec_arn, pool_arn):
+    return {
+        # This is the app under test.
+        "appFilePath": app_file_path,
+        "projectArn": project_arn,
+        # The test spec and device pool are passed in as ARNs.
+        "testSpecArn": test_spec_arn,
+        "poolArn": pool_arn,
+        "namePrefix": "MyiOSAppTest",
+        # This is the test package (the zipped Appium tests and requirements).
+        "testPackage": test_package
+    }
+
+
+def upload_and_test_ios(app_file_path, test_package, project_arn, test_spec_arn, pool_arn):
+    config = get_config(app_file_path, test_package, project_arn, test_spec_arn, pool_arn)
+    print(config)
+    unique = config['namePrefix'] + "-" + (datetime.date.today().isoformat()) + (
+        ''.join(random.sample(string.ascii_letters, 8)))
+    print(f"The unique identifier for this run is going to be {unique} -- all uploads will be prefixed with this.")
+
+    our_upload_arn = upload_df_file(config, unique, config['appFilePath'], "IOS_APP")
+    our_test_package_arn = upload_df_file(config, unique, config['testPackage'], 'APPIUM_PYTHON_TEST_PACKAGE')
+    print(our_upload_arn, our_test_package_arn)
+    # Now that we have those out of the way, we can start the test run...
+    response = client.schedule_run(
+        projectArn=config["projectArn"],
+        appArn=our_upload_arn,
+        devicePoolArn=config["poolArn"],
+        name=unique,
+        test={
+            "type": "APPIUM_PYTHON",
+            "testSpecArn": config["testSpecArn"],
+            "testPackageArn": our_test_package_arn
+        }
+    )
+    run_arn = response['run']['arn']
+    start_time = datetime.datetime.now()
+    print(f"Run {unique} is scheduled as arn {run_arn} ")
+
+    try:
+        while True:
+            response = client.get_run(arn=run_arn)
+            state = response['run']['status']
+            if state == 'COMPLETED' or state == 'ERRORED':
+                break
+            else:
+                print(f" Run {unique} in state {state}, total time " + str(datetime.datetime.now() - start_time))
+                time.sleep(10)
+    except Exception as e:
+        # If something goes wrong in this process, we stop the run and exit.
+        print(e)
+        client.stop_run(arn=run_arn)
+        exit(1)
+    print(f"Tests finished in state {state} after " + str(datetime.datetime.now() - start_time))
+    # now, we pull all the logs.
+    jobs_response = client.list_jobs(arn=run_arn)
+    # Save the output somewhere. We're using the unique value, but you could use something else
+    save_path = os.path.join(os.getcwd(), unique)
+    os.mkdir(save_path)
+    # Save the last run information
+    appium_log_path = download_artifacts(jobs_response, save_path)
+    save_appium_log_path(appium_log_path)
+    # done
+    print("Finished")
+
+
+def upload_df_file(config, unique, filename, type_, mime='application/octet-stream'):
+    response = client.create_upload(projectArn=config['projectArn'],
+                                    name=unique + "_" + os.path.basename(filename),
+                                    type=type_,
+                                    contentType=mime
+                                    )
+    # Get the upload ARN, which we'll return later.
+    upload_arn = response['upload']['arn']
+    # We're going to extract the URL of the upload and use Requests to upload it
+    upload_url = response['upload']['url']
+    with open(filename, 'rb') as file_stream:
+        print(f"Uploading {filename} to Device Farm as {response['upload']['name']}... ", end='')
+        put_req = requests.put(upload_url, data=file_stream, headers={"content-type": mime})
+        print(' done')
+        if not put_req.ok:
+            raise Exception("Couldn't upload, requests said we're not ok. Requests says: " + put_req.reason)
+    started = datetime.datetime.now()
+    while True:
+        print(f"Upload of {filename} in state {response['upload']['status']} after " + str(
+            datetime.datetime.now() - started))
+        if response['upload']['status'] == 'FAILED':
+            raise Exception("The upload failed processing. DeviceFarm says reason is: \n" + (
+                response['upload']['message'] if 'message' in response['upload'] else response['upload']['metadata']))
+        if response['upload']['status'] == 'SUCCEEDED':
+            break
+        time.sleep(5)
+        response = client.get_upload(arn=upload_arn)
+    print("")
+    return upload_arn
+
+
+def download_artifacts(jobs_response, save_path):
+    logcat_paths = []
+    for job in jobs_response['jobs']:
+        # Make a directory for our information
+        job_name = job['name']
+        os.makedirs(os.path.join(save_path, job_name), exist_ok=True)
+        # Get each suite within the job
+        suites = client.list_suites(arn=job['arn'])['suites']
+        for suite in suites:
+            if suite['name'] == 'Tests Suite':
+                for test in client.list_tests(arn=suite['arn'])['tests']:
+                    # Get the artifacts
+                    for artifact_type in ['FILE', 'SCREENSHOT', 'LOG']:
+                        artifacts = client.list_artifacts(
+                            type=artifact_type,
+                            arn=test['arn']
+                        )['artifacts']
+                        for artifact in artifacts:
+                            # Build the per-job destination directory for this artifact
+                            path_to = os.path.join(save_path, job_name)
+                            os.makedirs(path_to, exist_ok=True)
+                            filename = artifact['type'] + "_" + artifact['name'] + "." + artifact['extension']
+                            if str(filename).endswith(".zip"):
+                                artifact_save_path = os.path.join(path_to, filename)
+                                print("Downloading " + artifact_save_path)
+                                with open(artifact_save_path, 'wb') as fn, requests.get(artifact['url'],
+                                                                                        allow_redirects=True) as request:
+                                    fn.write(request.content)
+                                appium_log_path = unzip_and_copy(artifact_save_path)
+                                if appium_log_path is not None:
+                                    logcat_paths.append(appium_log_path)
+    return logcat_paths
+
+
+def save_appium_log_path(appium_log_paths):
+    with open('ios_path.yaml', 'w') as file:
+        yaml.dump(appium_log_paths, file, default_flow_style=False)
+        print("appium log paths saved successfully")
+
+
+def unzip_and_copy(zip_path):
+    with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+        zip_ref.extractall(os.path.dirname(zip_path))
+
+    origin_path = os.path.dirname(zip_path) + "/Host_Machine_Files/$DEVICEFARM_LOG_DIR/junitreport.xml"
+    device_name = os.path.basename(os.path.dirname(zip_path))
+    rename_path = os.path.dirname(origin_path) + "/" + device_name + " junitreport.xml"
+    appium_log_path = os.path.dirname(origin_path) + "/appium.log"
+    if os.path.exists(origin_path):
+        os.rename(origin_path, rename_path)
+        report_path = os.path.dirname(os.path.dirname(os.path.dirname(zip_path))) + "/report/"
+        os.makedirs(report_path, exist_ok=True)
+        result_path = shutil.copy(rename_path, report_path)
+
+        with open(result_path, 'r', encoding='utf-8') as file:
+            content = file.read()
+        modified_content = re.sub(r'\bTestShopping\b', "Appium " + device_name, content)
+        with open(result_path, 'w', encoding='utf-8') as file:
+            file.write(modified_content)
+        return appium_log_path
+    else:
+        return None
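download_artifacts and save_appium_log_path leave behind an ios_path.yaml listing one appium.log per device, which logcat_test.py (added below) loads as its parametrize source. A sketch of that round trip, with a made-up device folder name:

```python
# Illustration of the ios_path.yaml handoff between automate_device_farm.py
# and logcat_test.py. The run and device folder names below are made up;
# "$DEVICEFARM_LOG_DIR" is a literal directory name inside the artifact zip.
import yaml

appium_log_paths = [
    'MyiOSAppTest-2024-02-01abcdefgh/Apple iPhone 14/Host_Machine_Files/$DEVICEFARM_LOG_DIR/appium.log',
]
with open('ios_path.yaml', 'w') as file:
    yaml.dump(appium_log_paths, file, default_flow_style=False)

# logcat_test.py then parametrizes every test over this list:
paths = yaml.safe_load(open('ios_path.yaml', 'r'))
assert paths == appium_log_paths
```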
diff --git a/IntegrationTest/devicefarm/logcat_test.py b/IntegrationTest/devicefarm/logcat_test.py
new file mode 100644
index 0000000..5dcbebc
--- /dev/null
+++ b/IntegrationTest/devicefarm/logcat_test.py
@@ -0,0 +1,238 @@
+"""
+Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
+with the License. A copy of the License is located at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES
+OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions
+and limitations under the License.
+"""
+import json
+import re
+
+import pytest
+import yaml
+
+
+class TestLogcatIOS:
+    path = yaml.safe_load(open("ios_path.yaml", "r"))
+
+    def init_events(self, path):
+        self.recorded_events = get_recorded_events(path)
+
+    @pytest.mark.parametrize("path", path)
+    def test_upload(self, path):
+        print("Start verify: " + str(path))
+        self.init_events(path)
+        self.submitted_events = get_submitted_events(path)
+        # assert all recorded events are submitted.
+        assert sum(self.submitted_events) > 0
+        assert len(self.recorded_events) > 0
+        assert sum(self.submitted_events) >= len(self.recorded_events)
+        print("Verifying successful upload of all events.")
+
+    @pytest.mark.parametrize("path", path)
+    def test_launch_events(self, path):
+        print("Start verify: " + str(path))
+        self.init_events(path)
+        # assert launch events
+        start_events = [self.recorded_events[0]['event_name'],
+                        self.recorded_events[1]['event_name'],
+                        self.recorded_events[2]['event_name'],
+                        self.recorded_events[3]['event_name']]
+        assert '_app_start' in start_events
+        assert '_session_start' in start_events
+        if '_first_open' not in start_events:
+            first_open_event = next(
+                (event for event in self.recorded_events if '_first_open' in event.get('event_name', '')),
+                None)
+            assert first_open_event is not None
+        print("Verifying successful order of launch events.")
+
+    @pytest.mark.parametrize("path", path)
+    def test_first_screen_view(self, path):
+        print("Start verify: " + str(path))
+        self.init_events(path)
+        # assert first _screen_view
+        screen_view_events = [event for event in self.recorded_events if '_screen_view' in event.get('event_name', '')]
+        sorted_screen_view_events = sorted(
+            screen_view_events,
+            key=lambda event: event['event_json'].get('timestamp', float('inf'))
+        )
+        screen_view_event = sorted_screen_view_events[0]
+        if screen_view_event['event_json'].get('attributes')['_entrances'] == 0:
+            screen_view_event = sorted_screen_view_events[1]
+        assert screen_view_event['event_json'].get('attributes')['_entrances'] == 1
+        assert '_screen_id' in screen_view_event['event_json'].get('attributes')
+        assert '_screen_name' in screen_view_event['event_json'].get('attributes')
+        assert '_screen_unique_id' in screen_view_event['event_json'].get('attributes')
+
+        assert '_session_id' in screen_view_event['event_json'].get('attributes')
+        assert '_session_start_timestamp' in screen_view_event['event_json'].get('attributes')
+        assert '_session_duration' in screen_view_event['event_json'].get('attributes')
+        assert '_session_number' in screen_view_event['event_json'].get('attributes')
+        print("Verifying successful attributes of all first _screen_view events.")
+
+    @pytest.mark.parametrize("path", path)
+    def test_last_screen_view(self, path):
+        print("Start verify: " + str(path))
+        self.init_events(path)
+        # assert last _screen_view
+        screen_view_event = next(
+            (event for event in reversed(self.recorded_events) if '_screen_view' in event.get('event_name', '')),
+            None)
+        assert screen_view_event['event_json'].get('attributes')['_entrances'] == 0
+        assert '_screen_id' in screen_view_event['event_json'].get('attributes')
+        assert '_screen_name' in screen_view_event['event_json'].get('attributes')
+        assert '_screen_unique_id' in screen_view_event['event_json'].get('attributes')
+
+        assert '_previous_screen_id' in screen_view_event['event_json'].get('attributes')
+        assert '_previous_screen_name' in screen_view_event['event_json'].get('attributes')
+        assert '_previous_screen_unique_id' in screen_view_event['event_json'].get('attributes')
+        assert '_previous_timestamp' in screen_view_event['event_json'].get('attributes')
+
+        print("Verifying successful attributes of all last _screen_view events.")
+
+    @pytest.mark.parametrize("path", path)
+    def test_profile_set(self, path):
+        print("Start verify: " + str(path))
+        self.init_events(path)
+        # assert _profile_set
+        profile_set_event = [event for event in self.recorded_events if '_profile_set' in event.get('event_name', '')]
+        assert '_user_id' not in profile_set_event[-1]['event_json']['user']
+        assert '_user_id' in profile_set_event[-2]['event_json']['user']
+        print("Verifying successful attributes of _profile_set events.")
+
+    @pytest.mark.parametrize("path", path)
+    def test_login(self, path):
+        print("Start verify: " + str(path))
+        self.init_events(path)
+        # assert login
+        login_event = [event for event in self.recorded_events if 'login' in event.get('event_name', '')]
+        assert len(login_event) > 0
+        print("Verifying successful login events.")
+
+    @pytest.mark.parametrize("path", path)
+    def test_product_exposure(self, path):
+        print("Start verify: " + str(path))
+        self.init_events(path)
+        # assert product_exposure
+        product_exposure = next(
+            (event for event in self.recorded_events if 'product_exposure' in event.get('event_name', '')),
+            None)
+        assert len(product_exposure['event_json'].get('items')) > 0
+        assert 'id' in product_exposure['event_json'].get('attributes')
+        print("Verifying successful attributes of product_exposure events.")
+
+    @pytest.mark.parametrize("path", path)
+    def test_add_to_cart(self, path):
+        print("Start verify: " + str(path))
+        self.init_events(path)
+        # assert add_to_cart
+        add_to_cart_event = [event for event in self.recorded_events if 'add_to_cart' in event.get('event_name', '')]
+        assert len(add_to_cart_event) > 0
+        assert 'product_id' in add_to_cart_event[0]['event_json'].get('attributes')
+        print("Verifying successful attributes of add_to_cart events.")
+
+    @pytest.mark.parametrize("path", path)
+    def test_view_home(self, path):
+        print("Start verify: " + str(path))
+        self.init_events(path)
+        # assert view_home
+        view_home_event = [event for event in self.recorded_events if 'view_home' in event.get('event_name', '')]
+        assert len(view_home_event) > 0
+        print("Verifying successful view_home events.")
+
+    @pytest.mark.parametrize("path", path)
+    def test_view_cart(self, path):
+        print("Start verify: " + str(path))
+        self.init_events(path)
+        # assert view_cart
+        view_cart_event = [event for event in self.recorded_events if 'view_cart' in event.get('event_name', '')]
+        assert len(view_cart_event) > 0
+        print("Verifying successful view_cart events.")
+
+    @pytest.mark.parametrize("path", path)
+    def test_view_profile(self, path):
+        print("Start verify: " + str(path))
+        self.init_events(path)
+        # assert view_profile
+        view_account_event = [event for event in self.recorded_events if 'view_profile' in event.get('event_name', '')]
+        assert len(view_account_event) > 0
+        print("Verifying successful view_profile events.")
+
+    @pytest.mark.parametrize("path", path)
+    def test_check_out(self, path):
+        print("Start verify: " + str(path))
+        self.init_events(path)
+        # assert check_out
+        check_out_event = [event for event in self.recorded_events if 'check_out_click' in event.get('event_name', '')]
+        assert len(check_out_event) > 0
+        assert float(check_out_event[0]['event_json'].get('attributes')["totalPrice"]) > 0
+        print("Verifying successful check_out events.")
+
+    @pytest.mark.parametrize("path", path)
+    def test_user_engagement(self, path):
+        print("Start verify: " + str(path))
+        self.init_events(path)
+        # assert _user_engagement
+        user_engagement_event = next(
+            (event for event in self.recorded_events if '_user_engagement' in event.get('event_name', '')),
+            None)
+        assert '_engagement_time_msec' in user_engagement_event['event_json'].get('attributes')
+        assert user_engagement_event['event_json'].get('attributes')['_engagement_time_msec'] > 1000
+        print("Verifying successful attributes of _user_engagement events.")
+
+    @pytest.mark.parametrize("path", path)
+    def test_app_end(self, path):
+        print("Start verify: " + str(path))
+        self.init_events(path)
+        # assert _app_end
+        app_end_event = next((event for event in self.recorded_events if '_app_end' in event.get('event_name', '')),
+                             None)
+        assert app_end_event is not None
+        print("Verifying successful completion of _app_end event.")
+
+
+def get_submitted_events(path):
+    submitted_events = []
+    with open(path, 'r') as file:
+        pattern = re.compile(r' Send (\d+) events')
+        for line in file:
+            match = pattern.search(line)
+            if match:
+                submitted_events.append(int(match.group(1)))
+    return submitted_events
+
+
+def get_recorded_events(path):
+    with open(path, 'r') as file:
+        log_lines = file.readlines()
+    events = []
+    first_event_pattern = re.compile(r'app_event_log:Saved event (\w+):(.*)$')
+    event_pattern = re.compile(r' Saved event (\w+):(.*)$')
+
+    current_event_name = ''
+
+    for line in log_lines:
+        first_event_match = first_event_pattern.search(line)
+        event_match = event_pattern.search(line)
+        if first_event_match:
+            event_match = first_event_match
+        if event_match:
+            event_name, event_json = event_match.groups()
+            if event_name == '_app_start' and (
+                    current_event_name == '_app_end' or current_event_name == '_user_engagement'):
+                continue
+            else:
+                events.append({
+                    'event_name': event_name,
+                    'event_json': json.loads(event_json)
+                })
+                current_event_name = event_name
+        else:
+            continue
+    return events
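get_recorded_events and get_submitted_events rely on the log lines that the "Modify SDK for integration test" step injects into EventRecorder.swift ("Saved event <name>:<json>" and "Send <N> events"), surfaced in the appium.log via the shopping test's `app_event_log` driver event. A tiny, made-up log excerpt showing what the three regexes pick up; the surrounding "..." prefixes stand in for real Appium log noise:

```python
# Made-up appium.log lines in the format produced by the modified EventRecorder.
import re

sample_lines = [
    '... app_event_log:Saved event _first_open:{"event_type": "_first_open"}',
    '... Saved event _app_start:{"event_type": "_app_start"}',
    '... Send 5 events',
]

first_event_pattern = re.compile(r'app_event_log:Saved event (\w+):(.*)$')
event_pattern = re.compile(r' Saved event (\w+):(.*)$')
send_pattern = re.compile(r' Send (\d+) events')

assert first_event_pattern.search(sample_lines[0]).group(1) == '_first_open'
assert event_pattern.search(sample_lines[1]).group(1) == '_app_start'
assert send_pattern.search(sample_lines[2]).group(1) == '5'
```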
diff --git a/IntegrationTest/requirements.txt b/IntegrationTest/requirements.txt
new file mode 100644
index 0000000..fa911d7
--- /dev/null
+++ b/IntegrationTest/requirements.txt
@@ -0,0 +1,7 @@
+Appium-Python-Client~=3.1.1
+pytest~=7.4.3
+boto3~=1.34.11
+requests~=2.31.0
+PyYAML~=6.0.1
+pytest-html~=4.1.1
+selenium~=4.17.2
\ No newline at end of file