diff --git a/bundle-workflow/README.md b/bundle-workflow/README.md
index cb7c8bf676..c0f9c37fbd 100644
--- a/bundle-workflow/README.md
+++ b/bundle-workflow/README.md
@@ -121,6 +121,20 @@ The following options are available.
 
 This step runs integration tests invoking `integtest.sh` in each component from bundle manifest.
 
+To run integration tests locally, use the command below. It pulls down the built bundle and its manifest file from S3, reads all components of the bundle, and runs integration tests against each component.
+
+```
+export AWS_ROLE_ARN=arn:aws:iam:::role/opensearch-test
+export AWS_ROLE_SESSION_NAME=dummy-session
+
+# Next, configure temporary credentials in the environment:
+export AWS_SESSION_TOKEN=
+export AWS_ACCESS_KEY_ID=
+export AWS_SECRET_ACCESS_KEY=
+
+cd bundle-workflow
+./test.sh integ-test --test-run-id --s3-bucket --opensearch-version --build-id --architecture
+```
 #### Backwards Compatibility Tests
 
 This step run backward compatibility invoking `bwctest.sh` in each component from bundle manifest.
diff --git a/bundle-workflow/scripts/default/integtest.sh b/bundle-workflow/scripts/default/integtest.sh
old mode 100644
new mode 100755
index 6f252ef7d2..4da651f594
--- a/bundle-workflow/scripts/default/integtest.sh
+++ b/bundle-workflow/scripts/default/integtest.sh
@@ -1,2 +1,89 @@
 #!/bin/bash
-echo "WARNING: Dummy integtest.sh script invoked: $@"
+
+set -e
+
+function usage() {
+    echo ""
+    echo "This script is used to run integration tests for a plugin installed on a remote OpenSearch/Dashboards cluster."
+    echo "--------------------------------------------------------------------------"
+    echo "Usage: $0 [args]"
+    echo ""
+    echo "Required arguments:"
+    echo "None"
+    echo ""
+    echo "Optional arguments:"
+    echo -e "-b BIND_ADDRESS\t, defaults to localhost | 127.0.0.1, can be changed to any IP or domain name for the cluster location."
+    echo -e "-p BIND_PORT\t, defaults to 9200 or 5601 depending on OpenSearch or Dashboards, can be changed to any port for the cluster location."
+    echo -e "-s SECURITY_ENABLED\t(true | false), defaults to true. Specify whether the OpenSearch/Dashboards cluster has security enabled."
+    echo -e "-c CREDENTIAL\t(username:password), no defaults, effective when SECURITY_ENABLED=true."
+    echo -e "-v OPENSEARCH_VERSION\t, no defaults"
+    echo -e "-n SNAPSHOT\t, defaults to false"
+    echo -e "-h\tPrint this message."
+    echo "--------------------------------------------------------------------------"
+}
+
+while getopts ":hb:p:s:c:v:n:" arg; do
+    case $arg in
+        h)
+            usage
+            exit 1
+            ;;
+        b)
+            BIND_ADDRESS=$OPTARG
+            ;;
+        p)
+            BIND_PORT=$OPTARG
+            ;;
+        s)
+            SECURITY_ENABLED=$OPTARG
+            ;;
+        c)
+            CREDENTIAL=$OPTARG
+            ;;
+        v)
+            OPENSEARCH_VERSION=$OPTARG
+            ;;
+        n)
+            SNAPSHOT=$OPTARG
+            ;;
+        :)
+            echo "-${OPTARG} requires an argument"
+            usage
+            exit 1
+            ;;
+        ?)
+ echo "Invalid option: -${OPTARG}" + exit 1 + ;; + esac +done + + +if [ -z "$BIND_ADDRESS" ] +then + BIND_ADDRESS="localhost" +fi + +if [ -z "$BIND_PORT" ] +then + BIND_PORT="9200" +fi + +if [ -z "$SECURITY_ENABLED" ] +then + SECURITY_ENABLED="true" +fi + +if [ -z "$SNAPSHOT" ] +then + SNAPSHOT="false" +fi + +if [ -z "$CREDENTIAL" ] +then + CREDENTIAL="admin:admin" + USERNAME=`echo $CREDENTIAL | awk -F ':' '{print $1}'` + PASSWORD=`echo $CREDENTIAL | awk -F ':' '{print $2}'` +fi + +./gradlew integTest -Dopensearch.version=$OPENSEARCH_VERSION -Dbuild.snapshot=$SNAPSHOT -Dtests.rest.cluster="$BIND_ADDRESS:$BIND_PORT" -Dtests.cluster="$BIND_ADDRESS:$BIND_PORT" -Dtests.clustername="opensearch-integrationtest" -Dhttps=$SECURITY_ENABLED -Duser=$USERNAME -Dpassword=$PASSWORD --console=plain diff --git a/bundle-workflow/src/manifests/build_manifest.py b/bundle-workflow/src/manifests/build_manifest.py index 3962bcc25e..a2ab7aeef6 100644 --- a/bundle-workflow/src/manifests/build_manifest.py +++ b/bundle-workflow/src/manifests/build_manifest.py @@ -4,6 +4,9 @@ # this file be licensed under the Apache-2.0 license or a # compatible open source license. +import os + +from aws.s3_bucket import S3Bucket from manifests.manifest import Manifest """ @@ -54,6 +57,20 @@ def __to_dict__(self): ), } + @staticmethod + def get_build_manifest_relative_location(build_id, opensearch_version, architecture): + return f"builds/{opensearch_version}/{build_id}/{architecture}/manifest.yml" + + @staticmethod + def from_s3(bucket_name, build_id, opensearch_version, architecture, work_dir=None): + work_dir = work_dir if not None else str(os.getcwd()) + manifest_s3_path = BuildManifest.get_build_manifest_relative_location(build_id, opensearch_version, architecture) + S3Bucket(bucket_name).download_file(manifest_s3_path, work_dir) + with open('manifest.yml', 'r') as file: + build_manifest = BuildManifest.from_file(file) + os.remove(os.path.realpath(os.path.join(work_dir, 'manifest.yml'))) + return build_manifest + class Build: def __init__(self, data): self.name = data["name"] diff --git a/bundle-workflow/src/manifests/bundle_manifest.py b/bundle-workflow/src/manifests/bundle_manifest.py index a996c18ff3..0d40c73c7b 100644 --- a/bundle-workflow/src/manifests/bundle_manifest.py +++ b/bundle-workflow/src/manifests/bundle_manifest.py @@ -4,6 +4,9 @@ # this file be licensed under the Apache-2.0 license or a # compatible open source license. 
 
+import os
+
+from aws.s3_bucket import S3Bucket
 from manifests.manifest import Manifest
 
 
@@ -45,6 +48,28 @@ def __to_dict__(self):
             ),
         }
 
+    @staticmethod
+    def from_s3(bucket_name, build_id, opensearch_version, architecture, work_dir=None):
+        work_dir = work_dir if work_dir is not None else str(os.getcwd())
+        manifest_s3_path = BundleManifest.get_bundle_manifest_relative_location(build_id, opensearch_version, architecture)
+        S3Bucket(bucket_name).download_file(manifest_s3_path, work_dir)
+        with open(os.path.join(work_dir, 'manifest.yml'), 'r') as file:
+            bundle_manifest = BundleManifest.from_file(file)
+        os.remove(os.path.realpath(os.path.join(work_dir, 'manifest.yml')))
+        return bundle_manifest
+
+    @staticmethod
+    def get_tarball_relative_location(build_id, opensearch_version, architecture):
+        return f"bundles/{opensearch_version}/{build_id}/{architecture}/opensearch-{opensearch_version}-linux-{architecture}.tar.gz"
+
+    @staticmethod
+    def get_tarball_name(opensearch_version, architecture):
+        return f"opensearch-{opensearch_version}-linux-{architecture}.tar.gz"
+
+    @staticmethod
+    def get_bundle_manifest_relative_location(build_id, opensearch_version, architecture):
+        return f"bundles/{opensearch_version}/{build_id}/{architecture}/manifest.yml"
+
     class Build:
         def __init__(self, data):
             self.name = data["name"]
diff --git a/bundle-workflow/src/manifests/test_manifest.py b/bundle-workflow/src/manifests/test_manifest.py
index f4290e663c..ed7687ec3b 100644
--- a/bundle-workflow/src/manifests/test_manifest.py
+++ b/bundle-workflow/src/manifests/test_manifest.py
@@ -4,10 +4,10 @@
 # this file be licensed under the Apache-2.0 license or a
 # compatible open source license.
 
-import yaml
+from manifests.manifest import Manifest
 
 
-class TestManifest:
+class TestManifest(Manifest):
     """
     TestManifest contains the test support matrix for any component.
 
@@ -29,15 +29,8 @@ class TestManifest:
         - with-security
         - without-security
     """
-
-    @staticmethod
-    def from_file(file):
-        return TestManifest(yaml.safe_load(file))
-
     def __init__(self, data):
-        self.version = str(data["schema-version"])
-        if self.version != "1.0":
-            raise ValueError(f"Unsupported schema version: {self.version}")
+        super().__init__(data)
         self.components = list(
             map(lambda entry: self.Component(entry), data["components"])
         )
diff --git a/bundle-workflow/src/run_integ_test.py b/bundle-workflow/src/run_integ_test.py
index eaaf7f99b7..c17a294242 100755
--- a/bundle-workflow/src/run_integ_test.py
+++ b/bundle-workflow/src/run_integ_test.py
@@ -29,13 +29,19 @@ def parse_arguments():
 
     parser = argparse.ArgumentParser(description="Test an OpenSearch Bundle")
     parser.add_argument(
-        "--bundle-manifest", type=argparse.FileType("r"), help="Bundle Manifest file."
+        "--s3-bucket", type=str, help="S3 bucket name"
     )
     parser.add_argument(
-        "--build-manifest", type=argparse.FileType("r"), help="Build Manifest file."
+        "--opensearch-version", type=str, help="OpenSearch version to test"
     )
     parser.add_argument(
-        "--test-manifest", type=argparse.FileType("r"), help="Test Manifest file."
+        "--build-id", type=str, help="The build id for the built artifact"
+    )
+    parser.add_argument(
+        "--architecture", type=str, help="The os architecture e.g. x64, arm64"
+    )
+    parser.add_argument(
+        "--test-run-id", type=str, help="The unique execution id for the test"
     )
     parser.add_argument(
         "--keep",
@@ -101,9 +107,8 @@ def sync_dependencies_to_maven_local(work_dir, manifest_build_ver):
 def main():
     args = parse_arguments()
     console.configure(level=args.logging_level)
-    bundle_manifest = BundleManifest.from_file(args.bundle_manifest)
-    build_manifest = BuildManifest.from_file(args.build_manifest)
-    test_manifest = TestManifest.from_file(args.test_manifest)
+    test_manifest_path = os.path.join(os.path.dirname(__file__), 'test_workflow/config/test_manifest.yml')
+    test_manifest = TestManifest.from_path(test_manifest_path)
     integ_test_config = dict()
     for component in test_manifest.components:
         if component.integ_test is not None:
@@ -111,6 +116,10 @@ def main():
     with TemporaryDirectory(keep=args.keep) as work_dir:
         logging.info("Switching to temporary work_dir: " + work_dir)
         os.chdir(work_dir)
+        bundle_manifest = BundleManifest.from_s3(
+            args.s3_bucket, args.build_id, args.opensearch_version, args.architecture, work_dir)
+        build_manifest = BuildManifest.from_s3(
+            args.s3_bucket, args.build_id, args.opensearch_version, args.architecture, work_dir)
         pull_common_dependencies(work_dir, build_manifest)
         sync_dependencies_to_maven_local(work_dir, build_manifest.build.version)
         for component in bundle_manifest.components:
@@ -120,6 +129,7 @@ def main():
                     integ_test_config[component.name],
                     bundle_manifest,
                     work_dir,
+                    args.s3_bucket
                 )
                 test_suite.execute()
             else:
diff --git a/bundle-workflow/src/test_workflow/config/test_manifest.yml b/bundle-workflow/src/test_workflow/config/test_manifest.yml
index bb5590e535..340ae655e2 100644
--- a/bundle-workflow/src/test_workflow/config/test_manifest.yml
+++ b/bundle-workflow/src/test_workflow/config/test_manifest.yml
@@ -7,7 +7,6 @@ components:
       test-configs:
         - with-security
         - without-security
-        - with-less-security
     bwc-test:
       dependencies:
       test-configs:
diff --git a/bundle-workflow/src/test_workflow/integ_test/integ_test_suite.py b/bundle-workflow/src/test_workflow/integ_test/integ_test_suite.py
index a24ae35074..5eed7baefe 100644
--- a/bundle-workflow/src/test_workflow/integ_test/integ_test_suite.py
+++ b/bundle-workflow/src/test_workflow/integ_test/integ_test_suite.py
@@ -20,11 +20,12 @@ class IntegTestSuite:
     test_support_matrix.yml
     """
 
-    def __init__(self, component, test_config, bundle_manifest, work_dir):
+    def __init__(self, component, test_config, bundle_manifest, work_dir, s3_bucket_name):
         self.component = component
         self.bundle_manifest = bundle_manifest
         self.work_dir = work_dir
         self.test_config = test_config
+        self.s3_bucket_name = s3_bucket_name
         self.script_finder = ScriptFinder()
         self.repo = GitRepository(
             self.component.repository,
@@ -74,7 +75,7 @@ def _is_security_enabled(self, config):
 
     def _setup_cluster_and_execute_test_config(self, config):
         security = self._is_security_enabled(config)
-        with LocalTestCluster.create(self.work_dir, self.bundle_manifest, security) as (test_cluster_endpoint, test_cluster_port):
+        with LocalTestCluster.create(self.work_dir, self.bundle_manifest, security, self.s3_bucket_name) as (test_cluster_endpoint, test_cluster_port):
             logging.info("component name: " + self.component.name)
             os.chdir(self.work_dir)
             # TODO: (Create issue) Since plugins don't have integtest.sh in version branch, hardcoded it to main
@@ -85,7 +86,7 @@ def _execute_integtest_sh(self, endpoint, port, security):
             self.component.name, self.repo.dir
         )
         if os.path.exists(script):
-            cmd = f"sh {script} -b {endpoint} -p {port} -s {str(security).lower()}"
+            cmd = f"{script} -b {endpoint} -p {port} -s {str(security).lower()} -v {self.bundle_manifest.build.version}"
             (status, stdout, stderr) = execute(cmd, self.repo.dir, True, False)
         else:
             logging.info(
diff --git a/bundle-workflow/src/test_workflow/integ_test/local_test_cluster.py b/bundle-workflow/src/test_workflow/integ_test/local_test_cluster.py
index de4e50e398..09d0ae24a9 100644
--- a/bundle-workflow/src/test_workflow/integ_test/local_test_cluster.py
+++ b/bundle-workflow/src/test_workflow/integ_test/local_test_cluster.py
@@ -8,10 +8,11 @@
 import os
 import subprocess
 import time
-import urllib.request
 
 import requests
 
+from aws.s3_bucket import S3Bucket
+from manifests.bundle_manifest import BundleManifest
 from test_workflow.test_cluster import ClusterCreationException, TestCluster
 
 
@@ -20,11 +21,12 @@ class LocalTestCluster(TestCluster):
     Represents an on-box test cluster. This class downloads a bundle (from a BundleManifest) and runs it as a background process.
     """
 
-    def __init__(self, work_dir, bundle_manifest, security_enabled):
+    def __init__(self, work_dir, bundle_manifest, security_enabled, s3_bucket_name):
         self.manifest = bundle_manifest
         self.work_dir = os.path.join(work_dir, "local-test-cluster")
         os.makedirs(self.work_dir, exist_ok=True)
         self.security_enabled = security_enabled
+        self.bucket_name = s3_bucket_name
         self.process = None
 
     def create_cluster(self):
@@ -59,15 +61,20 @@ def destroy(self):
     def url(self, path=""):
         return f'{"https" if self.security_enabled else "http"}://{self.endpoint()}:{self.port()}{path}'
 
+    def __download_tarball_from_s3(self):
+        s3_path = BundleManifest.get_tarball_relative_location(
+            self.manifest.build.id, self.manifest.build.version, self.manifest.build.architecture)
+        S3Bucket(self.bucket_name).download_file(s3_path, self.work_dir)
+        return BundleManifest.get_tarball_name(self.manifest.build.version, self.manifest.build.architecture)
+
     def download(self):
         logging.info(f"Creating local test cluster in {self.work_dir}")
         os.chdir(self.work_dir)
-        logging.info(f"Downloading bundle from {self.manifest.build.location}")
-        urllib.request.urlretrieve(self.manifest.build.location, "bundle.tgz")
-        logging.info(f'Downloaded bundle to {os.path.realpath("bundle.tgz")}')
-
+        logging.info("Downloading bundle from s3")
+        bundle_name = self.__download_tarball_from_s3()
+        logging.info(f'Downloaded bundle to {os.path.realpath(bundle_name)}')
         logging.info("Unpacking")
-        subprocess.check_call("tar -xzf bundle.tgz", shell=True)
+        subprocess.check_call(f"tar -xzf {bundle_name}", shell=True)
         logging.info("Unpacked")
 
     def disable_security(self, dir):
diff --git a/bundle-workflow/src/test_workflow/utils/__init__.py b/bundle-workflow/src/test_workflow/utils/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/bundle-workflow/test.sh b/bundle-workflow/test.sh
index 611431ca77..07d0635eba 100755
--- a/bundle-workflow/test.sh
+++ b/bundle-workflow/test.sh
@@ -9,4 +9,13 @@ set -e
 
 DIR="$(dirname "$0")"
 
-"$DIR/run.sh" "$DIR/src/test.py" $@
\ No newline at end of file
+case $1 in
+    "integ-test")
+        echo "${@:2}"
+        "$DIR/run.sh" "$DIR/src/run_integ_test.py" "${@:2}"
+        ;;
+    *)
+        echo "Invalid Test suite"
+        ;;
+esac
+
diff --git a/bundle-workflow/tests/tests_manifests/test_build_manifest.py b/bundle-workflow/tests/tests_manifests/test_build_manifest.py
index 3ce32b09db..7bfb3f3abb 100644
--- a/bundle-workflow/tests/tests_manifests/test_build_manifest.py
+++ b/bundle-workflow/tests/tests_manifests/test_build_manifest.py
@@ -6,6 +6,7 @@
 
 import os
 import unittest
+from unittest.mock import mock_open, patch
 
 import yaml
 
@@ -48,3 +49,34 @@ def test_to_dict(self):
         data = self.manifest.to_dict()
         with open(self.manifest_filename) as f:
             self.assertEqual(yaml.safe_load(f), data)
+
+    def test_get_manifest_relative_location(self):
+        actual = BuildManifest.get_build_manifest_relative_location(
+            "25", "1.1.0", "x64"
+        )
+        expected = "builds/1.1.0/25/x64/manifest.yml"
+        self.assertEqual(
+            actual, expected, "the manifest relative location is not as expected"
+        )
+
+    @patch("manifests.build_manifest.S3Bucket")
+    def test_from_s3(self, mock_s3_bucket):
+        s3_bucket = mock_s3_bucket.return_value
+        with patch("os.remove"):
+            with patch("builtins.open", mock_open()):
+                s3_download_path = BuildManifest.get_build_manifest_relative_location(
+                    self.manifest.build.id,
+                    self.manifest.build.version,
+                    self.manifest.build.architecture,
+                )
+                with patch("manifests.build_manifest.BuildManifest.from_file"):
+                    BuildManifest.from_s3(
+                        "bucket_name",
+                        self.manifest.build.id,
+                        self.manifest.build.version,
+                        self.manifest.build.architecture,
+                        "/xyz",
+                    )
+                self.assertEqual(s3_bucket.download_file.call_count, 1)
+                s3_bucket.download_file.assert_called_with(s3_download_path, "/xyz")
+                os.remove.assert_called_with("/xyz/manifest.yml")
diff --git a/bundle-workflow/tests/tests_manifests/test_bundle_manifest.py b/bundle-workflow/tests/tests_manifests/test_bundle_manifest.py
index 6267740e1b..560dcdf250 100644
--- a/bundle-workflow/tests/tests_manifests/test_bundle_manifest.py
+++ b/bundle-workflow/tests/tests_manifests/test_bundle_manifest.py
@@ -6,6 +6,7 @@
 
 import os
 import unittest
+from unittest.mock import mock_open, patch
 
 import yaml
 
@@ -53,3 +54,46 @@ def test_to_dict(self):
         data = self.manifest.to_dict()
         with open(self.manifest_filename) as f:
             self.assertEqual(yaml.safe_load(f), data)
+
+    def test_get_manifest_relative_location(self):
+        actual = BundleManifest.get_bundle_manifest_relative_location(
+            "25", "1.1.0", "x64"
+        )
+        expected = "bundles/1.1.0/25/x64/manifest.yml"
+        self.assertEqual(
+            actual, expected, "the manifest relative location is not as expected"
+        )
+
+    def test_get_tarball_relative_location(self):
+        actual = BundleManifest.get_tarball_relative_location("25", "1.1.0", "x64")
+        expected = "bundles/1.1.0/25/x64/opensearch-1.1.0-linux-x64.tar.gz"
+        self.assertEqual(
+            actual, expected, "the tarball relative location is not as expected"
+        )
+
+    def test_get_tarball_name(self):
+        actual = BundleManifest.get_tarball_name("1.1.0", "x64")
+        expected = "opensearch-1.1.0-linux-x64.tar.gz"
+        self.assertEqual(actual, expected, "the tarball name is not as expected")
+
+    @patch("manifests.bundle_manifest.S3Bucket")
+    def test_from_s3(self, mock_s3_bucket):
+        s3_bucket = mock_s3_bucket.return_value
+        with patch("os.remove"):
+            with patch("builtins.open", mock_open()):
+                s3_download_path = BundleManifest.get_bundle_manifest_relative_location(
+                    self.manifest.build.id,
+                    self.manifest.build.version,
+                    self.manifest.build.architecture,
+                )
+                with patch("manifests.bundle_manifest.BundleManifest.from_file"):
+                    BundleManifest.from_s3(
+                        "bucket_name",
+                        self.manifest.build.id,
+                        self.manifest.build.version,
+                        self.manifest.build.architecture,
+                        "/xyz",
+                    )
+                self.assertEqual(s3_bucket.download_file.call_count, 1)
+                s3_bucket.download_file.assert_called_with(s3_download_path, "/xyz")
+                os.remove.assert_called_with("/xyz/manifest.yml")
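
For context, a minimal sketch (not part of the patch) of how the S3-backed helpers added above fit together, mirroring the calls in `run_integ_test.py` and `local_test_cluster.py`. The bucket name, build id, version, and architecture below are placeholder values, and it assumes the `bundle-workflow/src` modules are importable and AWS credentials are configured in the environment:

```python
import os

from aws.s3_bucket import S3Bucket
from manifests.bundle_manifest import BundleManifest

# Placeholder inputs; in the workflow these come from the integ-test CLI arguments.
bucket, build_id, version, arch = "my-artifact-bucket", "25", "1.1.0", "x64"
work_dir = os.getcwd()

# Download and parse bundles/1.1.0/25/x64/manifest.yml from S3.
manifest = BundleManifest.from_s3(bucket, build_id, version, arch, work_dir)
print(manifest.build.version)

# Resolve and fetch the bundle tarball the same way LocalTestCluster.download() does.
tarball_key = BundleManifest.get_tarball_relative_location(build_id, version, arch)
S3Bucket(bucket).download_file(tarball_key, work_dir)
print(BundleManifest.get_tarball_name(version, arch))  # opensearch-1.1.0-linux-x64.tar.gz
```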