[ci] add benchmarks to pipeline (#821)
* [WIP] add benchmarks to pipeline

* debug yarn run

* debug add timeout

* add lightweight-bench.lua

* run 5 tests in a row

* run benchmark 10 minutes

* add variables

* move ci files; add artifacts

* change image

* add publish benchmark

* fix pipeline

* emulate specific metric

* finish

* fix rule

* fix rules

* fix fmt
alvicsam committed Feb 15, 2022
1 parent 71edc3c commit 0c8ae8b
Showing 2 changed files with 285 additions and 41 deletions.
218 changes: 177 additions & 41 deletions .gitlab-ci.yml
@@ -8,16 +8,24 @@
# - Runs on tags that match the pattern v1.0, v2.1rc1 (manual deployment)

variables:
CONTAINER_REPO: "docker.io/parity/substrate-api-sidecar"
HELM_NAMESPACE: "substrate-api-sidecar"
HELM_RELEASE_NAME_1: "kusama-sidecar"
HELM_RELEASE_NAME_2: "polkadot-sidecar"
HELM_CUSTOM_VALUES_FILE_1: "values-kusama.yaml"
HELM_CUSTOM_VALUES_FILE_2: "values-polkadot.yaml"
HELM_REPO: "helm/"
DOCKERFILE_DIRECTORY: "./"
CI_IMAGE: "quay.io/buildah/stable"

default:
retry:
max: 2
when:
- runner_system_failure
- unknown_failure
- api_failure
interruptible: true
before_script:
- |-
if [[ $CI_COMMIT_TAG =~ ^v[0-9]+\.[0-9]+.*$ ]]; then
@@ -31,20 +39,52 @@
stages:
- build
- deploy-staging
- deploy-production
- benchmark
- check-benchmark
- push-benchmark

.dockerize: &dockerize
stage: build
image: quay.io/buildah/stable
.collect-artifacts: &collect-artifacts
artifacts:
name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}"
when: on_success
expire_in: 1 days
paths:
- ./artifacts/

.test-refs: &test-refs
rules:
- if: $CI_PIPELINE_SOURCE == "pipeline"
when: never
- if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs

.test-refs-manual: &test-refs-manual
rules:
- if: $CI_PIPELINE_SOURCE == "pipeline"
when: never
- if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs
when: manual

.publish-refs: &publish-refs
rules:
- if: '$CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+.*$/' # i.e. v1.0, v2.1rc1
when: always
- if: '$CI_COMMIT_REF_NAME == "master"'
when: always
- if: $CI_PIPELINE_SOURCE == "pipeline"
when: never
- if: $CI_PIPELINE_SOURCE == "schedule"
- if: $CI_COMMIT_REF_NAME == "master"
- if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1

.dockerize: &dockerize
stage: build
image: $CI_IMAGE
tags:
- kubernetes-parity-build

.kubernetes-env: &kubernetes-env
image: $CI_IMAGE
tags:
- kubernetes-parity-build

.deploy-k8s: &deploy-k8s
image: paritytech/kubetools:3.5.3
script:
- |-
kubectl get cm helm-custom-values -n $HELM_NAMESPACE -o jsonpath='{.data.values-kusama\.yaml}' > values-kusama.yaml
@@ -71,45 +111,141 @@ stages:
- kubernetes-parity-build

build:
<<: *dockerize
<<: *publish-refs
script:
- echo building "$CONTAINER_REPO:$DOCKER_IMAGE_TAG"
- |-
if [[ $BUILD_LATEST_IMAGE ]]; then
buildah bud \
--format=docker \
--tag "$CONTAINER_REPO:$DOCKER_IMAGE_TAG" \
--tag "$CONTAINER_REPO:latest" "$DOCKERFILE_DIRECTORY"
- if [[ $BUILD_LATEST_IMAGE ]]; then
buildah bud
--format=docker
--tag "$CONTAINER_REPO:$DOCKER_IMAGE_TAG"
--tag "$CONTAINER_REPO:latest" "$DOCKERFILE_DIRECTORY";
else
buildah bud \
--format=docker \
--tag "$CONTAINER_REPO:$DOCKER_IMAGE_TAG" "$DOCKERFILE_DIRECTORY"
buildah bud
--format=docker
--tag "$CONTAINER_REPO:$DOCKER_IMAGE_TAG" "$DOCKERFILE_DIRECTORY";
fi
- echo ${Docker_Hub_Pass_Parity} |
buildah login --username ${Docker_Hub_User_Parity} --password-stdin docker.io
- echo pushing "$CONTAINER_REPO:$DOCKER_IMAGE_TAG"
- |-
if [[ $BUILD_LATEST_IMAGE ]]; then
- if [[ $BUILD_LATEST_IMAGE ]]; then
buildah push --format=v2s2 "$CONTAINER_REPO:$DOCKER_IMAGE_TAG"
buildah push --format=v2s2 "$CONTAINER_REPO:latest"
buildah push --format=v2s2 "$CONTAINER_REPO:latest";
else
buildah push --format=v2s2 "$CONTAINER_REPO:$DOCKER_IMAGE_TAG"
buildah push --format=v2s2 "$CONTAINER_REPO:$DOCKER_IMAGE_TAG";
fi

# checks that the Docker image can be built without publishing it
build-pr:
<<: *dockerize
<<: *test-refs
script:
- echo building "$CONTAINER_REPO:$DOCKER_IMAGE_TAG"
- if [[ $BUILD_LATEST_IMAGE ]]; then
buildah bud
--format=docker
--tag "$CONTAINER_REPO:$DOCKER_IMAGE_TAG"
--tag "$CONTAINER_REPO:latest" "$DOCKERFILE_DIRECTORY";
else
buildah bud
--format=docker
--tag "$CONTAINER_REPO:$DOCKER_IMAGE_TAG" "$DOCKERFILE_DIRECTORY";
fi

deploy-staging:
stage: deploy-staging
<<: *deploy-k8s
<<: *publish-refs
environment:
name: parity-stg
rules:
- if: '$CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+.*$/' # i.e. v1.0, v2.1rc1
- if: '$CI_COMMIT_REF_NAME == "master"'
name: parity-stg

deploy-production:
stage: deploy-production
<<: *deploy-k8s
environment:
name: parity-chains
rules:
- if: '$CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+.*$/' # i.e. v1.0, v2.1rc1
- if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1
when: manual

.benchmark-template: &benchmark-template
stage: benchmark
<<: *collect-artifacts
<<: *kubernetes-env
before_script: []
variables:
POLKADOT_ADDRESS: "ws://polkadot-node.nodes:9944"
BENCHMARK_DURATION: "15m" # Test duration
BENCHMARK_THREADS: 4 # Total number of threads to use
BENCHMARK_CONNECTIONS: 12 # Total number of HTTP connections to keep open with each thread handling N = connections/threads
BENCHMARK_TIMEOUT: "120s" # Record a timeout if a response is not received within this amount of time
BENCHMARK_OPTS: "--latency" # Additional options, --latency: print detailed latency statistics
BENCHMARK_SCRIPT: "./scripts/ci/benchmarks/lightweight-bench.lua" # https://github.com/wg/wrk/blob/master/SCRIPTING
CI_IMAGE: "paritytech/node-wrk:latest"
script:
- export SAS_SUBSTRATE_WS_URL=$POLKADOT_ADDRESS
- yarn
- yarn build
- yarn run main &
- sleep 10
- wrk -d${BENCHMARK_DURATION}
-t${BENCHMARK_THREADS}
-c${BENCHMARK_CONNECTIONS}
--timeout ${BENCHMARK_TIMEOUT}
${BENCHMARK_OPTS}
-s ${BENCHMARK_SCRIPT} "http://127.0.0.1:8080" > result.txt
- cat result.txt
- mkdir artifacts/
- mv result.txt artifacts/
tags:
- kubernetes-parity-build

benchmark:
<<: *benchmark-template
<<: *publish-refs
needs:
- build

# manual step to run benchmarks in PR pipeline
benchmark-manual-pr:
<<: *benchmark-template
<<: *test-refs-manual
needs:
- build-pr

check-benchmark:
stage: check-benchmark
<<: *publish-refs
<<: *kubernetes-env
needs:
- benchmark
variables:
PROMETHEUS_URL: "http://vm-longterm.parity-build.parity.io"
CI_IMAGE: "paritytech/benchmarks:latest"
script:
- echo "TBD"

push-benchmark:
stage: push-benchmark
<<: *publish-refs
<<: *kubernetes-env
needs:
- check-benchmark
variables:
PROMETHEUS_URL: "http://vm-longterm.parity-build.parity.io"
CI_IMAGE: "paritytech/benchmarks:latest"
script:
- export RESULT=$(cat artifacts/result.txt | grep AvgRequestTime | awk '{print $2}')
- push_bench_result -t common
-p $CI_PROJECT_NAME
-n sidecar
-r $RESULT
-u ms
-s $PROMETHEUS_URL
- push_bench_result -t specific
-p $CI_PROJECT_NAME
-n sidecar
-r $RESULT
-l 'commit="'$CI_COMMIT_SHORT_SHA'"'
-u ms
-s $PROMETHEUS_URL
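
Taken together, the new stages boil down to: build the image, start sidecar against the configured node, drive it with wrk and the Lua script below for the configured duration, then pull a single latency figure out of the report and push it to Prometheus. The following is a minimal local sketch of that flow, not the CI job itself: it assumes a local wrk binary, a reachable archive node for SAS_SUBSTRATE_WS_URL, and the repo's yarn scripts, and it only echoes the extracted value instead of calling the internal push_bench_result tool.

# Local sketch of the benchmark flow (assumptions noted above).
export SAS_SUBSTRATE_WS_URL="ws://polkadot-node.nodes:9944"   # POLKADOT_ADDRESS in CI

yarn && yarn build
yarn run main &                        # sidecar serves http://127.0.0.1:8080
sleep 10                               # give it time to connect to the node

wrk -d15m -t4 -c12 --timeout 120s --latency \
    -s ./scripts/ci/benchmarks/lightweight-bench.lua \
    "http://127.0.0.1:8080" > result.txt

# push-benchmark extracts one number from the report produced by done() below:
RESULT=$(grep AvgRequestTime result.txt | awk '{print $2}')
echo "AvgRequestTime: ${RESULT} ms"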
108 changes: 108 additions & 0 deletions scripts/ci/benchmarks/lightweight-bench.lua
@@ -0,0 +1,108 @@
blocks = {
'28831', -- Sudo setKey(0, -> 1)
'29258', -- sudo.sudo(forceTransfer)
'188836', -- sudo.sudoUncheckedWeight runtime upgrade(v5 generalized proxies identity)
'197681', -- sudo.sudo(forceTransfer)
'199405', -- sudo.sudoUncheckedWeight runtime upgrade(v6 council / sudo can move claims)
'200732', -- sudo.sudo(batch assign indices)
'214264', -- sudo.sudoUncheckedWeight runtime upgrade(v7 frozen indices)
'214576', -- proxy sudo batch of transfers
'243601', -- proxy sudo batch of transfers
'244358', -- sudo.sudoUncheckedWeight runtime upgrade(v8 (un)reserve events)
'287352', -- sudo.sudo forceTransfer
'300532', -- proxy.addProxy for `Any` from sudo(direct to proxy module)
'301569', -- proxy sudo mint claim
'302396', -- proxy sudo set vested claim
'303079', -- sudo.sudoUncheckedWeight runtime upgrade(v9 add vested forceTransfer and new origin filtering)
'304468', -- proxy sudo set balance(W3F)(failed)
'313396', -- proxy sudo set storage
'314201', -- sudo.sudoUncheckedWeight runtime upgrade(v10 allow sudo to do anything(i.e.fix new filtering))
'314326', -- proxy sudo set balance(W3F)
'325148', -- scheduler dispatched
'326556', -- sudo.sudo force new era always
'341469', -- proxy sudo force transfer
'342400', -- sudo.sudoUncheckedWeight runtime upgrade(v11 scale validator count functions)
'342477', -- sudo.sudo schedule regular validator set increases
'442600', -- scheduler dispatched
'443963', -- sudo.sudoUncheckedWeight runtime upgrade(v12 new staking rewards curve)
'444722', -- proxy sudo batch of transfers
'516904', -- sudo.sudo batch of transfers
'528470', -- sudo.sudoUncheckedWeight runtime upgrade(v13 payout creates controller allow voting registrar proxy refactor as_sub)
'543510', -- sudo.sudo force transfer
'645697', -- proxy sudo batch of transfers
'744556', -- proxy sudo batch of transfers
'746085', -- sudo.sudoUncheckedWeight runtime upgrade(v15 enable council elections purchase)
'746605', -- sudo.sudoAs add governance proxy
'786421', -- sudo force transfer
'787923', -- sudo.sudoUncheckedWeight runtime upgrade(v16 enable governance)
'790128', -- proxy sudo batch of transfers
'799302', -- runtime upgraded no more sudo
'799310', -- after v17
}

function shuffle(paths)
local j, k
local n = #paths

for i = 1, n do
j, k = math.random(n), math.random(n)
paths[j], paths[k] = paths[k], paths[j]
end

return paths
end

counter = 1

request = function()
local shuffle_data = shuffle(blocks)

if counter > #shuffle_data then
counter = 1
end

local height = shuffle_data[counter]
counter = counter + 1

local path = string.format('/blocks/%s', height)

return wrk.format('GET', path)
end

delay = function()
-- delay each request by 1 millisecond
return 1
end

done = function(summary, latency, requests)

local bytes = summary.bytes
local errors = summary.errors.status -- responses whose HTTP status was not 2xx/3xx
local requests = summary.requests -- total requests

print("--------------------------\n")
print("Total completed requests: ", summary.requests)
print("Failed requests: ", summary.errors.status)
print("Timeouts: ", summary.errors.status)
print("Average latency: ", (latency.mean/1000).."ms")
print("--------------------------\n")
print("Total completed requests: " .. requests .. "\n")
print("Failed requests: " .. errors .. "\n")
print("Timeouts: " .. errors .. "\n")
print("Avg RequestTime(Latency): "..string.format("%.2f",latency.mean / 1000).."ms".."\n")
print("Max RequestTime(Latency): "..(latency.max / 1000).."ms".."\n")
print("Min RequestTime(Latency): "..(latency.min / 1000).."ms".."\n")
print("--------------------------\n")
print("AvgRequestTime(Latency,ms): "..string.format("%.2f",latency.mean / 1000).."\n")
print("--------------------------\n")
-- Save to a local txt file
-- local file = io.open("./benchmarks/gcp-instance/sidecar-bench-results.txt", "w")
-- file:write("Total completed requests: " .. requests .. "\n")
-- file:write("Failed requests: " .. errors .. "\n")
-- file:write("Timeouts: " .. errors .. "\n")
-- file:write("Avg RequestTime(Latency): "..string.format("%.2f",latency.mean / 1000).."ms".."\n")
-- file:write("Max RequestTime(Latency): "..(latency.max / 1000).."ms".."\n")
-- file:write("Min RequestTime(Latency): "..(latency.min / 1000).."ms".."\n")
-- file:write("--------------------------\n")
-- file:close()
end
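
The script only exercises sidecar's /blocks/{height} endpoint, drawing heights from the hard-coded table of early blocks above (sudo calls, runtime upgrades, proxied batch transfers). As a quick sanity check before a long run, the same endpoint can be hit once by hand, assuming sidecar is already running locally on port 8080 as in the CI job:

# Single request against the endpoint the Lua script hits;
# 28831 is the first height in the blocks table above.
curl -s "http://127.0.0.1:8080/blocks/28831" | head -c 300; echo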
