From 6d3a6d85a0306598a685c51211c4083cb96cdd10 Mon Sep 17 00:00:00 2001
From: Sebastian Kunert
Date: Mon, 13 May 2024 11:03:49 +0200
Subject: [PATCH 1/2] `CheckWeight` SE: Check for extrinsic length + proof size combined (#4326)

So far, the `CheckWeight` `SignedExtension` tracked the proof size and the
extrinsic length separately. In reality, however, we need one more check that
ensures we do not hit the PoV limit with both combined. The rest of the logic
remains unchanged.

One scenario where the changes make a difference is when we enter this branch:
https://github.com/paritytech/polkadot-sdk/blob/f34d8e3cf033e2a22a41b505c437972a5dc83d78/substrate/frame/system/src/extensions/check_weight.rs#L185-L198

This branch previously allowed some extrinsics that exceed the block limit but
are within the reserved area of `BlockWeights`. Such extrinsics are now caught
by the additional check introduced here. I think the new behaviour makes sense,
since the proof size dimension is designed for parachains, and they do not want
to go over the limit and get rejected.

In the long run we should maybe get rid of `RuntimeBlockLength` altogether;
however, that would require a deprecation process and can come at a later point.

---------

Co-authored-by: Adrian Catangiu
---
 prdoc/pr_4326.prdoc                        |  16 +++
 .../system/src/extensions/check_weight.rs  | 125 +++++++++++++++---
 2 files changed, 124 insertions(+), 17 deletions(-)
 create mode 100644 prdoc/pr_4326.prdoc

diff --git a/prdoc/pr_4326.prdoc b/prdoc/pr_4326.prdoc
new file mode 100644
index 000000000000..b448bd7e52e7
--- /dev/null
+++ b/prdoc/pr_4326.prdoc
@@ -0,0 +1,16 @@
+# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0
+# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json
+
+title: CheckWeight checks for combined extrinsic length and proof size
+
+doc:
+  - audience: Runtime Dev
+    description: |
+      The `CheckWeight` `SignedExtension` will now perform an additional check. The extension was verifying the extrinsic length and
+      weight limits individually. However, the proof size dimension of the weight and the extrinsic length together are bound by the PoV size limit.
+      The `CheckWeight` extension will now check that the combined size of the proof and the extrinsic length does not
+      exceed the PoV size limit.
+
+crates:
+  - name: frame-system
+    bump: minor
diff --git a/substrate/frame/system/src/extensions/check_weight.rs b/substrate/frame/system/src/extensions/check_weight.rs
index 70d1e7563327..061d543f8c31 100644
--- a/substrate/frame/system/src/extensions/check_weight.rs
+++ b/substrate/frame/system/src/extensions/check_weight.rs
@@ -64,17 +64,6 @@ where
 		}
 	}
 
-	/// Checks if the current extrinsic can fit into the block with respect to block weight limits.
-	///
-	/// Upon successes, it returns the new block weight as a `Result`.
-	fn check_block_weight(
-		info: &DispatchInfoOf<T::RuntimeCall>,
-	) -> Result<crate::ConsumedWeight, TransactionValidityError> {
-		let maximum_weight = T::BlockWeights::get();
-		let all_weight = Pallet::<T>::block_weight();
-		calculate_consumed_weight::<T::RuntimeCall>(maximum_weight, all_weight, info)
-	}
-
 	/// Checks if the current extrinsic can fit into the block with respect to block length limits.
 	///
 	/// Upon successes, it returns the new block length as a `Result`.
@@ -113,7 +102,12 @@ where
 		len: usize,
 	) -> Result<(), TransactionValidityError> {
 		let next_len = Self::check_block_length(info, len)?;
-		let next_weight = Self::check_block_weight(info)?;
+
+		let all_weight = Pallet::<T>::block_weight();
+		let maximum_weight = T::BlockWeights::get();
+		let next_weight =
+			calculate_consumed_weight::<T::RuntimeCall>(&maximum_weight, all_weight, info)?;
+		check_combined_proof_size(&maximum_weight, next_len, &next_weight)?;
 		Self::check_extrinsic_weight(info)?;
 
 		crate::AllExtrinsicsLen::<T>::put(next_len);
@@ -136,8 +130,32 @@ where
 	}
 }
 
+/// Check that the combined extrinsic length and proof size together do not exceed the PoV limit.
+pub fn check_combined_proof_size(
+	maximum_weight: &BlockWeights,
+	next_len: u32,
+	next_weight: &crate::ConsumedWeight,
+) -> Result<(), TransactionValidityError> {
+	// This extra check ensures that the extrinsic length does not push the
+	// PoV over the limit.
+	let total_pov_size = next_weight.total().proof_size().saturating_add(next_len as u64);
+	if total_pov_size > maximum_weight.max_block.proof_size() {
+		log::debug!(
+			target: LOG_TARGET,
+			"Extrinsic exceeds total pov size: {}kb, limit: {}kb",
+			total_pov_size as f64/1024.0,
+			maximum_weight.max_block.proof_size() as f64/1024.0
+		);
+		return Err(InvalidTransaction::ExhaustsResources.into())
+	}
+	Ok(())
+}
+
+/// Checks if the current extrinsic can fit into the block with respect to block weight limits.
+///
+/// Upon successes, it returns the new block weight as a `Result`.
 pub fn calculate_consumed_weight<Call>(
-	maximum_weight: BlockWeights,
+	maximum_weight: &BlockWeights,
 	mut all_weight: crate::ConsumedWeight,
 	info: &DispatchInfoOf<Call>,
 ) -> Result<crate::ConsumedWeight, TransactionValidityError>
@@ -742,17 +760,90 @@ mod tests {
 
 		// when
 		assert_ok!(calculate_consumed_weight::<<Test as Config>::RuntimeCall>(
-			maximum_weight.clone(),
+			&maximum_weight,
 			all_weight.clone(),
-			&mandatory1
+			&mandatory1,
 		));
 		assert_err!(
 			calculate_consumed_weight::<<Test as Config>::RuntimeCall>(
-				maximum_weight,
+				&maximum_weight,
 				all_weight,
-				&mandatory2
+				&mandatory2,
 			),
 			InvalidTransaction::ExhaustsResources
 		);
 	}
+
+	#[test]
+	fn maximum_proof_size_includes_length() {
+		let maximum_weight = BlockWeights::builder()
+			.base_block(Weight::zero())
+			.for_class(DispatchClass::non_mandatory(), |w| {
+				w.base_extrinsic = Weight::zero();
+				w.max_total = Some(Weight::from_parts(20, 10));
+			})
+			.for_class(DispatchClass::Mandatory, |w| {
+				w.base_extrinsic = Weight::zero();
+				w.reserved = Some(Weight::from_parts(5, 10));
+				w.max_total = None;
+			})
+			.build_or_panic();
+
+		assert_eq!(maximum_weight.max_block, Weight::from_parts(20, 10));
+
+		// We have 10 reftime and 5 proof size left over.
+		let next_weight = crate::ConsumedWeight::new(|class| match class {
+			DispatchClass::Normal => Weight::from_parts(10, 5),
+			DispatchClass::Operational => Weight::from_parts(0, 0),
+			DispatchClass::Mandatory => Weight::zero(),
+		});
+
+		// Simple checks for the length
+		assert_ok!(check_combined_proof_size(&maximum_weight, 0, &next_weight));
+		assert_ok!(check_combined_proof_size(&maximum_weight, 5, &next_weight));
+		assert_err!(
+			check_combined_proof_size(&maximum_weight, 6, &next_weight),
+			InvalidTransaction::ExhaustsResources
+		);
+
+		// We have 10 reftime and 0 proof size left over.
+		let next_weight = crate::ConsumedWeight::new(|class| match class {
+			DispatchClass::Normal => Weight::from_parts(10, 10),
+			DispatchClass::Operational => Weight::from_parts(0, 0),
+			DispatchClass::Mandatory => Weight::zero(),
+		});
+		assert_ok!(check_combined_proof_size(&maximum_weight, 0, &next_weight));
+		assert_err!(
+			check_combined_proof_size(&maximum_weight, 1, &next_weight),
+			InvalidTransaction::ExhaustsResources
+		);
+
+		// We have 10 reftime and 2 proof size left over.
+		// Used weight is spread across dispatch classes this time.
+		let next_weight = crate::ConsumedWeight::new(|class| match class {
+			DispatchClass::Normal => Weight::from_parts(10, 5),
+			DispatchClass::Operational => Weight::from_parts(0, 3),
+			DispatchClass::Mandatory => Weight::zero(),
+		});
+		assert_ok!(check_combined_proof_size(&maximum_weight, 0, &next_weight));
+		assert_ok!(check_combined_proof_size(&maximum_weight, 2, &next_weight));
+		assert_err!(
+			check_combined_proof_size(&maximum_weight, 3, &next_weight),
+			InvalidTransaction::ExhaustsResources
+		);
+
+		// Ref time is over the limit. Should not happen, but we should make sure that it is
+		// ignored.
+		let next_weight = crate::ConsumedWeight::new(|class| match class {
+			DispatchClass::Normal => Weight::from_parts(30, 5),
+			DispatchClass::Operational => Weight::from_parts(0, 0),
+			DispatchClass::Mandatory => Weight::zero(),
+		});
+		assert_ok!(check_combined_proof_size(&maximum_weight, 0, &next_weight));
+		assert_ok!(check_combined_proof_size(&maximum_weight, 5, &next_weight));
+		assert_err!(
+			check_combined_proof_size(&maximum_weight, 6, &next_weight),
+			InvalidTransaction::ExhaustsResources
+		);
+	}
 }

From 477a120893ecd35f5e4f808cba10525424b5711d Mon Sep 17 00:00:00 2001
From: Alexander Samusev <41779041+alvicsam@users.noreply.github.com>
Date: Mon, 13 May 2024 11:54:32 +0200
Subject: [PATCH 2/2] [ci] Add forklift to GHA ARC (#4372)

This PR adds forklift settings and enables forklift in test-github-actions.

cc https://github.com/paritytech/ci_cd/issues/939
---
 .forklift/config.toml                     | 30 +++++++++++++++++++++++
 .github/workflows/quick-checks.yml        | 14 +++++------
 .github/workflows/test-github-actions.yml | 11 +++++++--
 .gitlab-ci.yml                            |  2 +-
 4 files changed, 46 insertions(+), 11 deletions(-)
 create mode 100644 .forklift/config.toml

diff --git a/.forklift/config.toml b/.forklift/config.toml
new file mode 100644
index 000000000000..403a452aa036
--- /dev/null
+++ b/.forklift/config.toml
@@ -0,0 +1,30 @@
+[compression]
+type = "zstd"
+
+[compression.zstd]
+compressionLevel = 3
+
+[general]
+jobNameVariable = "CI_JOB_NAME"
+jobsBlackList = []
+logLevel = "warn"
+threadsCount = 6
+
+[metrics]
+enabled = true
+pushEndpoint = "placeholder"
+
+[metrics.extraLabels]
+environment = "production"
+job_name = "$CI_JOB_NAME"
+project_name = "$CI_PROJECT_PATH"
+
+[storage]
+type = "s3"
+
+[storage.s3]
+accessKeyId = "placeholder"
+bucketName = "placeholder"
+concurrency = 10
+endpointUrl = "placeholder"
+secretAccessKey = "placeholder"
diff --git a/.github/workflows/quick-checks.yml b/.github/workflows/quick-checks.yml
index 7bf1983a1f69..a56bf12bb162 100644
--- a/.github/workflows/quick-checks.yml
+++ b/.github/workflows/quick-checks.yml
@@ -14,7 +14,7 @@ jobs:
   set-image:
     # GitHub Actions allows using 'env' in a container context.
     # However, env variables don't work for forks: https://github.com/orgs/community/discussions/44322
     # This workaround sets the container image for each job using 'set-image' job output.
-    runs-on: arc-runners-polkadot-sdk-default
+    runs-on: arc-runners-polkadot-sdk
     timeout-minutes: 10
     outputs:
       IMAGE: ${{ steps.set_image.outputs.IMAGE }}
     steps:
       - name: Checkout
         uses: actions/checkout@v4
       - id: set_image
         run: cat .github/env >> $GITHUB_OUTPUT
   fmt:
-    runs-on: arc-runners-polkadot-sdk-default
+    runs-on: arc-runners-polkadot-sdk
     timeout-minutes: 10
     needs: [set-image]
     container:
       - name: Cargo fmt
         run: cargo +nightly fmt --all -- --check
   check-dependency-rules:
-    runs-on: arc-runners-polkadot-sdk-default
+    runs-on: arc-runners-polkadot-sdk
     timeout-minutes: 10
     needs: [set-image]
     container:
          cd substrate/
          ../.gitlab/ensure-deps.sh
   check-rust-feature-propagation:
-    runs-on: arc-runners-polkadot-sdk-default
-    # runs-on: ubuntu-latest
+    runs-on: arc-runners-polkadot-sdk
     timeout-minutes: 10
     needs: [set-image]
     container:
       - name: run zepter
         run: zepter run check
   test-rust-features:
-    runs-on: arc-runners-polkadot-sdk-default
-    # runs-on: ubuntu-latest
+    runs-on: arc-runners-polkadot-sdk
     timeout-minutes: 10
     needs: [set-image]
     container:
       - name: run rust features
         run: bash .gitlab/rust-features.sh .
   check-toml-format:
-    runs-on: arc-runners-polkadot-sdk-default
+    runs-on: arc-runners-polkadot-sdk
     timeout-minutes: 10
     needs: [set-image]
     container:
diff --git a/.github/workflows/test-github-actions.yml b/.github/workflows/test-github-actions.yml
index e35ee0994863..5dbdb7156d51 100644
--- a/.github/workflows/test-github-actions.yml
+++ b/.github/workflows/test-github-actions.yml
@@ -8,6 +8,13 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
   cancel-in-progress: true
 
+env:
+  FORKLIFT_storage_s3_bucketName: ${{ secrets.FORKLIFT_storage_s3_bucketName }}
+  FORKLIFT_storage_s3_accessKeyId: ${{ secrets.FORKLIFT_storage_s3_accessKeyId }}
+  FORKLIFT_storage_s3_secretAccessKey: ${{ secrets.FORKLIFT_storage_s3_secretAccessKey }}
+  FORKLIFT_storage_s3_endpointUrl: ${{ secrets.FORKLIFT_storage_s3_endpointUrl }}
+  FORKLIFT_metrics_pushEndpoint: ${{ secrets.FORKLIFT_metrics_pushEndpoint }}
+
 jobs:
   set-image:
     # GitHub Actions allows using 'env' in a container context.
       - name: Checkout
         uses: actions/checkout@v4
       - name: script
-        run: WASM_BUILD_NO_COLOR=1 time cargo test -p staging-node-cli --release --locked -- --ignored
+        run: WASM_BUILD_NO_COLOR=1 time forklift cargo test -p staging-node-cli --release --locked -- --ignored
   quick-benchmarks:
     runs-on: arc-runners-polkadot-sdk-beefy
     timeout-minutes: 30
       - name: Checkout
         uses: actions/checkout@v4
       - name: script
-        run: time cargo run --locked --release -p staging-node-cli --bin substrate-node --features runtime-benchmarks -- benchmark pallet --chain dev --pallet "*" --extrinsic "*" --steps 2 --repeat 1 --quiet
+        run: time forklift cargo run --locked --release -p staging-node-cli --bin substrate-node --features runtime-benchmarks -- benchmark pallet --chain dev --pallet "*" --extrinsic "*" --steps 2 --repeat 1 --quiet
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 5e57dd86f141..73a8c52c448f 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -120,7 +120,7 @@ default:
 .forklift-cache:
   before_script:
     - mkdir ~/.forklift
-    - cp $FL_FORKLIFT_CONFIG ~/.forklift/config.toml
+    - cp .forklift/config.toml ~/.forklift/config.toml
     - >
       if [ "$FORKLIFT_BYPASS" != "true" ]; then
      echo "FORKLIFT_BYPASS not set";