This repository has been archived by the owner on Nov 15, 2023. It is now read-only.

Dynamic authority sets in GRANDPA #1014

Merged on Nov 15, 2018 (40 commits).
Commits (40) — the file changes below cover 31 of them.
7d89d65
grandpa-compatible digest items
rphmeier Oct 4, 2018
2f9c1d2
round-localized signatures on GRANDPA messages
rphmeier Oct 17, 2018
1146ca7
generalize some import_queue params
rphmeier Oct 17, 2018
edc9cd9
start import queue in chain_ops as well
rphmeier Oct 17, 2018
38d4eb0
localize messages to set-index and add pending changes
rphmeier Oct 21, 2018
0d786bc
Merge branch 'master' into rh-grandpa-dynamic2
rphmeier Oct 22, 2018
a03d48f
note future changes when importing block
rphmeier Oct 25, 2018
370c109
use generic block number in GRANDPA
rphmeier Oct 25, 2018
0dfa924
apply authority set changes
rphmeier Oct 26, 2018
7f51db9
compiles
rphmeier Oct 27, 2018
c0c6597
add some tests for authority set logic
rphmeier Oct 27, 2018
a33180f
restart voter with new authority set as necessary
rphmeier Oct 27, 2018
3f26058
Merge branch 'master' into rh-grandpa-dynamic2
rphmeier Oct 27, 2018
2160e44
minimize public interface
rphmeier Oct 28, 2018
0768c11
add substrate finality grandpa primitives for WASM
rphmeier Oct 28, 2018
a3667df
call into chain WASM to check for changes
rphmeier Oct 28, 2018
13ee7a7
write authority set to DB
rphmeier Oct 29, 2018
9478b90
read authority set from DB on startup
rphmeier Oct 29, 2018
be260ac
refactor import queue to use explicit block import references
rphmeier Oct 29, 2018
2362386
generalizable data for peers in test network
rphmeier Oct 29, 2018
914f21c
tests compile using new test framework
rphmeier Oct 29, 2018
4b29c0d
extract out tests to file
rphmeier Oct 29, 2018
6a7e6cf
tests compile after changes
rphmeier Oct 30, 2018
770f1f6
initial test to ensure auxiliary data is generated correctly
rphmeier Oct 31, 2018
82115aa
set aux in block import
rphmeier Oct 17, 2018
a47d765
implement aux-storing in client-db
rphmeier Oct 17, 2018
aea36fc
client sets aux in import pipeline
rphmeier Oct 17, 2018
c552349
finish GRANDPA test: dynamic authority sets
rphmeier Nov 1, 2018
a6acdc6
use published finality-grandpa
rphmeier Nov 1, 2018
dd045a7
Merge branch 'master' into rh-grandpa-dynamic2
rphmeier Nov 1, 2018
427dd54
all tests compile
rphmeier Nov 2, 2018
13a4d9a
API id module
rphmeier Nov 3, 2018
437f0b8
ignore test temporarily
rphmeier Nov 3, 2018
1522b5c
fix test
rphmeier Nov 3, 2018
d436cf8
re-export ApiId from sr-api
rphmeier Nov 5, 2018
f26699a
add some docs
rphmeier Nov 5, 2018
df928f3
Merge branch 'master' into rh-grandpa-dynamic2
rphmeier Nov 14, 2018
152f481
address grumbles
rphmeier Nov 14, 2018
782363a
start addressing basti comments
rphmeier Nov 15, 2018
23b075c
Fixes `GrandpaApi` implementation and usage
bkchr Nov 15, 2018
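
The commits above introduce auxiliary ("aux") key-value storage in the client backends and use it so the GRANDPA voter can persist its authority set across restarts ("write authority set to DB" / "read authority set from DB on startup"). The sketch below only illustrates that idea; `AuthoritySet`, `AUTHORITY_SET_KEY`, and the byte encoding are hypothetical stand-ins, not the PR's actual types or layout.

use std::collections::HashMap;

// Hypothetical stand-ins; the real PR encodes its own authority-set type.
type AuthorityId = [u8; 32];

struct AuthoritySet {
	set_id: u64,
	authorities: Vec<(AuthorityId, u64)>, // (authority id, voting weight)
}

const AUTHORITY_SET_KEY: &[u8] = b"grandpa_authority_set"; // hypothetical key

// Naive encoding, purely for illustration.
fn encode(set: &AuthoritySet) -> Vec<u8> {
	let mut out = set.set_id.to_le_bytes().to_vec();
	out.extend((set.authorities.len() as u64).to_le_bytes());
	for (id, weight) in &set.authorities {
		out.extend_from_slice(id);
		out.extend(weight.to_le_bytes());
	}
	out
}

fn main() {
	// A HashMap stands in for the backend's aux column.
	let mut aux: HashMap<Vec<u8>, Vec<u8>> = HashMap::new();

	let set = AuthoritySet { set_id: 0, authorities: vec![([0u8; 32], 1)] };

	// "write authority set to DB": store the encoded set under an aux key.
	aux.insert(AUTHORITY_SET_KEY.to_vec(), encode(&set));

	// "read authority set from DB on startup": fetch it again for decoding.
	let stored = aux.get(AUTHORITY_SET_KEY).expect("just inserted");
	assert_eq!(&stored[..8], &0u64.to_le_bytes()[..]);
}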
23 changes: 20 additions & 3 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -27,6 +27,7 @@ members = [
"core/consensus/rhd",
"core/executor",
"core/finality-grandpa",
"core/finality-grandpa/primitives",
"core/keyring",
"core/network",
"core/primitives",
21 changes: 21 additions & 0 deletions core/client/db/src/lib.rs
@@ -277,6 +277,18 @@ pub struct BlockImportOperation<Block: BlockT, H: Hasher> {
updates: MemoryDB<H>,
changes_trie_updates: MemoryDB<H>,
pending_block: Option<PendingBlock<Block>>,
aux_ops: Vec<(Vec<u8>, Option<Vec<u8>>)>,
}

impl<Block: BlockT, H: Hasher> BlockImportOperation<Block, H> {
fn apply_aux(&mut self, transaction: &mut DBTransaction) {
for (key, maybe_val) in self.aux_ops.drain(..) {
match maybe_val {
Some(val) => transaction.put_vec(columns::AUX, &key, val),
None => transaction.delete(columns::AUX, &key),
}
}
}
}

impl<Block> client::backend::BlockImportOperation<Block, Blake2Hasher>
@@ -348,6 +360,13 @@ where Block: BlockT<Hash=H256>,
self.changes_trie_updates = update;
Ok(())
}

fn set_aux<I>(&mut self, ops: I) -> Result<(), client::error::Error>
where I: IntoIterator<Item=(Vec<u8>, Option<Vec<u8>>)>
{
self.aux_ops = ops.into_iter().collect();
Ok(())
}
}

struct StorageDb<Block: BlockT> {
@@ -605,13 +624,15 @@ impl<Block> client::backend::Backend<Block, Blake2Hasher> for Backend<Block> where
old_state: state,
updates: MemoryDB::default(),
changes_trie_updates: MemoryDB::default(),
aux_ops: Vec::new(),
})
}

fn commit_operation(&self, mut operation: Self::BlockImportOperation)
-> Result<(), client::error::Error>
{
let mut transaction = DBTransaction::new();
operation.apply_aux(&mut transaction);

if let Some(pending_block) = operation.pending_block {
let hash = pending_block.header.hash();
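
For orientation: the core of the client-db change above is that auxiliary writes are staged on the block import operation and only turned into database writes when the operation is committed, so aux data lands in the same transaction as the block itself. Below is a minimal self-contained sketch of that staging/commit flow; `Transaction` and `ImportOperation` are simplified stand-ins, not the real `kvdb`/client-db types.

/// Simplified stand-in for a kvdb write batch.
#[derive(Default)]
struct Transaction {
	puts: Vec<(Vec<u8>, Vec<u8>)>,
	deletes: Vec<Vec<u8>>,
}

/// Simplified stand-in for the DB-backed block import operation.
#[derive(Default)]
struct ImportOperation {
	aux_ops: Vec<(Vec<u8>, Option<Vec<u8>>)>,
}

impl ImportOperation {
	/// Stage aux writes; a `None` value means "delete this key".
	fn set_aux(&mut self, ops: Vec<(Vec<u8>, Option<Vec<u8>>)>) {
		self.aux_ops = ops;
	}

	/// Drain the staged ops into the transaction, mirroring `apply_aux` above.
	fn apply_aux(&mut self, transaction: &mut Transaction) {
		for (key, maybe_val) in self.aux_ops.drain(..) {
			match maybe_val {
				Some(val) => transaction.puts.push((key, val)),
				None => transaction.deletes.push(key),
			}
		}
	}
}

fn commit_operation(mut op: ImportOperation) -> Transaction {
	// Aux ops and block data end up in one write batch, committed together.
	let mut transaction = Transaction::default();
	op.apply_aux(&mut transaction);
	// ... header/body/state writes would be added to the same transaction here ...
	transaction
}

fn main() {
	let mut op = ImportOperation::default();
	op.set_aux(vec![
		(b"grandpa_authority_set".to_vec(), Some(vec![1, 2, 3])),
		(b"obsolete_key".to_vec(), None),
	]);
	let tx = commit_operation(op);
	assert_eq!(tx.puts.len(), 1);
	assert_eq!(tx.deletes.len(), 1);
}

Because `None` values become deletes, a caller can clear stale aux entries in the same atomic commit that imports the block.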
17 changes: 13 additions & 4 deletions core/client/db/src/light.rs
@@ -43,6 +43,7 @@ pub(crate) mod columns {
pub const HEADER: Option<u32> = Some(2);
pub const CACHE: Option<u32> = Some(3);
pub const CHT: Option<u32> = Some(4);
pub const AUX: Option<u32> = Some(5);
}

/// Light blockchain storage. Stores most recent headers + CHTs for older headers.
@@ -238,6 +239,7 @@ impl<Block> LightBlockchainStorage<Block> for LightStorage<Block>
header: Block::Header,
authorities: Option<Vec<AuthorityId>>,
leaf_state: NewBlockState,
aux_ops: Vec<(Vec<u8>, Option<Vec<u8>>)>,
) -> ClientResult<()> {
let mut transaction = DBTransaction::new();

@@ -253,6 +255,13 @@
::utils::number_and_hash_to_lookup_key(number, hash)
};

for (key, maybe_val) in aux_ops {
match maybe_val {
Some(val) => transaction.put_vec(columns::AUX, &key, val),
None => transaction.delete(columns::AUX, &key),
}
}

if leaf_state.is_best() {
// handle reorg.
{
@@ -427,7 +436,7 @@ pub(crate) mod tests {
) -> Hash {
let header = prepare_header(parent, number, extrinsics_root);
let hash = header.hash();
db.import_header(header, authorities, NewBlockState::Best).unwrap();
db.import_header(header, authorities, NewBlockState::Best, Vec::new()).unwrap();
hash
}

) -> Hash {
let header = prepare_header(parent, number, Default::default());
let hash = header.hash();
db.import_header(header, authorities, NewBlockState::Best).unwrap();
db.import_header(header, authorities, NewBlockState::Best, Vec::new()).unwrap();
hash
}

) -> Hash {
let header = prepare_header(parent, number, Default::default());
let hash = header.hash();
db.import_header(header, authorities, NewBlockState::Final).unwrap();
db.import_header(header, authorities, NewBlockState::Final, Vec::new()).unwrap();
hash
}

) -> Hash {
let header = prepare_header(parent, number, Default::default());
let hash = header.hash();
db.import_header(header, authorities, NewBlockState::Normal).unwrap();
db.import_header(header, authorities, NewBlockState::Normal, Vec::new()).unwrap();
hash
}

3 changes: 3 additions & 0 deletions core/client/src/backend.rs
@@ -75,6 +75,9 @@
fn reset_storage(&mut self, top: StorageMap, children: ChildrenStorageMap) -> error::Result<H::Out>;
/// Inject changes trie data into the database.
fn update_changes_trie(&mut self, update: MemoryDB<H>) -> error::Result<()>;
/// Update auxiliary keys. Values are `None` if should be deleted.
fn set_aux<I>(&mut self, ops: I) -> error::Result<()>
where I: IntoIterator<Item=(Vec<u8>, Option<Vec<u8>>)>;
}

/// Client backend. Manages the data layer.
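
The new `set_aux` method added to the backend's import-operation trait above accepts any iterator of `(key, Option<value>)` pairs, with `None` marking a key for deletion. A rough sketch of that shape follows; the `Operation` trait, `InMemoryOperation` type, and `String` error are simplified stand-ins rather than the actual Substrate trait and error types.

/// Simplified stand-in for the block import operation trait in `backend.rs`.
trait Operation {
	/// Stage auxiliary key/value writes. `None` values mark keys for deletion.
	fn set_aux<I>(&mut self, ops: I) -> Result<(), String>
	where
		I: IntoIterator<Item = (Vec<u8>, Option<Vec<u8>>)>;
}

#[derive(Default)]
struct InMemoryOperation {
	aux_ops: Vec<(Vec<u8>, Option<Vec<u8>>)>,
}

impl Operation for InMemoryOperation {
	fn set_aux<I>(&mut self, ops: I) -> Result<(), String>
	where
		I: IntoIterator<Item = (Vec<u8>, Option<Vec<u8>>)>,
	{
		// Collect the staged ops; they are applied when the operation is committed.
		self.aux_ops = ops.into_iter().collect();
		Ok(())
	}
}

fn main() {
	let mut op = InMemoryOperation::default();
	op.set_aux(vec![(b"grandpa_authority_set".to_vec(), Some(vec![1, 2, 3]))])
		.expect("staging aux ops cannot fail in this sketch");
	assert_eq!(op.aux_ops.len(), 1);
}

The generic `IntoIterator` bound lets callers pass a `Vec`, a map, or a lazily built iterator without first allocating an intermediate collection.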
10 changes: 8 additions & 2 deletions core/client/src/client.rs
@@ -502,6 +502,7 @@ impl<B, E, Block> Client<B, E, Block> where
body: Option<Vec<Block::Extrinsic>>,
authorities: Option<Vec<AuthorityId>>,
finalized: bool,
aux: Vec<(Vec<u8>, Option<Vec<u8>>)>,
) -> error::Result<ImportResult> {
let parent_hash = import_headers.post().parent_hash().clone();
match self.backend.blockchain().status(BlockId::Hash(hash))? {
@@ -592,6 +593,8 @@ impl<B, E, Block> Client<B, E, Block> where
if let Some(Some(changes_update)) = changes_update {
transaction.update_changes_trie(changes_update)?;
}

transaction.set_aux(aux)?;
self.backend.commit_operation(transaction)?;

if make_notifications {
@@ -798,7 +801,9 @@ impl<B, E, Block> Client<B, E, Block> where
/// TODO [snd] possibly implement this on blockchain::Backend and just redirect here
/// Returns `Ok(None)` if `target_hash` is not found in search space.
/// TODO [snd] write down time complexity
pub fn best_containing(&self, target_hash: Block::Hash, maybe_max_number: Option<NumberFor<Block>>) -> error::Result<Option<Block::Hash>> {
pub fn best_containing(&self, target_hash: Block::Hash, maybe_max_number: Option<NumberFor<Block>>)
-> error::Result<Option<Block::Hash>>
{
let target_header = {
match self.backend.blockchain().header(BlockId::Hash(target_hash))? {
Some(x) => x,
@@ -913,7 +918,7 @@ impl<B, E, Block> consensus::BlockImport<Block> for Client<B, E, Block> where
post_runtime_digests,
body,
finalized,
..
auxiliary,
} = import_block;
let parent_hash = header.parent_hash().clone();

@@ -945,6 +950,7 @@
body,
new_authorities,
finalized,
auxiliary,
);

*self.importing_block.write() = None;
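
In `client.rs` above, the consensus-level import block's `auxiliary` field is now destructured instead of being dropped by the previous `..` pattern, and is forwarded into the backend operation together with the block data. A small sketch of that plumbing, using hypothetical simplified types rather than the real `ImportBlock`:

/// Simplified stand-in for the consensus import-block type.
struct IncomingBlock {
	header: String,
	finalized: bool,
	auxiliary: Vec<(Vec<u8>, Option<Vec<u8>>)>,
}

/// Simplified stand-in for the client's internal import routine.
fn execute_and_import_block(header: String, finalized: bool, aux: Vec<(Vec<u8>, Option<Vec<u8>>)>) {
	// In the real code the aux ops are handed to `transaction.set_aux(aux)`
	// before `backend.commit_operation(transaction)`.
	println!("importing {} (finalized: {}), {} aux ops", header, finalized, aux.len());
}

fn import_block(block: IncomingBlock) {
	// Destructure the auxiliary field instead of ignoring it.
	let IncomingBlock { header, finalized, auxiliary } = block;
	execute_and_import_block(header, finalized, auxiliary);
}

fn main() {
	import_block(IncomingBlock {
		header: "block #1".to_string(),
		finalized: false,
		auxiliary: vec![(b"grandpa_authority_set".to_vec(), Some(vec![0]))],
	});
}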
37 changes: 31 additions & 6 deletions core/client/src/in_mem.rs
@@ -96,6 +96,7 @@ struct BlockchainStorage<Block: BlockT> {
genesis_hash: Block::Hash,
cht_roots: HashMap<NumberFor<Block>, Block::Hash>,
leaves: LeafSet<Block::Hash, NumberFor<Block>>,
aux: HashMap<Vec<u8>, Vec<u8>>,
}

/// In-memory blockchain. Supports concurrent reads.
@@ -144,6 +145,7 @@ impl<Block: BlockT> Blockchain<Block> {
genesis_hash: Default::default(),
cht_roots: HashMap::new(),
leaves: LeafSet::new(),
aux: HashMap::new(),
}));
Blockchain {
storage: storage.clone(),
@@ -247,6 +249,16 @@ impl<Block: BlockT> Blockchain<Block> {
self.storage.write().finalized_hash = hash;
Ok(())
}

fn write_aux(&self, ops: Vec<(Vec<u8>, Option<Vec<u8>>)>) {
let mut storage = self.storage.write();
for (k, v) in ops {
match v {
Some(v) => storage.aux.insert(k, v),
None => storage.aux.remove(&k),
};
}
}
}

impl<Block: BlockT> HeaderBackend<Block> for Blockchain<Block> {
@@ -320,6 +332,7 @@ impl<Block: BlockT> light::blockchain::Storage<Block> for Blockchain<Block>
header: Block::Header,
authorities: Option<Vec<AuthorityId>>,
state: NewBlockState,
aux_ops: Vec<(Vec<u8>, Option<Vec<u8>>)>,
) -> error::Result<()> {
let hash = header.hash();
let parent_hash = *header.parent_hash();
@@ -328,6 +341,7 @@
self.cache.insert(parent_hash, authorities);
}

self.write_aux(aux_ops);
Ok(())
}

@@ -356,6 +370,7 @@ pub struct BlockImportOperation<Block: BlockT, H: Hasher> {
old_state: InMemory<H>,
new_state: Option<InMemory<H>>,
changes_trie_update: Option<MemoryDB<H>>,
aux: Option<Vec<(Vec<u8>, Option<Vec<u8>>)>>,
}

impl<Block, H> backend::BlockImportOperation<Block, H> for BlockImportOperation<Block, H>
@@ -426,6 +441,13 @@
self.new_state = Some(InMemory::from(transaction));
Ok(root)
}

fn set_aux<I>(&mut self, ops: I) -> error::Result<()>
where I: IntoIterator<Item=(Vec<u8>, Option<Vec<u8>>)>
{
self.aux = Some(ops.into_iter().collect());
Ok(())
}
}

/// In-memory backend. Keeps all states and blocks in memory. Useful for testing.
@@ -438,7 +460,6 @@
states: RwLock<HashMap<Block::Hash, InMemory<H>>>,
changes_trie_storage: InMemoryChangesTrieStorage<H>,
blockchain: Blockchain<Block>,
aux: RwLock<HashMap<Vec<u8>, Vec<u8>>>,
}

impl<Block, H> Backend<Block, H>
@@ -453,7 +474,6 @@
states: RwLock::new(HashMap::new()),
changes_trie_storage: InMemoryChangesTrieStorage::new(),
blockchain: Blockchain::new(),
aux: RwLock::new(HashMap::new()),
}
}
}
@@ -481,6 +501,7 @@
old_state: state,
new_state: None,
changes_trie_update: None,
aux: None,
})
}

@@ -508,6 +529,10 @@
self.blockchain.cache.insert(parent_hash, operation.pending_authorities);
}
}

if let Some(ops) = operation.aux {
self.blockchain.write_aux(ops);
}
Ok(())
}

@@ -535,18 +560,18 @@
}

fn insert_aux<'a, 'b: 'a, 'c: 'a, I: IntoIterator<Item=&'a (&'c [u8], &'c [u8])>, D: IntoIterator<Item=&'a &'b [u8]>>(&self, insert: I, delete: D) -> error::Result<()> {
let mut aux = self.aux.write();
let mut storage = self.blockchain.storage.write();
for (k, v) in insert {
aux.insert(k.to_vec(), v.to_vec());
storage.aux.insert(k.to_vec(), v.to_vec());
}
for k in delete {
aux.remove(*k);
storage.aux.remove(*k);
}
Ok(())
}

fn get_aux(&self, key: &[u8]) -> error::Result<Option<Vec<u8>>> {
Ok(self.aux.read().get(key).cloned())
Ok(self.blockchain.storage.read().aux.get(key).cloned())
}
}

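
Finally, the `in_mem.rs` change above removes the backend's private `aux` map and keeps auxiliary data in the shared blockchain storage, so values written during block import via `write_aux` are visible through `get_aux`, and `insert_aux` mutates the same map. A minimal sketch of that sharing, using `std::sync::RwLock` to stay self-contained (the real code uses its own storage and lock types):

use std::collections::HashMap;
use std::sync::{Arc, RwLock};

/// Simplified stand-in for the in-memory blockchain storage.
#[derive(Default)]
struct Storage {
	aux: HashMap<Vec<u8>, Vec<u8>>,
}

#[derive(Clone, Default)]
struct Blockchain {
	storage: Arc<RwLock<Storage>>,
}

impl Blockchain {
	/// Apply aux ops from a block import: `Some` inserts, `None` deletes.
	fn write_aux(&self, ops: Vec<(Vec<u8>, Option<Vec<u8>>)>) {
		let mut storage = self.storage.write().unwrap();
		for (k, v) in ops {
			match v {
				Some(v) => { storage.aux.insert(k, v); }
				None => { storage.aux.remove(&k); }
			}
		}
	}

	/// Read an aux value directly, e.g. on node startup.
	fn get_aux(&self, key: &[u8]) -> Option<Vec<u8>> {
		self.storage.read().unwrap().aux.get(key).cloned()
	}
}

fn main() {
	let chain = Blockchain::default();
	chain.write_aux(vec![(b"grandpa_set".to_vec(), Some(vec![0u8]))]);
	assert_eq!(chain.get_aux(b"grandpa_set"), Some(vec![0u8]));
	chain.write_aux(vec![(b"grandpa_set".to_vec(), None)]);
	assert_eq!(chain.get_aux(b"grandpa_set"), None);
}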