Convert upper-case acronyms to CamelCase as suggested by the naming convention

See https://rust-lang.github.io/rust-clippy/master/index.html#upper_case_acronyms
Riccardo Casatta
2021-03-30 16:33:07 +02:00
parent 745be7bea8
commit 192965413c
21 changed files with 188 additions and 188 deletions
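
As context for the renames below, here is a minimal Rust sketch of what clippy's upper_case_acronyms lint flags and the CamelCase spelling it suggests. The type and variant names here are illustrative only; they are not taken from this codebase.

// clippy::upper_case_acronyms fires on identifiers that keep an acronym
// fully upper-cased, e.g. `DB` and `IO` below:
#[allow(dead_code)]
pub enum FetchError {
    DB(String),
    IO(String),
}

// The suggested fix is to camel-case the acronym, which is what this
// commit does throughout (DB -> Db, IO -> Io, CFSync -> CfSync, ...):
#[allow(dead_code)]
pub enum FetchErrorCamel {
    Db(String),
    Io(String),
}

// A crate that prefers the upper-case style could instead opt out with
// #[allow(clippy::upper_case_acronyms)] on the item or at crate level.
fn main() {
    let _ = FetchErrorCamel::Db("connection closed".to_string());
}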


@@ -237,7 +237,7 @@ impl Blockchain for CompactFiltersBlockchain {
let skip_blocks = self.skip_blocks.unwrap_or(0);
- let cf_sync = Arc::new(CFSync::new(Arc::clone(&self.headers), skip_blocks, 0x00)?);
+ let cf_sync = Arc::new(CfSync::new(Arc::clone(&self.headers), skip_blocks, 0x00)?);
let initial_height = self.headers.get_height()?;
let total_bundles = (first_peer.get_version().start_height as usize)
@@ -537,11 +537,11 @@ pub enum CompactFiltersError {
NoPeers,
/// Internal database error
- DB(rocksdb::Error),
+ Db(rocksdb::Error),
/// Internal I/O error
- IO(std::io::Error),
+ Io(std::io::Error),
/// Invalid BIP158 filter
- BIP158(bitcoin::util::bip158::Error),
+ Bip158(bitcoin::util::bip158::Error),
/// Internal system time error
Time(std::time::SystemTimeError),
@@ -557,9 +557,9 @@ impl fmt::Display for CompactFiltersError {
impl std::error::Error for CompactFiltersError {}
- impl_error!(rocksdb::Error, DB, CompactFiltersError);
- impl_error!(std::io::Error, IO, CompactFiltersError);
- impl_error!(bitcoin::util::bip158::Error, BIP158, CompactFiltersError);
+ impl_error!(rocksdb::Error, Db, CompactFiltersError);
+ impl_error!(std::io::Error, Io, CompactFiltersError);
+ impl_error!(bitcoin::util::bip158::Error, Bip158, CompactFiltersError);
impl_error!(std::time::SystemTimeError, Time, CompactFiltersError);
impl From<crate::error::Error> for CompactFiltersError {
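
The impl_error! call sites above change only the variant name they target. The macro itself is not part of this diff; the following is a hedged sketch of a plausible expansion, assuming impl_error! simply generates a From conversion into the named variant (the crate's real macro may differ).

// Hypothetical stand-in for the crate's `impl_error!` macro, inferred from
// the call sites above; the actual definition is not shown in this commit.
macro_rules! impl_error {
    ( $from:ty, $variant:ident, $to:ident ) => {
        impl From<$from> for $to {
            fn from(err: $from) -> Self {
                $to::$variant(err)
            }
        }
    };
}

// Minimal error enum mirroring the renamed variants.
#[derive(Debug)]
#[allow(dead_code)]
enum CompactFiltersError {
    Db(String),
    Io(std::io::Error),
}

impl_error!(std::io::Error, Io, CompactFiltersError);

fn main() {
    // `?`-style conversions now land on the CamelCase variant `Io`.
    let err: CompactFiltersError = std::io::Error::from(std::io::ErrorKind::Other).into();
    println!("{:?}", err);
}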


@@ -120,7 +120,7 @@ impl Encodable for BundleStatus {
BundleStatus::Init => {
written += 0x00u8.consensus_encode(&mut e)?;
}
- BundleStatus::CFHeaders { cf_headers } => {
+ BundleStatus::CfHeaders { cf_headers } => {
written += 0x01u8.consensus_encode(&mut e)?;
written += VarInt(cf_headers.len() as u64).consensus_encode(&mut e)?;
for header in cf_headers {
@@ -171,7 +171,7 @@ impl Decodable for BundleStatus {
cf_headers.push(FilterHeader::consensus_decode(&mut d)?);
}
- Ok(BundleStatus::CFHeaders { cf_headers })
+ Ok(BundleStatus::CfHeaders { cf_headers })
}
0x02 => {
let num = VarInt::consensus_decode(&mut d)?;
@@ -623,26 +623,26 @@ impl<T: StoreType> fmt::Debug for ChainStore<T> {
pub enum BundleStatus {
Init,
- CFHeaders { cf_headers: Vec<FilterHeader> },
+ CfHeaders { cf_headers: Vec<FilterHeader> },
CFilters { cf_filters: Vec<Vec<u8>> },
Processed { cf_filters: Vec<Vec<u8>> },
Tip { cf_filters: Vec<Vec<u8>> },
Pruned,
}
- pub struct CFStore {
+ pub struct CfStore {
store: Arc<RwLock<DB>>,
filter_type: u8,
}
type BundleEntry = (BundleStatus, FilterHeader);
- impl CFStore {
+ impl CfStore {
pub fn new(
headers_store: &ChainStore<Full>,
filter_type: u8,
) -> Result<Self, CompactFiltersError> {
- let cf_store = CFStore {
+ let cf_store = CfStore {
store: Arc::clone(&headers_store.store),
filter_type,
};
@@ -782,7 +782,7 @@ impl CFStore {
}
let key = StoreEntry::CFilterTable((self.filter_type, Some(bundle))).get_key();
- let value = (BundleStatus::CFHeaders { cf_headers }, checkpoint);
+ let value = (BundleStatus::CfHeaders { cf_headers }, checkpoint);
read_store.put(key, value.serialize())?;


@@ -25,22 +25,22 @@ use crate::error::Error;
pub(crate) const BURIED_CONFIRMATIONS: usize = 100;
- pub struct CFSync {
+ pub struct CfSync {
headers_store: Arc<ChainStore<Full>>,
- cf_store: Arc<CFStore>,
+ cf_store: Arc<CfStore>,
skip_blocks: usize,
bundles: Mutex<VecDeque<(BundleStatus, FilterHeader, usize)>>,
}
- impl CFSync {
+ impl CfSync {
pub fn new(
headers_store: Arc<ChainStore<Full>>,
skip_blocks: usize,
filter_type: u8,
) -> Result<Self, CompactFiltersError> {
- let cf_store = Arc::new(CFStore::new(&headers_store, filter_type)?);
+ let cf_store = Arc::new(CfStore::new(&headers_store, filter_type)?);
- Ok(CFSync {
+ Ok(CfSync {
headers_store,
cf_store,
skip_blocks,
@@ -151,7 +151,7 @@ impl CFSync {
checkpoint,
headers_resp.filter_hashes,
)? {
- BundleStatus::CFHeaders { cf_headers } => cf_headers,
+ BundleStatus::CfHeaders { cf_headers } => cf_headers,
_ => return Err(CompactFiltersError::InvalidResponse),
};
@@ -171,7 +171,7 @@ impl CFSync {
.cf_store
.advance_to_cf_filters(index, checkpoint, cf_headers, filters)?;
}
- if let BundleStatus::CFHeaders { cf_headers } = status {
+ if let BundleStatus::CfHeaders { cf_headers } = status {
log::trace!("status: CFHeaders");
peer.get_cf_filters(


@@ -33,7 +33,7 @@ use bitcoin::{BlockHeader, Script, Transaction, Txid};
use electrum_client::{Client, ConfigBuilder, ElectrumApi, Socks5Config};
- use self::utils::{ELSGetHistoryRes, ElectrumLikeSync};
+ use self::utils::{ElectrumLikeSync, ElsGetHistoryRes};
use super::*;
use crate::database::BatchDatabase;
use crate::error::Error;
@@ -107,7 +107,7 @@ impl ElectrumLikeSync for Client {
fn els_batch_script_get_history<'s, I: IntoIterator<Item = &'s Script> + Clone>(
&self,
scripts: I,
- ) -> Result<Vec<Vec<ELSGetHistoryRes>>, Error> {
+ ) -> Result<Vec<Vec<ElsGetHistoryRes>>, Error> {
self.batch_script_get_history(scripts)
.map(|v| {
v.into_iter()
@@ -116,7 +116,7 @@ impl ElectrumLikeSync for Client {
.map(
|electrum_client::GetHistoryRes {
height, tx_hash, ..
- }| ELSGetHistoryRes {
+ }| ElsGetHistoryRes {
height,
tx_hash,
},


@@ -39,7 +39,7 @@ use bitcoin::hashes::hex::{FromHex, ToHex};
use bitcoin::hashes::{sha256, Hash};
use bitcoin::{BlockHash, BlockHeader, Script, Transaction, Txid};
- use self::utils::{ELSGetHistoryRes, ElectrumLikeSync};
+ use self::utils::{ElectrumLikeSync, ElsGetHistoryRes};
use super::*;
use crate::database::BatchDatabase;
use crate::error::Error;
@@ -210,7 +210,7 @@ impl UrlClient {
async fn _script_get_history(
&self,
script: &Script,
- ) -> Result<Vec<ELSGetHistoryRes>, EsploraError> {
+ ) -> Result<Vec<ElsGetHistoryRes>, EsploraError> {
let mut result = Vec::new();
let scripthash = Self::script_to_scripthash(script);
@@ -227,7 +227,7 @@ impl UrlClient {
.json::<Vec<EsploraGetHistory>>()
.await?
.into_iter()
- .map(|x| ELSGetHistoryRes {
+ .map(|x| ElsGetHistoryRes {
tx_hash: x.txid,
height: x.status.block_height.unwrap_or(0) as i32,
}),
@@ -261,7 +261,7 @@ impl UrlClient {
debug!("... adding {} confirmed transactions", len);
- result.extend(response.into_iter().map(|x| ELSGetHistoryRes {
+ result.extend(response.into_iter().map(|x| ElsGetHistoryRes {
tx_hash: x.txid,
height: x.status.block_height.unwrap_or(0) as i32,
}));
@@ -291,7 +291,7 @@ impl ElectrumLikeSync for UrlClient {
fn els_batch_script_get_history<'s, I: IntoIterator<Item = &'s Script>>(
&self,
scripts: I,
- ) -> Result<Vec<Vec<ELSGetHistoryRes>>, Error> {
+ ) -> Result<Vec<Vec<ElsGetHistoryRes>>, Error> {
let future = async {
let mut results = vec![];
for chunk in ChunksIterator::new(scripts.into_iter(), self.concurrency as usize) {
@@ -299,7 +299,7 @@ impl ElectrumLikeSync for UrlClient {
for script in chunk {
futs.push(self._script_get_history(&script));
}
- let partial_results: Vec<Vec<ELSGetHistoryRes>> = futs.try_collect().await?;
+ let partial_results: Vec<Vec<ElsGetHistoryRes>> = futs.try_collect().await?;
results.extend(partial_results);
}
Ok(stream::iter(results).collect().await)


@@ -26,7 +26,7 @@ use crate::wallet::time::Instant;
use crate::wallet::utils::ChunksIterator;
#[derive(Debug)]
- pub struct ELSGetHistoryRes {
+ pub struct ElsGetHistoryRes {
pub height: i32,
pub tx_hash: Txid,
}
@@ -37,7 +37,7 @@ pub trait ElectrumLikeSync {
fn els_batch_script_get_history<'s, I: IntoIterator<Item = &'s Script> + Clone>(
&self,
scripts: I,
- ) -> Result<Vec<Vec<ELSGetHistoryRes>>, Error>;
+ ) -> Result<Vec<Vec<ElsGetHistoryRes>>, Error>;
fn els_batch_transaction_get<'s, I: IntoIterator<Item = &'s Txid> + Clone>(
&self,
@@ -77,7 +77,7 @@ pub trait ElectrumLikeSync {
for (i, chunk) in ChunksIterator::new(script_iter, stop_gap).enumerate() {
// TODO if i == last, should create another chunk of addresses in db
- let call_result: Vec<Vec<ELSGetHistoryRes>> =
+ let call_result: Vec<Vec<ElsGetHistoryRes>> =
maybe_await!(self.els_batch_script_get_history(chunk.iter()))?;
let max_index = call_result
.iter()
@@ -87,7 +87,7 @@ pub trait ElectrumLikeSync {
if let Some(max) = max_index {
max_indexes.insert(keychain, max + (i * chunk_size) as u32);
}
- let flattened: Vec<ELSGetHistoryRes> = call_result.into_iter().flatten().collect();
+ let flattened: Vec<ElsGetHistoryRes> = call_result.into_iter().flatten().collect();
debug!("#{} of {:?} results:{}", i, keychain, flattened.len());
if flattened.is_empty() {
// Didn't find anything in the last `stop_gap` script_pubkeys, breaking