Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 10 additions & 5 deletions examples/indexer.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use std::{path::PathBuf, sync::Arc, time::Instant};

use tx_indexer_heuristics::ast::SignalsRbf;
use tx_indexer_fingerprints::HasInputFingerprints;
use tx_indexer_pipeline::{context::PipelineContext, engine::Engine, ops::AllDenseTxs};
use tx_indexer_primitives::{UnifiedStorage, dense::DenseStorageBuilder, test_utils::temp_dir};

Expand Down Expand Up @@ -79,16 +79,21 @@ fn main() {

let source = AllDenseTxs::new(&ctx);
let all_txs = source.txs();
let rbf_mask = SignalsRbf::new(all_txs);
let rbf_mask = all_txs.filter(|tx_id, ctx| {
tx_id
.with(ctx.unified_storage())
.inputs()
.any(|input| input.signals_rbf())
});

// 3. Evaluate
let eval_start = Instant::now();
let result = engine.eval(&rbf_mask);
let result = engine.eval(&rbf_mask).into_owned();
let eval_elapsed = eval_start.elapsed();

// 4. Print results
let rbf_count = result.values().filter(|&&v| v).count();
let total = result.len();
let rbf_count = result.len();
let total = engine.eval(&all_txs).into_owned().len();

println!();
println!("--- RBF signaling analysis ({eval_elapsed:.2?}) ---");
Expand Down
53 changes: 2 additions & 51 deletions src/crates/heuristics/src/ast/fingerprint.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
use std::collections::HashMap;

use tx_indexer_fingerprints::{
InputSortingType, classify_script_pubkey,
input::HasInputFingerprints,
Expand All @@ -13,59 +11,12 @@ use tx_indexer_pipeline::{
engine::EvalContext,
expr::Expr,
node::{Node, NodeId},
value::{Mask, NormalizedFingerprints, TxSet},
value::{NormalizedFingerprints, TxSet},
};
use tx_indexer_primitives::{
AbstractTransaction, AbstractTxIn, HasScriptPubkey, handle::TxOutHandle, unified::AnyTxId,
AbstractTransaction, AbstractTxIn, HasScriptPubkey, handle::TxOutHandle,
};

/// Pipeline node that flags transactions opting in to replace-by-fee.
///
/// Following BIP 125, a transaction opts in when at least one of its
/// inputs carries a sequence number below 0xfffffffe.
/// TODO: this is scaffolding. In the pipeline we should just be able to use map/reduce semantics for any fingerprint. Not build custom nodes for each fingerprint.
pub struct SignalsRbfNode {
    // Upstream expression producing the set of transactions to inspect.
    input: Expr<TxSet>,
}

impl SignalsRbfNode {
    /// Wraps the given transaction-set expression in an RBF-detection node.
    pub fn new(input: Expr<TxSet>) -> Self {
        SignalsRbfNode { input }
    }
}

impl Node for SignalsRbfNode {
    type OutputValue = Mask<AnyTxId>;

    /// The node's only dependency is its input transaction set.
    fn dependencies(&self) -> Vec<NodeId> {
        vec![self.input.id()]
    }

    /// Builds a boolean mask over the input set: `true` for each transaction
    /// that has at least one input signaling RBF.
    fn evaluate(&self, ctx: &EvalContext) -> HashMap<AnyTxId, bool> {
        let tx_ids = ctx.get(&self.input);
        let mut mask = HashMap::new();
        for tx_id in tx_ids.iter() {
            // Resolve the id against unified storage to walk its inputs.
            let tx = tx_id.with(ctx.unified_storage());
            let signals = tx.inputs().any(|input| input.signals_rbf());
            mask.insert(*tx_id, signals);
        }
        mask
    }

    fn name(&self) -> &'static str {
        "SignalsRbf"
    }
}

/// Convenience entry point for constructing the RBF mask expression.
pub struct SignalsRbf;

impl SignalsRbf {
    /// Registers a `SignalsRbfNode` over `input` on the input's pipeline
    /// context and returns the resulting mask expression.
    pub fn new(input: Expr<TxSet>) -> Expr<Mask<AnyTxId>> {
        let pipeline_ctx = input.context().clone();
        pipeline_ctx.register(SignalsRbfNode::new(input))
    }
}

fn sorted_deduped(vals: impl Iterator<Item = u32>) -> Vec<u32> {
let mut v: Vec<u32> = vals.collect();
v.sort_unstable();
Expand Down
1 change: 0 additions & 1 deletion src/crates/heuristics/src/ast/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ pub use change::{
};
pub use coinjoin::{IsCoinJoin, IsCoinJoinNode};
pub use common_input::{MultiInputHeuristic, MultiInputHeuristicNode};
pub use fingerprint::{SignalsRbf, SignalsRbfNode};
pub use same_address::SameAddressClusteringNode;
pub use uih::{
UnnecessaryInputHeuristic1, UnnecessaryInputHeuristic1Node, UnnecessaryInputHeuristic2,
Expand Down
Loading