Modularize tracing executor and metrics rename (#6424)
* Tracing executor and metrics rename

* Appease clippy

* Merge branch 'unstable' into modularise-task-executor
AgeManning authored Oct 28, 2024
1 parent 8188e03 commit e31ac50
Showing 59 changed files with 364 additions and 323 deletions.
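
Most of this diff is a mechanical rename: the `common/lighthouse_metrics` crate becomes `common/metrics`, and every consumer swaps its `lighthouse_metrics` dependency and imports for `metrics`. As a rough sketch of the pattern the touched `metrics.rs` modules follow (the metric name and helper function below are hypothetical, not taken from this commit):

// Sketch of a downstream crate's metrics.rs after the rename: re-export the
// renamed `metrics` crate and lazily register statics in the default registry.
pub use metrics::*;
use std::sync::LazyLock;

// Hypothetical example metric; each crate defines its own set.
pub static EXAMPLE_REQUESTS_TOTAL: LazyLock<Result<IntCounter>> = LazyLock::new(|| {
    try_create_int_counter(
        "example_requests_total",
        "Total number of example requests processed",
    )
});

pub fn observe_request() {
    // `inc_counter` is a no-op if the metric failed to register.
    inc_counter(&EXAMPLE_REQUESTS_TOTAL);
}
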
373 changes: 184 additions & 189 deletions Cargo.lock

Large diffs are not rendered by default.

5 changes: 3 additions & 2 deletions Cargo.toml
@@ -30,7 +30,7 @@ members = [
"common/eth2_interop_keypairs",
"common/eth2_network_config",
"common/eth2_wallet_manager",
-"common/lighthouse_metrics",
+"common/metrics",
"common/lighthouse_version",
"common/lockfile",
"common/logging",
@@ -141,6 +141,7 @@ milhouse = "0.3"
num_cpus = "1"
parking_lot = "0.12"
paste = "1"
+prometheus = "0.13"
quickcheck = "1"
quickcheck_macros = "1"
quote = "1"
@@ -213,7 +214,7 @@ gossipsub = { path = "beacon_node/lighthouse_network/gossipsub/" }
http_api = { path = "beacon_node/http_api" }
int_to_bytes = { path = "consensus/int_to_bytes" }
kzg = { path = "crypto/kzg" }
-lighthouse_metrics = { path = "common/lighthouse_metrics" }
+metrics = { path = "common/metrics" }
lighthouse_network = { path = "beacon_node/lighthouse_network" }
lighthouse_version = { path = "common/lighthouse_version" }
lockfile = { path = "common/lockfile" }
2 changes: 1 addition & 1 deletion beacon_node/beacon_chain/Cargo.toml
@@ -42,7 +42,7 @@ hex = { workspace = true }
int_to_bytes = { workspace = true }
itertools = { workspace = true }
kzg = { workspace = true }
-lighthouse_metrics = { workspace = true }
+metrics = { workspace = true }
lighthouse_version = { workspace = true }
logging = { workspace = true }
lru = { workspace = true }
2 changes: 1 addition & 1 deletion beacon_node/beacon_chain/src/block_verification.rs
@@ -70,7 +70,7 @@ use derivative::Derivative;
use eth2::types::{BlockGossip, EventKind};
use execution_layer::PayloadStatus;
pub use fork_choice::{AttestationFromBlock, PayloadVerificationStatus};
-use lighthouse_metrics::TryExt;
+use metrics::TryExt;
use parking_lot::RwLockReadGuard;
use proto_array::Block as ProtoBlock;
use safe_arith::ArithError;
2 changes: 1 addition & 1 deletion beacon_node/beacon_chain/src/metrics.rs
@@ -2,7 +2,7 @@ use crate::observed_attesters::SlotSubcommitteeIndex;
use crate::types::consts::altair::SYNC_COMMITTEE_SUBNET_COUNT;
use crate::{BeaconChain, BeaconChainError, BeaconChainTypes};
use bls::FixedBytesExtended;
-pub use lighthouse_metrics::*;
+pub use metrics::*;
use slot_clock::SlotClock;
use std::sync::LazyLock;
use types::{BeaconState, Epoch, EthSpec, Hash256, Slot};
2 changes: 1 addition & 1 deletion beacon_node/beacon_chain/tests/attestation_production.rs
@@ -86,7 +86,7 @@ async fn produces_attestations_from_attestation_simulator_service() {
let expected_miss_metrics_count = 0;
let expected_hit_metrics_count =
num_blocks_produced - UNAGGREGATED_ATTESTATION_LAG_SLOTS as u64;
-lighthouse_metrics::gather().iter().for_each(|mf| {
+metrics::gather().iter().for_each(|mf| {
if hit_prometheus_metrics.contains(&mf.get_name()) {
assert_eq!(
mf.get_metric()[0].get_counter().get_value() as u64,
2 changes: 1 addition & 1 deletion beacon_node/beacon_processor/Cargo.toml
@@ -16,7 +16,7 @@ task_executor = { workspace = true }
slot_clock = { workspace = true }
lighthouse_network = { workspace = true }
types = { workspace = true }
-lighthouse_metrics = { workspace = true }
+metrics = { workspace = true }
parking_lot = { workspace = true }
num_cpus = { workspace = true }
serde = { workspace = true }
2 changes: 1 addition & 1 deletion beacon_node/beacon_processor/src/metrics.rs
@@ -1,4 +1,4 @@
-pub use lighthouse_metrics::*;
+pub use metrics::*;
use std::sync::LazyLock;

/*
2 changes: 1 addition & 1 deletion beacon_node/client/Cargo.toml
@@ -33,7 +33,7 @@ sensitive_url = { workspace = true }
genesis = { workspace = true }
task_executor = { workspace = true }
environment = { workspace = true }
-lighthouse_metrics = { workspace = true }
+metrics = { workspace = true }
time = "0.3.5"
directory = { workspace = true }
http_api = { workspace = true }
2 changes: 1 addition & 1 deletion beacon_node/client/src/metrics.rs
@@ -1,4 +1,4 @@
-pub use lighthouse_metrics::*;
+pub use metrics::*;
use std::sync::LazyLock;

pub static SYNC_SLOTS_PER_SECOND: LazyLock<Result<IntGauge>> = LazyLock::new(|| {
2 changes: 1 addition & 1 deletion beacon_node/eth1/Cargo.toml
@@ -25,7 +25,7 @@ logging = { workspace = true }
superstruct = { workspace = true }
tokio = { workspace = true }
state_processing = { workspace = true }
-lighthouse_metrics = { workspace = true }
+metrics = { workspace = true }
task_executor = { workspace = true }
eth2 = { workspace = true }
sensitive_url = { workspace = true }
2 changes: 1 addition & 1 deletion beacon_node/eth1/src/metrics.rs
@@ -1,4 +1,4 @@
-pub use lighthouse_metrics::*;
+pub use metrics::*;
use std::sync::LazyLock;

/*
2 changes: 1 addition & 1 deletion beacon_node/execution_layer/Cargo.toml
@@ -35,7 +35,7 @@ slot_clock = { workspace = true }
tempfile = { workspace = true }
rand = { workspace = true }
zeroize = { workspace = true }
-lighthouse_metrics = { workspace = true }
+metrics = { workspace = true }
ethers-core = { workspace = true }
builder_client = { path = "../builder_client" }
fork_choice = { workspace = true }
2 changes: 1 addition & 1 deletion beacon_node/execution_layer/src/metrics.rs
@@ -1,4 +1,4 @@
-pub use lighthouse_metrics::*;
+pub use metrics::*;
use std::sync::LazyLock;

pub const HIT: &str = "hit";
2 changes: 1 addition & 1 deletion beacon_node/http_api/Cargo.toml
@@ -20,7 +20,7 @@ lighthouse_network = { workspace = true }
eth1 = { workspace = true }
state_processing = { workspace = true }
lighthouse_version = { workspace = true }
-lighthouse_metrics = { workspace = true }
+metrics = { workspace = true }
warp_utils = { workspace = true }
slot_clock = { workspace = true }
ethereum_ssz = { workspace = true }
2 changes: 1 addition & 1 deletion beacon_node/http_api/src/metrics.rs
@@ -1,4 +1,4 @@
-pub use lighthouse_metrics::*;
+pub use metrics::*;
use std::sync::LazyLock;

pub static HTTP_API_PATHS_TOTAL: LazyLock<Result<IntCounterVec>> = LazyLock::new(|| {
2 changes: 1 addition & 1 deletion beacon_node/http_metrics/Cargo.toml
@@ -14,7 +14,7 @@ beacon_chain = { workspace = true }
store = { workspace = true }
lighthouse_network = { workspace = true }
slot_clock = { workspace = true }
-lighthouse_metrics = { workspace = true }
+metrics = { workspace = true }
lighthouse_version = { workspace = true }
warp_utils = { workspace = true }
malloc_utils = { workspace = true }
8 changes: 4 additions & 4 deletions beacon_node/http_metrics/src/metrics.rs
@@ -1,8 +1,8 @@
use crate::Context;
use beacon_chain::BeaconChainTypes;
-use lighthouse_metrics::TextEncoder;
use lighthouse_network::prometheus_client::encoding::text::encode;
use malloc_utils::scrape_allocator_metrics;
+use metrics::TextEncoder;

pub fn gather_prometheus_metrics<T: BeaconChainTypes>(
ctx: &Context<T>,
@@ -17,13 +17,13 @@ pub fn gather_prometheus_metrics<T: BeaconChainTypes>(
// - Statically updated: things which are only updated at the time of the scrape (used where we
// can avoid cluttering up code with metrics calls).
//
-// The `lighthouse_metrics` crate has a `DEFAULT_REGISTRY` global singleton
+// The `metrics` crate has a `DEFAULT_REGISTRY` global singleton
// which keeps the state of all the metrics. Dynamically updated things will already be
// up-to-date in the registry (because they update themselves) however statically updated
// things need to be "scraped".
//
// We proceed by, first updating all the static metrics using `scrape_for_metrics(..)`. Then,
-// using `lighthouse_metrics::gather(..)` to collect the global `DEFAULT_REGISTRY` metrics into
+// using `metrics::gather(..)` to collect the global `DEFAULT_REGISTRY` metrics into
// a string that can be returned via HTTP.

if let Some(beacon_chain) = ctx.chain.as_ref() {
@@ -48,7 +48,7 @@
}

encoder
-.encode_utf8(&lighthouse_metrics::gather(), &mut buffer)
+.encode_utf8(&metrics::gather(), &mut buffer)
.unwrap();
// encode gossipsub metrics also if they exist
if let Some(registry) = ctx.gossipsub_registry.as_ref() {
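
For context on the flow described by the comments above, here is a minimal, self-contained sketch of the scrape path (illustrative only, not the code in this file): statically updated metrics are refreshed first via the `scrape_for_metrics(..)`-style calls, then the global `DEFAULT_REGISTRY` is serialised with `TextEncoder::encode_utf8`.

use metrics::{gather, TextEncoder};

// Assumes static metrics were just refreshed (e.g. via scrape_for_metrics);
// encode everything in the default registry as Prometheus text format.
fn render_prometheus_text() -> String {
    let mut buffer = String::new();
    let encoder = TextEncoder::new();
    encoder
        .encode_utf8(&gather(), &mut buffer)
        .expect("encoding gathered metrics as UTF-8 text should not fail");
    buffer
}
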
2 changes: 1 addition & 1 deletion beacon_node/lighthouse_network/Cargo.toml
@@ -21,7 +21,7 @@ futures = { workspace = true }
error-chain = { workspace = true }
dirs = { workspace = true }
fnv = { workspace = true }
-lighthouse_metrics = { workspace = true }
+metrics = { workspace = true }
smallvec = { workspace = true }
tokio-io-timeout = "1"
lru = { workspace = true }
2 changes: 1 addition & 1 deletion beacon_node/lighthouse_network/src/metrics.rs
@@ -1,4 +1,4 @@
-pub use lighthouse_metrics::*;
+pub use metrics::*;
use std::sync::LazyLock;

pub static NAT_OPEN: LazyLock<Result<IntGaugeVec>> = LazyLock::new(|| {
2 changes: 1 addition & 1 deletion beacon_node/network/Cargo.toml
@@ -38,7 +38,7 @@ smallvec = { workspace = true }
rand = { workspace = true }
fnv = { workspace = true }
alloy-rlp = { workspace = true }
-lighthouse_metrics = { workspace = true }
+metrics = { workspace = true }
logging = { workspace = true }
task_executor = { workspace = true }
igd-next = "0.14"
2 changes: 1 addition & 1 deletion beacon_node/network/src/metrics.rs
@@ -5,11 +5,11 @@ use beacon_chain::{
sync_committee_verification::Error as SyncCommitteeError,
};
use fnv::FnvHashMap;
-pub use lighthouse_metrics::*;
use lighthouse_network::{
peer_manager::peerdb::client::ClientKind, types::GossipKind, GossipTopic, Gossipsub,
NetworkGlobals,
};
+pub use metrics::*;
use std::sync::{Arc, LazyLock};
use strum::IntoEnumIterator;
use types::EthSpec;
2 changes: 1 addition & 1 deletion beacon_node/network/src/sync/range_sync/chain.rs
@@ -8,9 +8,9 @@ use crate::sync::{network_context::SyncNetworkContext, BatchOperationOutcome, Ba
use beacon_chain::block_verification_types::RpcBlock;
use beacon_chain::BeaconChainTypes;
use fnv::FnvHashMap;
-use lighthouse_metrics::set_int_gauge;
use lighthouse_network::service::api_types::Id;
use lighthouse_network::{PeerAction, PeerId};
+use metrics::set_int_gauge;
use rand::seq::SliceRandom;
use rand::Rng;
use slog::{crit, debug, o, warn};
4 changes: 2 additions & 2 deletions beacon_node/operation_pool/Cargo.toml
@@ -7,7 +7,7 @@ edition = { workspace = true }
[dependencies]
derivative = { workspace = true }
itertools = { workspace = true }
-lighthouse_metrics = { workspace = true }
+metrics = { workspace = true }
parking_lot = { workspace = true }
types = { workspace = true }
state_processing = { workspace = true }
@@ -25,4 +25,4 @@ tokio = { workspace = true }
maplit = { workspace = true }

[features]
-portable = ["beacon_chain/portable"]
+portable = ["beacon_chain/portable"]
2 changes: 1 addition & 1 deletion beacon_node/operation_pool/src/metrics.rs
@@ -1,4 +1,4 @@
-pub use lighthouse_metrics::*;
+pub use metrics::*;
use std::sync::LazyLock;

pub static BUILD_REWARD_CACHE_TIME: LazyLock<Result<Histogram>> = LazyLock::new(|| {
2 changes: 1 addition & 1 deletion beacon_node/store/Cargo.toml
@@ -20,7 +20,7 @@ safe_arith = { workspace = true }
state_processing = { workspace = true }
slog = { workspace = true }
serde = { workspace = true }
-lighthouse_metrics = { workspace = true }
+metrics = { workspace = true }
lru = { workspace = true }
sloggers = { workspace = true }
directory = { workspace = true }
2 changes: 1 addition & 1 deletion beacon_node/store/src/metrics.rs
@@ -1,4 +1,4 @@
-pub use lighthouse_metrics::{set_gauge, try_create_int_gauge, *};
+pub use metrics::{set_gauge, try_create_int_gauge, *};

use directory::size_of_dir;
use std::path::Path;
10 changes: 0 additions & 10 deletions common/lighthouse_metrics/Cargo.toml

This file was deleted.

2 changes: 1 addition & 1 deletion common/logging/Cargo.toml
@@ -9,7 +9,7 @@ test_logger = [] # Print log output to stderr when running tests instead of drop

[dependencies]
chrono = { version = "0.4", default-features = false, features = ["clock", "std"] }
-lighthouse_metrics = { workspace = true }
+metrics = { workspace = true }
parking_lot = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
4 changes: 1 addition & 3 deletions common/logging/src/lib.rs
@@ -1,6 +1,4 @@
-use lighthouse_metrics::{
-    inc_counter, try_create_int_counter, IntCounter, Result as MetricsResult,
-};
+use metrics::{inc_counter, try_create_int_counter, IntCounter, Result as MetricsResult};
use slog::Logger;
use slog_term::Decorator;
use std::io::{Result, Write};
1 change: 0 additions & 1 deletion common/logging/src/tracing_metrics_layer.rs
@@ -1,6 +1,5 @@
//! Exposes [`MetricsLayer`]: A tracing layer that registers metrics of logging events.

-use lighthouse_metrics as metrics;
use std::sync::LazyLock;
use tracing_log::NormalizeEvent;

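
The `MetricsLayer` doc-commented above no longer needs the `use lighthouse_metrics as metrics;` alias, since the crate itself is now named `metrics`. As a rough, hypothetical sketch of what a tracing layer that counts log events can look like (names are illustrative and this is not Lighthouse's actual implementation):

use metrics::{inc_counter_vec, try_create_int_counter_vec, IntCounterVec};
use std::sync::LazyLock;
use tracing::{Event, Subscriber};
use tracing_subscriber::layer::{Context, Layer};

// Hypothetical counter of tracing events, labelled by level.
static EXAMPLE_LOG_EVENTS: LazyLock<metrics::Result<IntCounterVec>> = LazyLock::new(|| {
    try_create_int_counter_vec(
        "example_log_events_total",
        "Count of tracing events observed, by level",
        &["level"],
    )
});

pub struct ExampleMetricsLayer;

impl<S: Subscriber> Layer<S> for ExampleMetricsLayer {
    fn on_event(&self, event: &Event<'_>, _ctx: Context<'_, S>) {
        // Record the event under its level label, e.g. "ERROR" or "WARN".
        let level = event.metadata().level().to_string();
        inc_counter_vec(&EXAMPLE_LOG_EVENTS, &[&level]);
    }
}
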
2 changes: 1 addition & 1 deletion common/malloc_utils/Cargo.toml
@@ -5,7 +5,7 @@ authors = ["Paul Hauner <[email protected]>"]
edition = { workspace = true }

[dependencies]
-lighthouse_metrics = { workspace = true }
+metrics = { workspace = true }
libc = "0.2.79"
parking_lot = { workspace = true }
tikv-jemalloc-ctl = { version = "0.6.0", optional = true, features = ["stats"] }