108 changes: 85 additions & 23 deletions Cargo.lock

Some generated files are not rendered by default.

6 changes: 5 additions & 1 deletion Cargo.toml
@@ -20,7 +20,11 @@ aws-config = { version = "1.6.3", features = ["behavior-version-latest"] }
aws-sdk-kms = "1.75.0"
actix-web = "4"
log = "0.4"
simplelog = "0.12"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter", "json", "tracing-log"] }
tracing-error = { version = "0.2" }
tracing-appender = "0.2"
tracing-actix-web = "0.7"
prometheus = "0.14"
lazy_static = "1.5"
dotenvy = "0.15"
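The dependency change swaps simplelog for the tracing stack; the "tracing-log" feature of tracing-subscriber bridges the existing log::info! calls (still used in the handlers below) into tracing events. A minimal initialization sketch under that assumption — the function name and defaults are illustrative, not taken from this PR:

use tracing_subscriber::EnvFilter;

// Illustrative only: a minimal subscriber setup using the crates added above.
fn init_logging_sketch() {
    // Honor RUST_LOG when present, otherwise default to "info".
    let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"));

    tracing_subscriber::fmt()
        .with_env_filter(filter)
        .compact()
        .init(); // with the "tracing-log" feature, log::info! records flow through tracing too
}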
22 changes: 22 additions & 0 deletions src/constants/logging.rs
@@ -0,0 +1,22 @@
//! Logging configuration constants

/// Default maximum log file size in bytes (1GB)
pub const DEFAULT_MAX_LOG_FILE_SIZE: u64 = 1_073_741_824;

/// Default log level when not specified
pub const DEFAULT_LOG_LEVEL: &str = "info";

/// Default log format when not specified
pub const DEFAULT_LOG_FORMAT: &str = "compact";

/// Default log mode when not specified
pub const DEFAULT_LOG_MODE: &str = "stdout";

/// Default log directory for file logging
pub const DEFAULT_LOG_DIR: &str = "./logs";

/// Default log directory when running in Docker
pub const DOCKER_LOG_DIR: &str = "logs/";

/// Log file name
pub const LOG_FILE_NAME: &str = "relayer.log";
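
One plausible use of these constants is as fallbacks for environment-driven configuration; the variable names in the sketch below are assumptions for illustration, not taken from this PR:

use std::{env, path::PathBuf};

// Illustrative only: resolve logging settings, falling back to the constants above.
// The environment variable names are assumed, not confirmed by this diff.
pub fn resolve_log_target() -> (String, String, PathBuf) {
    let level = env::var("LOG_LEVEL").unwrap_or_else(|_| DEFAULT_LOG_LEVEL.to_string());
    let mode = env::var("LOG_MODE").unwrap_or_else(|_| DEFAULT_LOG_MODE.to_string());
    let dir = env::var("LOG_DATA_DIR").unwrap_or_else(|_| DEFAULT_LOG_DIR.to_string());
    (level, mode, PathBuf::from(dir).join(LOG_FILE_NAME))
}

Note that tracing-appender 0.2 only rotates log files by time (minutely, hourly, daily, or never), so enforcing DEFAULT_MAX_LOG_FILE_SIZE would need size-based rotation logic layered on top.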
3 changes: 3 additions & 0 deletions src/constants/mod.rs
@@ -37,3 +37,6 @@ pub use transactions::*;

mod network_tags;
pub use network_tags::*;

mod logging;
pub use logging::*;
3 changes: 3 additions & 0 deletions src/jobs/handlers/notification_handler.rs
@@ -3,6 +3,7 @@
//! This module implements the notification handling worker that processes
//! notification jobs from the queue.

use crate::setup_job_tracing;
use actix_web::web::ThinData;
use apalis::prelude::{Attempt, Data, *};
use eyre::Result;
@@ -29,6 +30,8 @@ pub async fn notification_handler(
context: Data<ThinData<DefaultAppState>>,
attempt: Attempt,
) -> Result<(), Error> {
setup_job_tracing!(job, attempt);

info!("handling notification: {:?}", job.data);

let result = handle_request(job.data, context).await;
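The setup_job_tracing! macro itself is not part of this excerpt; the same call is added to each handler below. Presumably it opens a tracing span carrying the job's request_id and the attempt count so the log::info! lines inside the handler are correlated with the originating HTTP request. A purely hypothetical expansion, for illustration only:

// Hypothetical sketch of what a macro like setup_job_tracing! could expand to;
// the real macro is defined elsewhere in the crate and may differ.
macro_rules! setup_job_tracing_sketch {
    ($job:expr, $attempt:expr) => {
        let _job_span = tracing::info_span!(
            "job",
            request_id = $job.request_id.as_deref().unwrap_or("unknown"),
            attempt = $attempt.current() as u64,
        )
        .entered();
    };
}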
3 changes: 3 additions & 0 deletions src/jobs/handlers/solana_swap_request_handler.rs
@@ -3,6 +3,7 @@
//! This module implements the solana token swap request handling worker that processes
//! notification jobs from the queue.

use crate::setup_job_tracing;
use actix_web::web::ThinData;
use apalis::prelude::{Attempt, Data, *};
use eyre::Result;
@@ -29,6 +30,8 @@ pub async fn solana_token_swap_request_handler(
context: Data<ThinData<DefaultAppState>>,
attempt: Attempt,
) -> Result<(), Error> {
setup_job_tracing!(job, attempt);

info!("handling solana token swap request: {:?}", job.data);

let result = handle_request(job.data, context).await;
3 changes: 3 additions & 0 deletions src/jobs/handlers/transaction_request_handler.rs
@@ -2,6 +2,7 @@
//!
//! Handles the validation and preparation of transactions before they are
//! submitted to the network
use crate::setup_job_tracing;
use actix_web::web::ThinData;
use apalis::prelude::{Attempt, Context, Data, TaskId, Worker, *};
use apalis_redis::RedisContext;
@@ -23,6 +24,8 @@ pub async fn transaction_request_handler(
task_id: TaskId,
ctx: RedisContext,
) -> Result<(), Error> {
setup_job_tracing!(job, attempt);

info!("Handling transaction request: {:?}", job.data);
info!("Attempt: {:?}", attempt);
info!("Worker: {:?}", worker);
3 changes: 3 additions & 0 deletions src/jobs/handlers/transaction_status_handler.rs
@@ -7,6 +7,7 @@
use actix_web::web::ThinData;
use apalis::prelude::{Attempt, Data, *};

use crate::setup_job_tracing;
use eyre::Result;
use log::info;

@@ -22,6 +23,8 @@ pub async fn transaction_status_handler(
state: Data<ThinData<DefaultAppState>>,
attempt: Attempt,
) -> Result<(), Error> {
setup_job_tracing!(job, attempt);

info!("Handling transaction status job: {:?}", job.data);

let result = handle_request(job.data, state).await;
3 changes: 3 additions & 0 deletions src/jobs/handlers/transaction_submission_handler.rs
@@ -5,6 +5,7 @@
//! - Handles different submission commands (Submit, Cancel, Resubmit)
//! - Updates transaction status after submission
//! - Enqueues status monitoring jobs
use crate::setup_job_tracing;
use actix_web::web::ThinData;
use apalis::prelude::{Attempt, Data, *};
use eyre::Result;
@@ -22,6 +23,8 @@ pub async fn transaction_submission_handler(
state: Data<ThinData<DefaultAppState>>,
attempt: Attempt,
) -> Result<(), Error> {
setup_job_tracing!(job, attempt);

info!("handling transaction submission: {:?}", job.data);

let result = handle_request(job.data, state).await;
7 changes: 7 additions & 0 deletions src/jobs/job.rs
@@ -19,6 +19,8 @@ pub struct Job<T> {
pub timestamp: String,
pub job_type: JobType,
pub data: T,
#[serde(skip_serializing_if = "Option::is_none")]
pub request_id: Option<String>,
}

impl<T> Job<T> {
@@ -29,8 +31,13 @@ impl<T> Job<T> {
timestamp: Utc::now().timestamp().to_string(),
job_type,
data,
request_id: None,
}
}
pub fn with_request_id(mut self, id: Option<String>) -> Self {
self.request_id = id;
self
}
}

// Enum to represent different message types
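The builder-style with_request_id keeps existing Job::new call sites compiling while letting producers attach a correlation id, and serde skips the field when it is None so previously serialized payloads stay unchanged. A small usage sketch (the String payload and id value are placeholders):

// Illustrative usage; any job data type works in place of the String payload.
let job = Job::new(JobType::NotificationSend, "payload".to_string())
    .with_request_id(Some("req-1234".to_string()));
assert_eq!(job.request_id.as_deref(), Some("req-1234"));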
16 changes: 11 additions & 5 deletions src/jobs/job_producer.rs
@@ -11,6 +11,7 @@ use crate::{
Job, NotificationSend, Queue, TransactionRequest, TransactionSend, TransactionStatusCheck,
},
models::RelayerError,
observability::request_id::get_request_id,
};
use apalis::prelude::Storage;
use apalis_redis::RedisError;
@@ -124,7 +125,8 @@ impl JobProducerTrait for JobProducer {
transaction_process_job
);
let mut queue = self.queue.lock().await;
let job = Job::new(JobType::TransactionRequest, transaction_process_job);
let job = Job::new(JobType::TransactionRequest, transaction_process_job)
.with_request_id(get_request_id());

match scheduled_on {
Some(scheduled_on) => {
@@ -148,7 +150,8 @@
scheduled_on: Option<i64>,
) -> Result<(), JobProducerError> {
let mut queue = self.queue.lock().await;
let job = Job::new(JobType::TransactionSend, transaction_submit_job);
let job = Job::new(JobType::TransactionSend, transaction_submit_job)
.with_request_id(get_request_id());

match scheduled_on {
Some(on) => {
@@ -172,7 +175,8 @@
let job = Job::new(
JobType::TransactionStatusCheck,
transaction_status_check_job,
);
)
.with_request_id(get_request_id());
match scheduled_on {
Some(on) => {
queue.transaction_status_queue.schedule(job, on).await?;
@@ -191,7 +195,8 @@
scheduled_on: Option<i64>,
) -> Result<(), JobProducerError> {
let mut queue = self.queue.lock().await;
let job = Job::new(JobType::NotificationSend, notification_send_job);
let job = Job::new(JobType::NotificationSend, notification_send_job)
.with_request_id(get_request_id());

match scheduled_on {
Some(on) => {
@@ -212,7 +217,8 @@
scheduled_on: Option<i64>,
) -> Result<(), JobProducerError> {
let mut queue = self.queue.lock().await;
let job = Job::new(JobType::SolanaTokenSwapRequest, solana_swap_request_job);
let job = Job::new(JobType::SolanaTokenSwapRequest, solana_swap_request_job)
.with_request_id(get_request_id());

match scheduled_on {
Some(on) => {
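observability::request_id::get_request_id is not shown in this excerpt; it evidently returns an Option<String> drawn from the current request context. One plausible shape is a tokio task-local that HTTP middleware populates for the duration of a request — the following is a guess at the module, not the PR's actual implementation:

// Hypothetical sketch of an observability::request_id module; the real one is
// not part of this diff and may be implemented differently.
tokio::task_local! {
    static REQUEST_ID: String;
}

/// Returns the request id for the current task, if middleware set one.
pub fn get_request_id() -> Option<String> {
    REQUEST_ID.try_with(|id| id.clone()).ok()
}

/// Runs `fut` with `id` visible to any get_request_id() call inside it.
pub async fn with_request_id<F, T>(id: String, fut: F) -> T
where
    F: std::future::Future<Output = T>,
{
    REQUEST_ID.scope(id, fut).await
}

With something like this in place, each enqueued job inherits the id of the API call that created it, which is what the .with_request_id(get_request_id()) calls above rely on.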