Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions application/apps/indexer/gui/application/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,8 @@ chrono.workspace = true
chrono-tz.workspace = true
dirs.workspace = true
serialport.workspace = true
dlt-core.workspace = true
rustc-hash = "2.1"

#TODO: Replace env logger with log4rs
env_logger.workspace = true
Expand Down
7 changes: 7 additions & 0 deletions application/apps/indexer/gui/application/src/host/command.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ pub enum HostCommand {
stream: StreamNames,
parser: ParserNames,
},
DltStatistics(Box<DltStatisticsParam>),
StartSession(Box<StartSessionParam>),
CloseSessionSetup(Uuid),
CloseMultiSetup(Uuid),
Expand All @@ -43,6 +44,12 @@ pub enum HostCommand {
},
}

/// Parameters for `HostCommand::DltStatistics`: request collection of DLT
/// statistics over a set of source files on behalf of a session setup.
#[derive(Debug, Clone)]
pub struct DltStatisticsParam {
    /// Id of the session setup the collected statistics belong to.
    pub session_setup_id: Uuid,
    /// Paths of the DLT source files to scan.
    pub source_paths: Vec<PathBuf>,
}

#[derive(Debug, Clone)]
pub struct StartSessionParam {
pub parser: ParserConfig,
Expand Down
176 changes: 176 additions & 0 deletions application/apps/indexer/gui/application/src/host/common/dlt_stats.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,176 @@
use dlt_core::{
dlt::LogLevel,
parse::DltParseError,
read::DltMessageReader,
statistics::{Statistic, StatisticCollector, collect_statistics},
};
use rustc_hash::{FxHashMap, FxHashSet};
use std::{fs::File, path::PathBuf};

/// Collects the DLT statistics from the given source files.
///
/// Opens every file in `sources` and feeds it through the DLT statistics
/// collector, merging the results into one [`DltStatistics`].
///
/// # Errors
///
/// Returns a string error naming the first source that could not be
/// opened or parsed.
pub fn dlt_statistics(sources: Vec<PathBuf>) -> Result<DltStatistics, String> {
    let mut statistics = DltStatistics::default();

    for source in sources {
        // Open the path directly. Converting to `String` first (via
        // `into_os_string().into_string()`) would needlessly clone the
        // PathBuf and spuriously reject valid non-UTF-8 paths, which
        // `File::open` accepts without any conversion.
        let file = File::open(&source).map_err(|error| format!("{:?}: {}", source, error))?;
        let mut reader = DltMessageReader::new(file, true);
        collect_statistics(&mut reader, &mut statistics)
            .map_err(|error| format!("{:?}: {}", source, error))?;
    }

    Ok(statistics)
}

/// The statistics-info of DLT files.
#[derive(Debug, Default, Clone)]
pub struct DltStatistics {
    // Running message counter: each collected message gets the next id,
    // which is then stored in the per-level sets below.
    counter: usize,
    /// Level distribution over all messages.
    pub total: LevelDistribution,
    /// Level distribution per application id (extended header only).
    pub app_ids: FxHashMap<String, LevelDistribution>,
    /// Level distribution per context id (extended header only).
    pub ctx_ids: FxHashMap<String, LevelDistribution>,
    /// Level distribution per ECU id ("NONE" when the header carries none).
    pub ecu_ids: FxHashMap<String, LevelDistribution>,
}

impl DltStatistics {
    /// Total number of distinct application, context and ECU ids seen.
    pub fn count(&self) -> usize {
        self.app_ids.len() + self.ctx_ids.len() + self.ecu_ids.len()
    }
}

/// The Level distribution of DLT messages.
///
/// Each field holds the set of message ids (assigned by the collector's
/// running counter) that were observed at that log level.
#[derive(Debug, Default, Clone)]
pub struct LevelDistribution {
    pub fatal: FxHashSet<usize>,
    pub error: FxHashSet<usize>,
    pub warn: FxHashSet<usize>,
    pub info: FxHashSet<usize>,
    pub debug: FxHashSet<usize>,
    pub verbose: FxHashSet<usize>,
    // Messages without a log level (no extended header info).
    pub none: FxHashSet<usize>,
    // Messages carrying an invalid/unknown log level value.
    pub invalid: FxHashSet<usize>,
}

impl LevelDistribution {
    /// Total number of recorded message ids across all levels.
    pub fn count(&self) -> usize {
        self.values().iter().sum()
    }

    /// Per-level set sizes, in the fixed order:
    /// fatal, error, warn, info, debug, verbose, none, invalid.
    pub fn values(&self) -> [usize; 8] {
        [
            self.fatal.len(),
            self.error.len(),
            self.warn.len(),
            self.info.len(),
            self.debug.len(),
            self.verbose.len(),
            self.none.len(),
            self.invalid.len(),
        ]
    }

    /// Unions `other` into `self`, level by level.
    pub fn merge(&mut self, other: &LevelDistribution) -> &mut Self {
        self.fatal.extend(&other.fatal);
        self.error.extend(&other.error);
        self.warn.extend(&other.warn);
        self.info.extend(&other.info);
        self.debug.extend(&other.debug);
        self.verbose.extend(&other.verbose);
        self.none.extend(&other.none);
        self.invalid.extend(&other.invalid);
        self
    }

    /// Keeps in `self` only the message ids also present in `other`,
    /// level by level.
    pub fn intersect(&mut self, other: &LevelDistribution) -> &mut Self {
        self.fatal.retain(|m| other.fatal.contains(m));
        self.error.retain(|m| other.error.contains(m));
        self.warn.retain(|m| other.warn.contains(m));
        self.info.retain(|m| other.info.contains(m));
        self.debug.retain(|m| other.debug.contains(m));
        self.verbose.retain(|m| other.verbose.contains(m));
        self.none.retain(|m| other.none.contains(m));
        self.invalid.retain(|m| other.invalid.contains(m));
        self
    }
}

impl StatisticCollector for DltStatistics {
    /// Folds one parsed message's statistic into the running totals:
    /// the overall level distribution plus the per-ECU, per-app and
    /// per-context distributions.
    fn collect_statistic(&mut self, statistic: Statistic) -> Result<(), DltParseError> {
        // Assign the next message id; the per-level sets store these ids.
        self.counter += 1;
        let msg = self.counter;

        let level = statistic.log_level;
        add_for_level(&mut self.total, level, msg);

        let header = statistic.standard_header;
        // `unwrap_or_else` avoids allocating the "NONE" String on every
        // message that does carry an ECU id (the common case).
        add_for_id(
            &mut self.ecu_ids,
            header.ecu_id.unwrap_or_else(|| "NONE".to_string()),
            level,
            msg,
        );

        // App and context ids only exist in the extended header.
        if let Some(header) = statistic.extended_header {
            add_for_id(&mut self.app_ids, header.application_id, level, msg);
            add_for_id(&mut self.ctx_ids, header.context_id, level, msg);
        }

        Ok(())
    }
}

/// Records `msg` under `level` in the distribution stored for `id`,
/// creating an empty distribution the first time the id is seen.
fn add_for_id(
    ids: &mut FxHashMap<String, LevelDistribution>,
    id: String,
    level: Option<LogLevel>,
    msg: usize,
) {
    // Entry API: one hash lookup instead of the get_mut-then-insert
    // pattern, which hashes the key twice on the miss path.
    add_for_level(ids.entry(id).or_default(), level, msg);
}

fn add_for_level(levels: &mut LevelDistribution, level: Option<LogLevel>, msg: usize) {
match level {
None => {
levels.none.insert(msg);
}
Some(LogLevel::Fatal) => {
levels.fatal.insert(msg);
}
Some(LogLevel::Error) => {
levels.error.insert(msg);
}
Some(LogLevel::Warn) => {
levels.warn.insert(msg);
}
Some(LogLevel::Info) => {
levels.info.insert(msg);
}
Some(LogLevel::Debug) => {
levels.debug.insert(msg);
}
Some(LogLevel::Verbose) => {
levels.verbose.insert(msg);
}
Some(LogLevel::Invalid(_)) => {
levels.invalid.insert(msg);
}
}
}
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
pub mod colors;
pub mod dlt_stats;
pub mod file_utls;
pub mod parsers;
pub mod sources;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ pub struct ServiceHandle {
}

/// Provide functions to send host messages and waking up the UI on them.
#[derive(Debug)]
#[derive(Debug, Clone)]
pub struct ServiceSenders {
message_tx: mpsc::Sender<HostMessage>,
notification_tx: mpsc::Sender<AppNotification>,
Expand Down
11 changes: 9 additions & 2 deletions application/apps/indexer/gui/application/src/host/message.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
use uuid::Uuid;

use crate::{
host::ui::{multi_setup::state::MultiFileState, session_setup::state::SessionSetupState},
host::{
common::dlt_stats::DltStatistics,
ui::{multi_setup::state::MultiFileState, session_setup::state::SessionSetupState},
},
session::InitSessionParams,
};

Expand All @@ -14,7 +17,11 @@ pub enum HostMessage {
SessionSetupClosed { id: Uuid },
/// Close multiple files setup with the provided id.
MultiSetupClose { id: Uuid },

/// The collected DLT statistics on a file for a SessionSetup
DltStatistics {
setup_session_id: Uuid,
statistics: Option<DltStatistics>,
Comment thread
kruss marked this conversation as resolved.
},
/// A new session has been successfully created.
SessionCreated {
session_params: InitSessionParams,
Expand Down
88 changes: 82 additions & 6 deletions application/apps/indexer/gui/application/src/host/service/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,15 +10,16 @@ use itertools::Itertools;
use tokio::runtime::Handle;
use uuid::Uuid;

use parsers::dlt::DltFilterConfig;
use stypes::{
DltParserSettings, FileFormat, ObserveOptions, ObserveOrigin, ParserType, SomeIpParserSettings,
Transport,
};

use crate::{
host::{
command::{HostCommand, StartSessionParam},
common::{parsers::ParserNames, sources::StreamNames},
command::{DltStatisticsParam, HostCommand, StartSessionParam},
common::{dlt_stats::dlt_statistics, parsers::ParserNames, sources::StreamNames},
communication::ServiceHandle,
error::HostError,
message::HostMessage,
Expand Down Expand Up @@ -110,6 +111,16 @@ impl HostService {
self.connection_session_setup(stream, parser).await
}

HostCommand::DltStatistics(statistics_param) => {
let DltStatisticsParam {
session_setup_id,
source_paths,
} = *statistics_param;

self.collect_statistics(session_setup_id, source_paths)
.await?;
}

HostCommand::StartSession(start_params) => {
let StartSessionParam {
parser,
Expand Down Expand Up @@ -154,7 +165,7 @@ impl HostService {
FileFormat::Text => ParserConfig::Text,
FileFormat::Binary => {
if Self::is_dlt_file(&file_path) {
ParserConfig::Dlt(DltParserConfig::new(true))
ParserConfig::Dlt(DltParserConfig::new(true, Some(vec![file_path.clone()])))
} else {
return Err(HostError::InitSessionError(InitSessionError::Other(
format!(
Expand Down Expand Up @@ -329,7 +340,7 @@ impl HostService {
}
_ => {}
}
ParserConfig::Dlt(DltParserConfig::new(true))
ParserConfig::Dlt(DltParserConfig::new(true, Some(files.clone())))
}
};

Expand Down Expand Up @@ -401,7 +412,7 @@ impl HostService {
};

let parser = match parser {
ParserNames::Dlt => ParserConfig::Dlt(DltParserConfig::new(false)),
ParserNames::Dlt => ParserConfig::Dlt(DltParserConfig::new(false, None)),
ParserNames::SomeIP => ParserConfig::SomeIP(SomeIpParserConfig::default()),
ParserNames::Text => ParserConfig::Text,
ParserNames::Plugins => todo!(),
Expand All @@ -415,6 +426,43 @@ impl HostService {
.await;
}

/// Collects DLT statistics for `source_paths` on the blocking thread pool
/// and reports the outcome back to the UI for the given session setup.
///
/// On success a `HostMessage::DltStatistics` with the statistics is sent;
/// on failure an error notification is sent first, followed by a
/// `DltStatistics` message with `statistics: None` so the setup view can
/// leave its loading state. The spawned task is deliberately detached.
async fn collect_statistics(
    &self,
    setup_session_id: Uuid,
    source_paths: Vec<PathBuf>,
) -> Result<(), HostError> {
    let senders = self.communication.senders.clone();
    // Capture the runtime handle here, where a runtime context is
    // guaranteed, instead of calling Handle::current() inside the
    // blocking closure (and doing so twice, once per match arm).
    let handle = Handle::current();
    tokio::task::spawn_blocking(move || {
        let result = dlt_statistics(source_paths);
        handle.block_on(async move {
            match result {
                Ok(statistics) => {
                    senders
                        .send_message(HostMessage::DltStatistics {
                            setup_session_id,
                            statistics: Some(statistics),
                        })
                        .await;
                }
                Err(error) => {
                    senders
                        .send_notification(AppNotification::Error(error))
                        .await;
                    senders
                        .send_message(HostMessage::DltStatistics {
                            setup_session_id,
                            statistics: None,
                        })
                        .await;
                }
            }
        });
    });

    Ok(())
}

async fn start_session(
&self,
source: ByteSourceConfig,
Expand Down Expand Up @@ -450,6 +498,34 @@ impl HostService {

let parser = match parser {
ParserConfig::Dlt(config) => {
let (app_ids, ctx_ids, ecu_ids) = (
config.dlt_tables.app_table.selected_ids,
config.dlt_tables.ctx_table.selected_ids,
config.dlt_tables.ecu_table.selected_ids,
);

let app_id_count = app_ids.len() as i64;
let app_ids =
(app_id_count > 0).then(|| app_ids.into_iter().collect::<Vec<String>>());

let context_id_count = ctx_ids.len() as i64;
let context_ids =
(context_id_count > 0).then(|| ctx_ids.into_iter().collect::<Vec<String>>());

let ecu_ids = ecu_ids
.is_empty()
.not()
.then(|| ecu_ids.into_iter().collect::<Vec<String>>());

let filter_config = DltFilterConfig {
min_log_level: Some(config.log_level as u8),
app_ids,
ecu_ids,
context_ids,
app_id_count,
context_id_count,
};

let fibex_file_paths = config.fibex_files.is_empty().not().then(|| {
config
.fibex_files
Expand All @@ -459,7 +535,7 @@ impl HostService {
});

let dlt_config = DltParserSettings {
filter_config: None,
filter_config: Some(filter_config),
fibex_file_paths,
with_storage_header: config.with_storage_header,
tz: config.timezone,
Expand Down
Loading