Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
49 changes: 25 additions & 24 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
on:
push:
branches:
- main
- "*"
tags:
- "*"
pull_request:
Expand Down Expand Up @@ -82,28 +82,29 @@ jobs:
- name: cargo doc
run: cargo doc --locked -p puffin -p puffin_egui -p puffin_http -p --lib --no-deps --all-features

cargo-vet:
name: Vet Dependencies
runs-on: ubuntu-latest
env:
CARGO_VET_VERSION: 0.9.1
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- uses: actions/cache@v3
with:
path: ${{ runner.tool_cache }}/cargo-vet
key: cargo-vet-bin-${{ env.CARGO_VET_VERSION }}
- name: Add the tool cache directory to the search path
run: echo "${{ runner.tool_cache }}/cargo-vet/bin" >> $GITHUB_PATH
- name: Ensure that the tool cache is populated with the cargo-vet binary
# build from source, as are not published binaries yet :(
# tracked in https://github.com/mozilla/cargo-vet/issues/484
run: cargo +stable install --root ${{ runner.tool_cache }}/cargo-vet --version ${{ env.CARGO_VET_VERSION }} cargo-vet
- name: Invoke cargo-vet
run: |
cargo vet --locked
cargo vet --locked >> $GITHUB_STEP_SUMMARY
# cargo-vet:
# name: Vet Dependencies
# runs-on: ubuntu-latest
# env:
# CARGO_VET_VERSION: 0.9.1
# steps:
# - uses: actions/checkout@v4
# - uses: dtolnay/rust-toolchain@stable
# - uses: actions/cache@v3
# with:
# path: ${{ runner.tool_cache }}/cargo-vet
# key: cargo-vet-bin-${{ env.CARGO_VET_VERSION }}
# - name: Add the tool cache directory to the search path
# run: echo "${{ runner.tool_cache }}/cargo-vet/bin" >> $GITHUB_PATH
# - name: Ensure that the tool cache is populated with the cargo-vet binary
# # build from source, as there are no published binaries yet :(
# # tracked in https://github.com/mozilla/cargo-vet/issues/484
# # TODO: cargo-vet now have prebuild binaries https://github.com/mozilla/cargo-vet/releases/tag/v0.10.0
# run: cargo +stable install --root ${{ runner.tool_cache }}/cargo-vet --version ${{ env.CARGO_VET_VERSION }} cargo-vet
# - name: Invoke cargo-vet
# run: |
# cargo vet --locked
# cargo vet --locked >> $GITHUB_STEP_SUMMARY

taplo:
name: Toml format check
Expand All @@ -112,4 +113,4 @@ jobs:
- uses: actions/checkout@v4
- uses: gwen-lg/taplo-action@v1
with:
format: true
format: true
50 changes: 50 additions & 0 deletions puffin/src/data_header.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
use std::{fmt::Display, str::from_utf8};

/// Header of serialized data.
///
/// Lets readers tell the different serialized payloads apart
/// (e.g. `ScopeCollection` vs `FrameData`).
#[derive(Debug, Clone, Copy)]
pub struct DataHeader([u8; 4]);

impl DataHeader {
    /// Tries to read a 4-byte header from `read`.
    ///
    /// Consumes exactly four bytes on success; any I/O error (including
    /// hitting EOF before four bytes are available) is returned as-is.
    pub fn try_read(read: &mut impl std::io::Read) -> std::result::Result<Self, std::io::Error> {
        let mut buf = [0_u8; 4];
        read.read_exact(&mut buf).map(|()| Self(buf))
    }

    /// Return a slice containing the entire header.
    pub fn as_slice(&self) -> &[u8] {
        self.0.as_slice()
    }

    /// Return the header as array.
    pub fn bytes(&self) -> [u8; 4] {
        self.0
    }
}

impl Display for DataHeader {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Headers are expected to be ASCII tags; fall back to a placeholder
        // when the bytes are not valid UTF-8.
        match from_utf8(&self.0) {
            Ok(text) => write!(f, "{text}"),
            Err(_) => write!(f, "????"),
        }
    }
}

impl From<DataHeader> for [u8; 4] {
    fn from(val: DataHeader) -> Self {
        val.bytes()
    }
}

impl PartialEq<[u8; 4]> for &DataHeader {
    fn eq(&self, other: &[u8; 4]) -> bool {
        self.0 == *other
    }
}
impl PartialEq<&[u8]> for &DataHeader {
    fn eq(&self, other: &&[u8]) -> bool {
        self.0.as_slice() == *other
    }
}
37 changes: 12 additions & 25 deletions puffin/src/frame_data.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
#[cfg(feature = "packing")]
#[cfg(feature = "serialization")]
use crate::DataHeader;
use crate::ScopeDetails;
use crate::{Error, FrameIndex, NanoSecond, Result, StreamInfo, ThreadInfo};
#[cfg(feature = "packing")]
Expand Down Expand Up @@ -566,11 +569,7 @@ impl FrameData {
/// Writes one [`FrameData`] into a stream, prefixed by its length ([`u32`] le).
#[cfg(not(target_arch = "wasm32"))] // compression not supported on wasm
#[cfg(feature = "serialization")]
pub fn write_into(
&self,
scope_collection: Option<&crate::ScopeCollection>,
write: &mut impl std::io::Write,
) -> anyhow::Result<()> {
pub fn write_into(&self, write: &mut impl std::io::Write) -> anyhow::Result<()> {
use bincode::Options as _;
use byteorder::{LE, WriteBytesExt as _};

Expand All @@ -588,13 +587,7 @@ impl FrameData {
write.write_u8(packed_streams.compression_kind as u8)?;
write.write_all(&packed_streams.bytes)?;

let to_serialize_scopes: Vec<_> = if let Some(scope_collection) = scope_collection {
scope_collection.scopes_by_id().values().cloned().collect()
} else {
self.scope_delta.clone()
};

let serialized_scopes = bincode::options().serialize(&to_serialize_scopes)?;
let serialized_scopes = bincode::options().serialize(&self.scope_delta)?;
write.write_u32::<LE>(serialized_scopes.len() as u32)?;
write.write_all(&serialized_scopes)?;
Ok(())
Expand All @@ -605,20 +598,14 @@ impl FrameData {
/// [`None`] is returned if the end of the stream is reached (EOF),
/// or an end-of-stream sentinel of `0u32` is read.
#[cfg(feature = "serialization")]
pub fn read_next(read: &mut impl std::io::Read) -> anyhow::Result<Option<Self>> {
pub fn read_next(
read: &mut impl std::io::Read,
header: &DataHeader,
) -> anyhow::Result<Option<Self>> {
use anyhow::Context as _;
use bincode::Options as _;
use byteorder::{LE, ReadBytesExt};

let mut header = [0_u8; 4];
if let Err(err) = read.read_exact(&mut header) {
if err.kind() == std::io::ErrorKind::UnexpectedEof {
return Ok(None);
} else {
return Err(err.into());
}
}

#[derive(Clone, serde::Deserialize, serde::Serialize)]
pub struct LegacyFrameData {
pub frame_index: FrameIndex,
Expand Down Expand Up @@ -657,9 +644,9 @@ impl FrameData {
}
}

if header == [0_u8; 4] {
if header.bytes() == [0_u8; 4] {
Ok(None) // end-of-stream sentinel.
} else if header.starts_with(b"PFD") {
} else if header.as_slice().starts_with(b"PFD") {
if &header == b"PFD0" {
// Like PDF1, but compressed with `lz4_flex`.
// We stopped supporting this in 2021-11-16 in order to remove `lz4_flex` dependency.
Expand Down Expand Up @@ -792,7 +779,7 @@ impl FrameData {
}
} else {
// Very old packet without magic header
let mut bytes = vec![0_u8; u32::from_le_bytes(header) as usize];
let mut bytes = vec![0_u8; u32::from_le_bytes(header.bytes()) as usize];
read.read_exact(&mut bytes)?;

use bincode::Options as _;
Expand Down
4 changes: 4 additions & 0 deletions puffin/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,8 @@
#![deny(missing_docs)]

mod data;
#[cfg(feature = "serialization")]
mod data_header;
mod frame_data;
mod global_profiler;
mod merge;
Expand All @@ -36,6 +38,8 @@ use std::sync::atomic::{AtomicBool, Ordering};

/// TODO: Improve encapsulation.
pub use data::{Error, Reader, Result, Scope, ScopeRecord, Stream, StreamInfo, StreamInfoRef};
#[cfg(feature = "serialization")]
pub use data_header::DataHeader;
pub use frame_data::{FrameData, FrameMeta, UnpackedFrameData};
pub use global_profiler::{FrameSink, GlobalProfiler};
pub use merge::{MergeScope, merge_scopes_for_thread};
Expand Down
54 changes: 48 additions & 6 deletions puffin/src/profile_view.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ use std::{
sync::Arc,
};

#[cfg(feature = "serialization")]
use crate::DataHeader;
use crate::{FrameData, FrameSinkId, ScopeCollection};

/// A view of recent and slowest frames, used by GUIs.
Expand Down Expand Up @@ -60,6 +62,15 @@ impl FrameView {
&self.scope_collection
}

/// Initializes the scope collection of this `FrameView`.
///
/// Intended for restoring a previously serialized [`ScopeCollection`]
/// (e.g. while importing a `.puffin` stream) before any frames have
/// registered scopes of their own.
///
/// # Panics
/// Panics if the current scope collection is not empty, since replacing a
/// populated collection would silently discard already-registered scopes.
pub fn init_scope_collection(&mut self, scope_collection: ScopeCollection) {
    assert!(
        self.scope_collection.scopes_by_id().is_empty()
            && self.scope_collection.scopes_by_name().is_empty(),
        "init_scope_collection called on a FrameView with a non-empty scope collection"
    );
    self.scope_collection = scope_collection;
}

/// Adds a new frame to the view.
pub fn add_frame(&mut self, new_frame: Arc<FrameData>) {
// Register all scopes from the new frame into the scope collection.
Expand Down Expand Up @@ -227,33 +238,64 @@ impl FrameView {
#[cfg(feature = "serialization")]
#[cfg(not(target_arch = "wasm32"))] // compression not supported on wasm
pub fn write(&self, write: &mut impl std::io::Write) -> anyhow::Result<()> {
write.write_all(b"PUF0")?;
write.write_all(b"PUF1")?;

self.scope_collection.write_into(write)?;

for frame in self.all_uniq() {
frame.write_into(None, write)?;
frame.write_into(write)?;
}
Ok(())
}

/// Import profile data from a `.puffin` file/stream.
///
/// Accepts both the legacy `PUF0` layout (frames only) and the current
/// `PUF1` layout (scope collection followed by frames).
#[cfg(feature = "serialization")]
pub fn read(read: &mut impl std::io::Read) -> anyhow::Result<Self> {
    const MAGIC_0: &[u8; 4] = b"PUF0";
    const MAGIC_1: &[u8; 4] = b"PUF1";

    let mut magic = [0_u8; 4];
    read.read_exact(&mut magic)?;
    if &magic != MAGIC_0 && &magic != MAGIC_1 {
        anyhow::bail!(
            "Expected .puffin magic header of '{:?}' or `{:?}`, found {:?}",
            MAGIC_0,
            MAGIC_1,
            magic
        );
    }

    let mut slf = Self {
        max_recent: usize::MAX,
        ..Default::default()
    };

    while let Some(header) = Self::read_header(read)? {
        if header.bytes().starts_with(b"PSC") {
            slf.init_scope_collection(ScopeCollection::read(read, &header)?);
        } else {
            // `FrameData::read_next` understands the "PFD*" headers, the
            // legacy length-prefixed packets (no magic header), and the
            // `0u32` end-of-stream sentinel (for which it returns `None`).
            // Routing every non-PSC header through it avoids silently
            // skipping unknown headers and desynchronizing the stream.
            match FrameData::read_next(read, &header)? {
                Some(frame) => slf.add_frame(frame.into()),
                None => break, // end-of-stream sentinel
            }
        }
    }

    Ok(slf)
}

/// Reads the next [`DataHeader`] from the stream.
///
/// Returns `Ok(None)` when the stream is exhausted (EOF while reading the
/// four header bytes); any other I/O error is propagated.
#[cfg(feature = "serialization")]
fn read_header(read: &mut impl std::io::Read) -> Result<Option<DataHeader>, anyhow::Error> {
    match DataHeader::try_read(read) {
        Ok(header) => Ok(Some(header)),
        Err(err) if err.kind() == std::io::ErrorKind::UnexpectedEof => Ok(None),
        Err(err) => Err(err.into()),
    }
}
}

// ----------------------------------------------------------------------------
Expand Down
42 changes: 42 additions & 0 deletions puffin/src/scope_details.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,48 @@ impl ScopeCollection {
pub fn scopes_by_id(&self) -> &HashMap<ScopeId, Arc<ScopeDetails>> {
&self.0.scope_id_to_details
}

// Magic header identifying version 1 of the serialized scope collection
// format (see `Self::read`).
#[cfg(feature = "serialization")]
const PSC1: &[u8] = b"PSC1";

/// Writes [`ScopeCollection`] into a stream.
///
/// The payload starts with the `PSC1` magic header, followed by the
/// bincode-serialized list of all registered scope details.
#[cfg(feature = "serialization")]
pub fn write_into(&self, write: &mut impl std::io::Write) -> anyhow::Result<()> {
    use bincode::Options as _;

    // Header first, so readers can tell this payload apart from frame data.
    write.write_all(Self::PSC1)?;
    let scope_collection = self.scopes_by_id().values().collect::<Vec<_>>();
    bincode::options().serialize_into(write, &scope_collection)?;

    Ok(())
}

/// Read [`ScopeCollection`] from a stream.
///
/// `header` must already have been consumed from `read` by the caller; it
/// selects the on-disk format version (currently only `PSC1`).
#[cfg(feature = "serialization")]
pub fn read(read: &mut impl std::io::Read, header: &crate::DataHeader) -> anyhow::Result<Self> {
    use anyhow::anyhow;
    if header.as_slice() == Self::PSC1 {
        Self::read_scope_collection_format1(read)
    } else {
        Err(anyhow!("`{header}` is not a valid ScopeCollection header"))
    }
}

/// Decodes the `PSC1` payload: a bincode-encoded `Vec<ScopeDetails>` that
/// is re-registered into a fresh collection.
#[cfg(feature = "serialization")]
fn read_scope_collection_format1(read: &mut impl std::io::Read) -> Result<Self, anyhow::Error> {
    use anyhow::Context;
    use bincode::Options;

    let details: Vec<ScopeDetails> = bincode::options()
        .deserialize_from(read)
        .context("read scopes collection")?;

    let mut collection = Self::default();
    for scope in details {
        collection.insert(scope.into());
    }

    Ok(collection)
}
}

/// Scopes are identified by user-provided name while functions are identified by the function name.
Expand Down
Loading
Loading