Merged

Changes from 1 commit
259 changes: 169 additions & 90 deletions Cargo.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion src/bin/evtx_dump.rs
@@ -256,7 +256,7 @@ impl EvtxDump {
if self.display_allocation {
record_display = format!("{} [{}]", record_display, r.allocation)
}
writeln!(self.output, "{}", record_display)?;
writeln!(self.output, "{record_display}")?;
}
writeln!(self.output, "{}", r.data)?;
}
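The hunk above is representative of most changes in this pull request: positional `format!`/`writeln!` arguments are replaced with Rust's inline captured identifiers (stable since Rust 1.58). A minimal standalone sketch of the two equivalent forms, with illustrative variable names only:

    fn main() {
        let record_display = String::from("Record 42 [1024]");
        let value = vec![1, 2, 3];

        // Pre-1.58 style: positional arguments.
        println!("{}", record_display);
        println!("value - {:?}", value);

        // Inline captured identifiers, as used throughout this diff.
        println!("{record_display}");
        println!("value - {value:?}");
    }

Capture only works for bare identifiers, which is why `writeln!(self.output, "{}", r.data)?;` above keeps its positional argument: `r.data` is a field access.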
6 changes: 3 additions & 3 deletions src/binxml/assemble.rs
@@ -178,7 +178,7 @@ pub fn create_record_model<'a>(

Cow::Owned(BinXMLDeserializedTokens::Attribute(ref attr))
| Cow::Borrowed(&BinXMLDeserializedTokens::Attribute(ref attr)) => {
trace!("BinXMLDeserializedTokens::Attribute(attr) - {:?}", attr);
trace!("BinXMLDeserializedTokens::Attribute(attr) - {attr:?}");
if current_element.is_none() {
return Err(EvtxError::FailedToCreateRecordModel(
"attribute - Bad parser state",
@@ -199,7 +199,7 @@ pub fn create_record_model<'a>(
current_element = Some(builder);
}
Cow::Owned(BinXMLDeserializedTokens::Value(value)) => {
trace!("BinXMLDeserializedTokens::Value(value) - {:?}", value);
trace!("BinXMLDeserializedTokens::Value(value) - {value:?}");
match current_element {
None => match value {
BinXmlValue::EvtXml => {
@@ -217,7 +217,7 @@
}
}
Cow::Borrowed(BinXMLDeserializedTokens::Value(value)) => {
trace!("BinXMLDeserializedTokens::Value(value) - {:?}", value);
trace!("BinXMLDeserializedTokens::Value(value) - {value:?}");
match current_element {
None => match value {
BinXmlValue::EvtXml => {
17 changes: 8 additions & 9 deletions src/binxml/tokens.rs
@@ -61,7 +61,7 @@ pub fn read_template<'a>(
value_descriptors.push(TemplateValueDescriptor { size, value_type })
}

trace!("{:?}", value_descriptors);
trace!("{value_descriptors:?}");

let mut substitution_array = Vec::with_capacity(number_of_substitutions as usize);

@@ -80,7 +80,7 @@ pub fn read_template<'a>(
ansi_codec,
)?;

trace!("\t {:?}", value);
trace!("\t {value:?}");
// NullType can mean deleted substitution (and data need to be skipped)
if value == BinXmlValue::NullType {
trace!("\t Skipping `NullType` descriptor");
@@ -171,7 +171,7 @@ pub fn read_template_definition<'a>(
pub fn read_entity_ref(cursor: &mut Cursor<&[u8]>) -> Result<BinXmlEntityReference> {
trace!("Offset `0x{:08x}` - EntityReference", cursor.position());
let name = BinXmlNameRef::from_stream(cursor)?;
trace!("\t name: {:?}", name);
trace!("\t name: {name:?}");

Ok(BinXmlEntityReference { name })
}
@@ -204,7 +204,7 @@ pub fn read_processing_instruction_target(
);

let name = BinXmlNameRef::from_stream(cursor)?;
trace!("\tPITarget Name - {:?}", name);
trace!("\tPITarget Name - {name:?}");
Ok(BinXMLProcessingInstructionTarget { name })
}

@@ -215,7 +215,7 @@ pub fn read_processing_instruction_data(cursor: &mut Cursor<&[u8]>) -> Result<St
);

let data = try_read!(cursor, len_prefixed_utf_16_str, "pi_data")?.unwrap_or_default();
trace!("PIData - {}", data,);
trace!("PIData - {data}");
Ok(data)
}

@@ -267,8 +267,7 @@ pub fn read_open_start_element(
try_read!(cursor, u16, "open_start_element_dependency_identifier")?;

trace!(
"\t Dependency Identifier - `0x{:04x} ({})`",
_dependency_identifier, _dependency_identifier
"\t Dependency Identifier - `0x{_dependency_identifier:04x} ({_dependency_identifier})`"
);
}

@@ -294,9 +293,9 @@ pub fn read_open_start_element(
}
}

trace!("\t Data Size - {}", data_size);
trace!("\t Data Size - {data_size}");
let name = BinXmlNameRef::from_stream(cursor)?;
trace!("\t Name - {:?}", name);
trace!("\t Name - {name:?}");

let _attribute_list_data_size = if has_attributes {
try_read!(cursor, u32, "open_start_element_attribute_list_data_size")?
14 changes: 4 additions & 10 deletions src/evtx_chunk.rs
@@ -107,10 +107,7 @@ impl EvtxChunkData {
0
};

debug!(
"Expected checksum: {:?}, found: {:?}",
expected_checksum, computed_checksum
);
debug!("Expected checksum: {expected_checksum:?}, found: {computed_checksum:?}");

computed_checksum == expected_checksum
}
@@ -141,10 +138,7 @@ impl EvtxChunkData {
0
};

debug!(
"Expected checksum: {:?}, found: {:?}",
expected_checksum, computed_checksum
);
debug!("Expected checksum: {expected_checksum:?}, found: {computed_checksum:?}");

computed_checksum == expected_checksum
}
@@ -296,11 +290,11 @@ impl<'a> Iterator for IterChunkRecords<'a> {
};

info!("Record id - {}", record_header.event_record_id);
debug!("Record header - {:?}", record_header);
debug!("Record header - {record_header:?}");

let binxml_data_size = record_header.record_data_size();

trace!("Need to deserialize {} bytes of binxml", binxml_data_size);
trace!("Need to deserialize {binxml_data_size} bytes of binxml");

// `EvtxChunk` only owns `template_table`, which we want to loan to the Deserializer.
// `data` and `string_cache` are both references and are `Copy`ed when passed to init.
5 changes: 2 additions & 3 deletions src/evtx_parser.rs
@@ -307,7 +307,7 @@ impl<T: ReadSeek> EvtxParser<T> {
};
let chunk_count = chunk_data_size / EVTX_CHUNK_SIZE as u64;

debug!("EVTX Header: {:#?}", evtx_header);
debug!("EVTX Header: {evtx_header:#?}");
Ok(EvtxParser {
data: read_seek,
header: evtx_header,
@@ -335,8 +335,7 @@ impl<T: ReadSeek> EvtxParser<T> {
let chunk_offset = EVTX_FILE_HEADER_SIZE + chunk_number as usize * EVTX_CHUNK_SIZE;

trace!(
"Offset `0x{:08x} ({})` - Reading chunk number `{}`",
chunk_offset, chunk_offset, chunk_number
"Offset `0x{chunk_offset:08x} ({chunk_offset})` - Reading chunk number `{chunk_number}`"
);

data.seek(SeekFrom::Start(chunk_offset as u64))
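The rewritten trace! call above also shows that width, fill, and radix specifiers combine freely with captured identifiers, and that the same identifier may appear more than once. A self-contained sketch; the constant values are assumptions for illustration, not taken from this crate:

    fn main() {
        // Assumed sizes for illustration only.
        const EVTX_FILE_HEADER_SIZE: usize = 4096;
        const EVTX_CHUNK_SIZE: usize = 65536;

        let chunk_number: usize = 3;
        let chunk_offset = EVTX_FILE_HEADER_SIZE + chunk_number * EVTX_CHUNK_SIZE;

        // `{chunk_offset:08x}` zero-pads the hexadecimal form to eight digits;
        // the second `{chunk_offset}` reuses the same variable with the default format.
        println!(
            "Offset `0x{chunk_offset:08x} ({chunk_offset})` - Reading chunk number `{chunk_number}`"
        );
    }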
20 changes: 10 additions & 10 deletions src/json_output.rs
@@ -215,29 +215,28 @@ impl JsonOutput {
}
})?;
// We do a linear probe in case XML contains duplicate keys
if let Some(old_attribute) =
value.insert(format!("{}_attributes", name), Value::Null)
if let Some(old_attribute) = value.insert(format!("{name}_attributes"), Value::Null)
{
if let Some(old_value) = value.insert(name.to_string(), Value::Null) {
let mut free_slot = 1;
// If it is a concrete value, we look for another slot.
while value.get(&format!("{}_{}", name, free_slot)).is_some()
while value.get(&format!("{name}_{free_slot}")).is_some()
|| value
.get(&format!("{}_{}_attributes", name, free_slot))
.get(&format!("{name}_{free_slot}_attributes"))
.is_some()
{
// Value is an empty object - we can override it's value.
free_slot += 1
}
if let Some(old_value_object) = old_value.as_object() {
if !old_value_object.is_empty() {
value.insert(format!("{}_{}", name, free_slot), old_value);
value.insert(format!("{name}_{free_slot}"), old_value);
}
};
if let Some(old_attribute_object) = old_attribute.as_object() {
if !old_attribute_object.is_empty() {
value.insert(
format!("{}_{}_attributes", name, free_slot),
format!("{name}_{free_slot}_attributes"),
old_attribute,
);
};
@@ -267,11 +266,11 @@ impl JsonOutput {
if !map.is_empty() {
let mut free_slot = 1;
// If it is a concrete value, we look for another slot.
while container.get(&format!("{}_{}", name, free_slot)).is_some() {
while container.get(&format!("{name}_{free_slot}")).is_some() {
// Value is an empty object - we can override it's value.
free_slot += 1
}
container.insert(format!("{}_{}", name, free_slot), old_value);
container.insert(format!("{name}_{free_slot}"), old_value);
}
}
};
@@ -334,7 +333,7 @@ impl BinXmlOutput for JsonOutput {

fn visit_close_element(&mut self, _element: &XmlElement) -> SerializationResult<()> {
let p = self.stack.pop();
trace!("visit_close_element: {:?}", p);
trace!("visit_close_element: {p:?}");
Ok(())
}

@@ -439,7 +438,7 @@ impl BinXmlOutput for JsonOutput {
let entity_ref = "&".to_string() + entity.as_str() + ";";

let xml_event = BytesText::from_escaped(&entity_ref);
match xml_event.unescape() {
match xml_event.decode() {
Ok(escaped) => {
let as_string = escaped.to_string();

@@ -557,6 +556,7 @@ mod tests {
output.visit_end_of_stream().expect("End of stream");
break;
}
Event::GeneralRef(_) => unimplemented!(),
},
Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e),
}
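The `free_slot` loops above implement the duplicate-key handling described by the comment "We do a linear probe in case XML contains duplicate keys". A simplified sketch of that strategy using serde_json's `Map`/`Value`; the helper is hypothetical and ignores the `_attributes` sibling keys and empty-object checks handled in the real code:

    use serde_json::{Map, Value};

    // Hypothetical helper: when `name` is already present, move the previous value
    // to the first free `{name}_{n}` slot instead of silently overwriting it.
    fn insert_with_probe(map: &mut Map<String, Value>, name: &str, new_value: Value) {
        if let Some(old_value) = map.insert(name.to_string(), new_value) {
            let mut free_slot = 1;
            while map.contains_key(&format!("{name}_{free_slot}")) {
                free_slot += 1;
            }
            map.insert(format!("{name}_{free_slot}"), old_value);
        }
    }

    fn main() {
        let mut map = Map::new();
        insert_with_probe(&mut map, "Data", Value::String("first".into()));
        insert_with_probe(&mut map, "Data", Value::String("second".into()));
        insert_with_probe(&mut map, "Data", Value::String("third".into()));
        // {"Data":"third","Data_1":"first","Data_2":"second"}
        println!("{}", serde_json::to_string(&Value::Object(map)).unwrap());
    }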
2 changes: 1 addition & 1 deletion src/template_cache.rs
@@ -39,7 +39,7 @@ impl<'chunk> TemplateCache<'chunk> {

cache.insert(table_offset, definition);

trace!("Next template will be at {}", next_template_offset);
trace!("Next template will be at {next_template_offset}");

if next_template_offset == 0 {
break;
2 changes: 1 addition & 1 deletion src/xml_output.rs
@@ -77,7 +77,7 @@ impl<W: Write> BinXmlOutput for XmlOutput<W> {
}

fn visit_open_start_element(&mut self, element: &XmlElement) -> SerializationResult<()> {
trace!("visit_open_start_element: {:?}", element);
trace!("visit_open_start_element: {element:?}");

let mut event_builder = BytesStart::new(element.name.as_ref().as_str());
