More efficient Rust implementation

This commit is contained in:
2024-11-18 13:29:47 +11:00
parent c001b872a8
commit 279664fc84
2 changed files with 59 additions and 99 deletions

View File

@@ -1,30 +0,0 @@
This is the secret confession of Richard Buckland
to be revealed by anonymous email if I should
mysteriously vanish. I have left the last few hex
digits of the SHA256 hash of this message with my
trusted solicitor, Dennis Denuto, which will verify
that this is indeed my intended and unaltered
confession written by me Richard Buckland.
Dennis has not seen this confession he has only seen
the last few digits of the hash. I have also sent copies
of the last few digits to my bank manager and to my priest
Father Brown.
On the 10th of February I saw Mark Zukerberg near my
house and we struck up a conversation. He explained all
the things he was doing to ensure that Facebook respects
privacy - both of its users and of others. It was very
impressive.
I feel awful that I have been criticising Facebook publicly
for so long. I apologised to him in our conversation and
now I want to confess to the world that actually Facebook
has more than enough privacy features, and that the reason
I spend so much time criticising Facebook is that I am
envious of Mark and wish I was a clever and smart and wise
as he is. I feel so bad for having been so mean to him for
so many years that I am considering retreating to the outback.
I may well cut off all contact with the world and live as a
hermit from now on. So do not worry if I vanish it is just
that I feel so guilty that I have been so unfair to Facebook.

View File

@@ -1,40 +1,40 @@
use rayon::prelude::*;
use sha2::{Digest, Sha256};
use std::collections::HashMap;
use std::fs;
use std::sync::{Arc, Mutex};
use std::fs::{self, File};
use std::io::{BufRead, BufReader};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc};
use std::time::Instant;
// Function to calculate the hash for the given modified file content
fn calculate_hash(file_lines: &[String], num_chars: usize) -> String {
let concatenated = file_lines.join("\n");
// Function to calculate the hash for the given file content
fn calculate_hash(file_content: &[String], num_chars: usize) -> String {
let concatenated = file_content.concat(); // Concatenate without joining with newlines
let hash = Sha256::digest(concatenated.as_bytes());
format!("{:x}", hash)[64 - num_chars..].to_string()
}
/// Return a copy of `fake_og` in which line `idx` gains one extra space
/// iff bit `idx` of `bit_pattern` is set.
///
/// The space is inserted *before* the line terminator (handling both `\n`
/// and `\r\n`), so it stays an invisible trailing space. Appending it after
/// the stored line — which includes its own newline — would place the space
/// at the start of the *next* line instead.
///
/// Only the first 64 lines can be toggled because `bit_pattern` is a `u64`;
/// later lines are copied unchanged. The explicit `idx < 64` guard avoids
/// the shift-overflow panic that `bit_pattern >> idx` raises in debug
/// builds when the fake file has 64 or more lines.
fn modify_fake_file(fake_og: &[String], bit_pattern: u64) -> Vec<String> {
    fake_og
        .iter()
        .enumerate()
        .map(|(idx, line)| {
            if idx < 64 && (bit_pattern >> idx) & 1 == 1 {
                // Re-attach the terminator after the inserted space.
                if let Some(body) = line.strip_suffix("\r\n") {
                    format!("{} \r\n", body)
                } else if let Some(body) = line.strip_suffix('\n') {
                    format!("{} \n", body)
                } else {
                    format!("{} ", line)
                }
            } else {
                line.clone()
            }
        })
        .collect()
}
/// Read `file_path` into a vector of lines, each keeping its original line
/// terminator, so that `concat()`-ing the result reproduces the file's
/// exact bytes.
///
/// Implemented with `split_inclusive('\n')` over the whole file rather than
/// `BufRead::lines()`: `lines()` strips both `\n` and `\r\n` terminators,
/// and re-appending a bare `'\n'` would (a) silently convert CRLF endings
/// to LF and (b) invent a trailing newline on files that do not end with
/// one — either change alters the SHA-256 digest this program compares.
///
/// # Panics
/// Panics if the file cannot be read or is not valid UTF-8.
fn read_file_preserving_newlines(file_path: &str) -> Vec<String> {
    let content = fs::read_to_string(file_path).expect("Failed to read file");
    content
        .split_inclusive('\n')
        .map(str::to_string)
        .collect()
}
fn main() {
@@ -49,74 +49,65 @@ fn main() {
let fake_file = &args[2];
let num_chars: usize = args[3].parse().expect("Invalid number of characters");
// Read the original files
let real_og = fs::read_to_string(real_file)
.expect("Failed to read real file")
.lines()
.map(|s| s.to_string())
.collect::<Vec<String>>();
// Read the original files while preserving line endings
let real_og = read_file_preserving_newlines(real_file);
let fake_og = read_file_preserving_newlines(fake_file);
let fake_og = fs::read_to_string(fake_file)
.expect("Failed to read fake file")
.lines()
.map(|s| s.to_string())
.collect::<Vec<String>>();
// Calculate the target hash of the original real file
let target_real_hash = calculate_hash(&real_og, num_chars);
println!("Target hash (real file): {}", target_real_hash);
// Shared data structures for hash tracking
let all_real_hashes = Arc::new(Mutex::new(HashMap::new()));
let all_fake_hashes = Arc::new(Mutex::new(HashMap::new()));
// Atomic flag to stop threads once a hash match is found
let found_collision = Arc::new(AtomicBool::new(false));
let mut found_collision = false;
let mut total_hashes = 0;
let batch_size = 100; // Number of combinations to process in parallel
let batch_size = 10_000;
let start_time = Instant::now();
// Main loop
// Main loop to modify the fake file until its hash matches the real file's hash
let mut i = 0;
while !found_collision {
while !found_collision.load(Ordering::SeqCst) {
let bit_patterns: Vec<u64> = (i..i + batch_size).collect();
// Parallel processing using Rayon
let results: Vec<_> = bit_patterns
.into_par_iter()
.map(|bit_pattern| modify_and_hash(&real_og, &fake_og, num_chars, bit_pattern))
.collect();
bit_patterns.into_par_iter().for_each(|bit_pattern| {
if found_collision.load(Ordering::SeqCst) {
return; // Stop processing if a collision is found
}
total_hashes += results.len();
// Modify the fake file based on the current bit pattern
let fake_modified = modify_fake_file(&fake_og, bit_pattern);
// Process results and check for collisions
for (real_hash, fake_hash, real_modified, fake_modified) in results {
let mut real_hashes = all_real_hashes.lock().unwrap();
let mut fake_hashes = all_fake_hashes.lock().unwrap();
real_hashes.insert(real_hash.clone(), real_modified.clone());
fake_hashes.insert(fake_hash.clone(), fake_modified.clone());
if real_hashes.contains_key(&fake_hash) || fake_hashes.contains_key(&real_hash) {
let collision_hash = if real_hashes.contains_key(&fake_hash) {
fake_hash.clone()
} else {
real_hash.clone()
};
// Calculate the hash of the modified fake file
let fake_hash = calculate_hash(&fake_modified, num_chars);
// Check if the modified fake file's hash matches the real file's hash
if fake_hash == target_real_hash {
// Print collision message
println!(
"\n[+] Collision found! {}.out and {}.out have the same hash: {}",
real_file, fake_file, collision_hash
"\n[+] Collision found! The modified fake file matches the hash of the real file: {}",
target_real_hash
);
// Write the output files with the collision
// Write the real and fake output files
let real_output = format!("{}.out", real_file);
let fake_output = format!("{}.out", fake_file);
fs::write(&real_output, real_hashes.get(&collision_hash).unwrap().join("\n"))
// Write the real output (original real file)
fs::write(&real_output, real_og.concat())
.expect("Failed to write real output file");
fs::write(&fake_output, fake_hashes.get(&collision_hash).unwrap().join("\n"))
// Write the fake output (modified fake file)
fs::write(&fake_output, fake_modified.concat())
.expect("Failed to write fake output file");
found_collision = true;
break;
// Set the flag to stop further processing
found_collision.store(true, Ordering::SeqCst);
return; // Stop further processing
}
}
});
total_hashes += batch_size;
// Update progress
let elapsed = start_time.elapsed();
@@ -129,4 +120,3 @@ fn main() {
i += batch_size;
}
}