More efficient Rust implementation
@@ -1,30 +0,0 @@
-This is the secret confession of Richard Buckland
-to be revealed by anonymous email if I should
-mysteriously vanish. I have left the last few hex
-digits of the SHA256 hash of this message with my
-trusted solicitor, Dennis Denuto, which will verify
-that this is indeed my intended and unaltered
-confession written by me Richard Buckland.
-
-Dennis has not seen this confession he has only seen
-the last few digits of the hash. I have also sent copies
-of the last few digits to my bank manager and to my priest
-Father Brown.
-
-On the 10th of February I saw Mark Zukerberg near my
-house and we struck up a conversation. He explained all
-the things he was doing to ensure that Facebook respects
-privacy - both of its users and of others. It was very
-impressive.
-
-I feel awful that I have been criticising Facebook publicly
-for so long. I apologised to him in our conversation and
-now I want to confess to the world that actually Facebook
-has more than enough privacy features, and that the reason
-I spend so much time criticising Facebook is that I am
-envious of Mark and wish I was a clever and smart and wise
-as he is. I feel so bad for having been so mean to him for
-so many years that I am considering retreating to the outback.
-I may well cut off all contact with the world and live as a
-hermit from now on. So do not worry if I vanish it is just
-that I feel so guilty that I have been so unfair to Facebook.
@@ -1,40 +1,40 @@
 use rayon::prelude::*;
 use sha2::{Digest, Sha256};
-use std::collections::HashMap;
-use std::fs;
-use std::sync::{Arc, Mutex};
+use std::fs::{self, File};
+use std::io::{BufRead, BufReader};
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::{Arc};
 use std::time::Instant;
 
-// Function to calculate the hash for the given modified file content
-fn calculate_hash(file_lines: &[String], num_chars: usize) -> String {
-    let concatenated = file_lines.join("\n");
+// Function to calculate the hash for the given file content
+fn calculate_hash(file_content: &[String], num_chars: usize) -> String {
+    let concatenated = file_content.concat(); // Concatenate without joining with newlines
     let hash = Sha256::digest(concatenated.as_bytes());
     format!("{:x}", hash)[64 - num_chars..].to_string()
 }
 
 // Function to generate a modified file by adding spaces based on the bit pattern
-fn modify_and_hash(
-    real_og: &[String],
-    fake_og: &[String],
-    num_chars: usize,
-    bit_pattern: u64,
-) -> (String, String, Vec<String>, Vec<String>) {
-    let real_modified: Vec<String> = real_og
+fn modify_fake_file(fake_og: &[String], bit_pattern: u64) -> Vec<String> {
+    fake_og
         .iter()
         .enumerate()
         .map(|(idx, line)| format!("{}{}", line, " ".repeat(((bit_pattern >> idx) & 1) as usize)))
-        .collect();
+        .collect()
+}
 
-    let fake_modified: Vec<String> = fake_og
-        .iter()
-        .enumerate()
-        .map(|(idx, line)| format!("{}{}", line, " ".repeat(((bit_pattern >> idx) & 1) as usize)))
-        .collect();
-
-    let real_hash = calculate_hash(&real_modified, num_chars);
-    let fake_hash = calculate_hash(&fake_modified, num_chars);
-
-    (real_hash, fake_hash, real_modified, fake_modified)
+// Function to read a file while preserving line endings
+fn read_file_preserving_newlines(file_path: &str) -> Vec<String> {
+    let file = File::open(file_path).expect("Failed to open file");
+    let reader = BufReader::new(file);
+
+    reader
+        .lines()
+        .map(|line| {
+            let mut line = line.expect("Failed to read line");
+            line.push('\n'); // Ensure the newline is preserved
+            line
+        })
+        .collect()
 }
 
 fn main() {
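
Taken together, the new helpers implement a truncated second-preimage search: modify_fake_file toggles one optional space per line (bit idx of bit_pattern controls line idx), and calculate_hash keeps only the last num_chars hex digits of the SHA-256 digest, so a match on k digits is expected after roughly 16^k candidate patterns. The snippet below is a minimal, self-contained sketch of that idea, not part of the commit; the names apply_pattern and suffix_matches are illustrative only, and it assumes the same sha2 crate used above.

// Sketch only (not from the commit): truncated-suffix matching over trailing-space bit patterns.
use sha2::{Digest, Sha256};

// Append a space to line `idx` whenever bit `idx` of `pattern` is set,
// mirroring the bit-pattern idea in modify_fake_file.
fn apply_pattern(lines: &[String], pattern: u64) -> String {
    lines
        .iter()
        .enumerate()
        .map(|(idx, line)| format!("{}{}", line, " ".repeat(((pattern >> idx) & 1) as usize)))
        .collect::<Vec<_>>()
        .concat()
}

// True when the last `k` hex digits of the SHA-256 digests of `a` and `b` agree.
fn suffix_matches(a: &str, b: &str, k: usize) -> bool {
    let ha = format!("{:x}", Sha256::digest(a.as_bytes()));
    let hb = format!("{:x}", Sha256::digest(b.as_bytes()));
    ha[64 - k..] == hb[64 - k..]
}

fn main() {
    let real = "line one\nline two\n".to_string();
    let fake: Vec<String> = vec!["line 1\n".into(), "line 2\n".into()];
    // Matching k hex digits takes about 16^k attempts on average, so the pattern
    // space of 2^(number of lines) has to comfortably exceed that.
    for pattern in 0..4u64 {
        let candidate = apply_pattern(&fake, pattern);
        println!("pattern {:02b}: match = {}", pattern, suffix_matches(&real, &candidate, 1));
    }
}
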
@@ -49,74 +49,65 @@ fn main() {
     let fake_file = &args[2];
     let num_chars: usize = args[3].parse().expect("Invalid number of characters");
 
-    // Read the original files
-    let real_og = fs::read_to_string(real_file)
-        .expect("Failed to read real file")
-        .lines()
-        .map(|s| s.to_string())
-        .collect::<Vec<String>>();
-
-    let fake_og = fs::read_to_string(fake_file)
-        .expect("Failed to read fake file")
-        .lines()
-        .map(|s| s.to_string())
-        .collect::<Vec<String>>();
-
-    // Shared data structures for hash tracking
-    let all_real_hashes = Arc::new(Mutex::new(HashMap::new()));
-    let all_fake_hashes = Arc::new(Mutex::new(HashMap::new()));
-
-    let mut found_collision = false;
+    // Read the original files while preserving line endings
+    let real_og = read_file_preserving_newlines(real_file);
+    let fake_og = read_file_preserving_newlines(fake_file);
+
+    // Calculate the target hash of the original real file
+    let target_real_hash = calculate_hash(&real_og, num_chars);
+    println!("Target hash (real file): {}", target_real_hash);
+
+    // Atomic flag to stop threads once a hash match is found
+    let found_collision = Arc::new(AtomicBool::new(false));
+
     let mut total_hashes = 0;
-    let batch_size = 100; // Number of combinations to process in parallel
+    let batch_size = 10_000;
     let start_time = Instant::now();
 
-    // Main loop
+    // Main loop to modify the fake file until its hash matches the real file's hash
     let mut i = 0;
-    while !found_collision {
+    while !found_collision.load(Ordering::SeqCst) {
         let bit_patterns: Vec<u64> = (i..i + batch_size).collect();
 
         // Parallel processing using Rayon
-        let results: Vec<_> = bit_patterns
-            .into_par_iter()
-            .map(|bit_pattern| modify_and_hash(&real_og, &fake_og, num_chars, bit_pattern))
-            .collect();
-
-        total_hashes += results.len();
-
-        // Process results and check for collisions
-        for (real_hash, fake_hash, real_modified, fake_modified) in results {
-            let mut real_hashes = all_real_hashes.lock().unwrap();
-            let mut fake_hashes = all_fake_hashes.lock().unwrap();
-
-            real_hashes.insert(real_hash.clone(), real_modified.clone());
-            fake_hashes.insert(fake_hash.clone(), fake_modified.clone());
-
-            if real_hashes.contains_key(&fake_hash) || fake_hashes.contains_key(&real_hash) {
-                let collision_hash = if real_hashes.contains_key(&fake_hash) {
-                    fake_hash.clone()
-                } else {
-                    real_hash.clone()
-                };
-
+        bit_patterns.into_par_iter().for_each(|bit_pattern| {
+            if found_collision.load(Ordering::SeqCst) {
+                return; // Stop processing if a collision is found
+            }
+
+            // Modify the fake file based on the current bit pattern
+            let fake_modified = modify_fake_file(&fake_og, bit_pattern);
+
+            // Calculate the hash of the modified fake file
+            let fake_hash = calculate_hash(&fake_modified, num_chars);
+
+            // Check if the modified fake file's hash matches the real file's hash
+            if fake_hash == target_real_hash {
+                // Print collision message
                 println!(
-                    "\n[+] Collision found! {}.out and {}.out have the same hash: {}",
-                    real_file, fake_file, collision_hash
+                    "\n[+] Collision found! The modified fake file matches the hash of the real file: {}",
+                    target_real_hash
                 );
 
-                // Write the output files with the collision
+                // Write the real and fake output files
                 let real_output = format!("{}.out", real_file);
                 let fake_output = format!("{}.out", fake_file);
 
-                fs::write(&real_output, real_hashes.get(&collision_hash).unwrap().join("\n"))
+                // Write the real output (original real file)
+                fs::write(&real_output, real_og.concat())
                     .expect("Failed to write real output file");
-                fs::write(&fake_output, fake_hashes.get(&collision_hash).unwrap().join("\n"))
+
+                // Write the fake output (modified fake file)
+                fs::write(&fake_output, fake_modified.concat())
                     .expect("Failed to write fake output file");
 
-                found_collision = true;
-                break;
+                // Set the flag to stop further processing
+                found_collision.store(true, Ordering::SeqCst);
+                return; // Stop further processing
             }
-        }
+        });
+
+        total_hashes += batch_size;
 
         // Update progress
         let elapsed = start_time.elapsed();
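
The rewritten loop drops the Mutex-protected HashMaps in favour of a shared AtomicBool: rayon's for_each does not short-circuit on its own, so each worker checks the flag and returns early once any worker has stored a match. A stripped-down sketch of that pattern (not from the commit, with a dummy predicate standing in for the hash check):

// Sketch only: cooperative early exit for rayon's for_each via a shared AtomicBool.
use rayon::prelude::*;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

fn main() {
    let found = Arc::new(AtomicBool::new(false));

    (0u64..1_000_000).into_par_iter().for_each(|n| {
        // Skip remaining work once another worker has already succeeded.
        if found.load(Ordering::SeqCst) {
            return;
        }
        // Stand-in for the expensive truncated-hash comparison.
        if n == 123_456 {
            found.store(true, Ordering::SeqCst);
            println!("hit at {}", n);
        }
    });
}

rayon's find_any would be a short-circuiting alternative; the commit keeps for_each and handles the exit manually with the flag.
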
@@ -129,4 +120,3 @@ fn main() {
         i += batch_size;
     }
 }
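
On a successful run the program writes <real_file>.out (the untouched real text) and <fake_file>.out (the whitespace-padded fake text). One way to sanity-check a run is to hash both outputs and compare their last num_chars hex digits, as in the sketch below; the file names real.txt.out and fake.txt.out and the NUM_CHARS value are assumptions for illustration, not anything fixed by the commit.

// Sketch only: confirm the two output files share the same truncated SHA-256 suffix.
use sha2::{Digest, Sha256};
use std::fs;

fn last_hex_digits(path: &str, k: usize) -> String {
    let bytes = fs::read(path).expect("failed to read file");
    format!("{:x}", Sha256::digest(&bytes))[64 - k..].to_string()
}

fn main() {
    const NUM_CHARS: usize = 6; // assumed: use the same value passed to the search
    let real = last_hex_digits("real.txt.out", NUM_CHARS); // assumed output name
    let fake = last_hex_digits("fake.txt.out", NUM_CHARS); // assumed output name
    println!("real suffix: {}", real);
    println!("fake suffix: {}", fake);
    assert_eq!(real, fake, "truncated hashes should match after a successful search");
}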