rust lol
16 .gitignore vendored
@@ -231,3 +231,19 @@ pip-selfcheck.json
# .nfs files are created when an open file is removed but is still being accessed
.nfs*

### Rust ###
# Generated by Cargo
# will have compiled files and executables
debug/
target/

# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock

# These are backup files generated by rustfmt
**/*.rs.bk

# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb

@@ -1,22 +1,22 @@
This is the secret confession of Richard Buckland
to be revealed by anonymous email if I should
mysteriously vanish. I have left the last few hex
digits of the SHA256 hash of this message with my
trusted solicitor, Dennis Denuto, which will verify
that this is indeed my intended and unaltered
confession written by me, Richard Buckland.

Dennis has not seen this confession; he has only seen
the last few digits of the hash. I have also sent copies
of the last few digits to my bank manager and to my priest
Father Brown.

On the 10th of February I saw Mark Zuckerberg near my
house and we struck up a conversation. He explained all
the things he was doing to ensure that Facebook respects
privacy - both of its users and of others. It was very
impressive.

I feel awful that I have been criticising Facebook publicly
for so long. I apologised to him in our conversation and
now I want to confess to the world that actually Facebook
@@ -1,22 +0,0 @@
This is the secret confession of Richard Buckland
to be revealed by anonymous email if I should
mysteriously vanish. I have left the last few hex
digits of the SHA256 hash of this message with my
trusted solicitor, Dennis Denuto, which will verify
that this is indeed my intended and unaltered
confession written by me, Richard Buckland.

Dennis has not seen this confession; he has only seen
the last few digits of the hash. I have also sent copies
of the last few digits to my bank manager and to my priest
Father Brown.

On the 10th of February I saw Mark Zuckerberg peeping
through my window and recording my private and personal
conversation with my friend.

I confronted him and he was very embarrassed. He
promised to pay me $1 million a year if I would stay
silent and not tell anyone I had seen him do this. I
agreed but now I worry that it would be cheaper for him
to make me vanish than to keep paying me.
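The scheme in both confessions rests on a truncated digest, and that is what this commit attacks: each hex digit carries 4 bits, so a suffix of num_chars digits can take only 16 ** num_chars values, and by the birthday bound two pools of candidate documents are expected to share a suffix after roughly sqrt(16 ** num_chars) hashes per pool, far fewer than a targeted forgery would need. A minimal sketch of the truncation itself, using only hashlib (the message text and digit count here are illustrative):

from hashlib import sha256

def truncated_digest(message: str, num_chars: int) -> str:
    # Keep only the last `num_chars` hex digits, as the solicitor would.
    return sha256(message.encode()).hexdigest()[-num_chars:]

# Trailing whitespace changes the full digest completely, but a short
# suffix has only 16 ** num_chars possible values to collide in.
print(truncated_digest("confession text", 6))
print(truncated_digest("confession text ", 6))
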
18 enforcer.py
@@ -1,3 +1,5 @@
import sys
import time
from multiprocessing import Pool
from hashlib import sha256

@@ -36,18 +38,23 @@ def main(real_file, fake_file, num_chars):
    all_fake_hashes = {}

    found_collision = False
    total_hashes = 0
    batch_size = 100  # Number of combinations to process in parallel
    start_time = time.time()  # Start time to measure hashes per second

    # Use multiprocessing Pool
    with Pool() as pool:
        i = 0
        while not found_collision:
            # Prepare a batch of bit patterns to process in parallel
            bit_patterns = [(real_og, fake_og, num_chars, pattern) for pattern in range(i, i + batch_size)]

            # Process the batch in parallel
            results = pool.map(modify_and_hash, bit_patterns)

            # Update the total count of hashes processed
            total_hashes += len(results)

            # Check the results for a hash collision
            for real_hash, fake_hash, real_modified, fake_modified in results:
                all_real_hashes[real_hash] = real_modified
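The loop above farms each batch out to modify_and_hash, which this diff never shows. Judging from the Rust port added later in this commit, it appends one space to line idx of each document whenever bit idx of the pattern is set, then truncates both SHA-256 digests. A sketch under that assumption, shaped to take the single tuple argument that pool.map passes and to return the 4-tuple the loop unpacks:

from hashlib import sha256

def modify_and_hash(args):
    # Unpack the tuple prepared for pool.map in the batch above.
    real_og, fake_og, num_chars, pattern = args

    def apply(lines):
        # Append one space to line idx when bit idx of `pattern` is set.
        return [line + " " * ((pattern >> idx) & 1) for idx, line in enumerate(lines)]

    def last_hex(lines):
        # Keep only the last `num_chars` hex digits of the SHA-256 digest.
        return sha256("\n".join(lines).encode()).hexdigest()[-num_chars:]

    real_modified = apply(real_og)
    fake_modified = apply(fake_og)
    return last_hex(real_modified), last_hex(fake_modified), real_modified, fake_modified
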
@@ -55,7 +62,7 @@ def main(real_file, fake_file, num_chars):
                if real_hash in all_fake_hashes or fake_hash in all_real_hashes:
                    collision_hash = real_hash if real_hash in all_fake_hashes else fake_hash
                    print(f"\n[+] Collision found! {real_file}.out and {fake_file}.out have the same hash: {collision_hash}")

                    with open(f"{real_file}.out", 'w') as f_out:
                        f_out.writelines("\n".join(all_real_hashes[collision_hash]))
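The cross-pool membership test above is what makes this a birthday attack rather than a preimage search: a hit only needs some real variant and some fake variant to share a suffix. A toy run with random inputs and a 4-digit suffix (names and sizes illustrative) finds a hit after a few hundred pairs on average, not the 16 ** 4 = 65536 attempts a targeted match would cost:

import random
from hashlib import sha256

real_seen, fake_seen = {}, {}
attempts = 0
while True:
    attempts += 1
    # Hash one random "real" and one random "fake" document per round,
    # keeping only the last 4 hex digits, and check both directions.
    r = sha256(f"real-{random.random()}".encode()).hexdigest()[-4:]
    f = sha256(f"fake-{random.random()}".encode()).hexdigest()[-4:]
    real_seen[r] = attempts
    fake_seen[f] = attempts
    if r in fake_seen or f in real_seen:
        print(f"cross-pool collision after {attempts} pairs")
        break
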
@@ -65,10 +72,15 @@ def main(real_file, fake_file, num_chars):
                    found_collision = True
                    break

            # Update progress every batch
            elapsed_time = time.time() - start_time
            hashes_per_sec = total_hashes / elapsed_time
            print(f"\rProcessed {total_hashes} hashes in {elapsed_time:.2f} seconds, {hashes_per_sec:.2f} H/s", end='')

            # Increment the bit pattern range
            i += batch_size


if __name__ == "__main__":
    if len(sys.argv) != 4:
        print(f"Usage: {sys.argv[0]} <real_file> <fake_file> <num_chars>")
        sys.exit(1)

9 hash_collision/Cargo.toml Normal file
@@ -0,0 +1,9 @@
[package]
name = "hash_collision"
version = "0.1.0"
edition = "2021"

[dependencies]
rayon = "1.5"
sha2 = "0.9"

132 hash_collision/src/main.rs Normal file
@@ -0,0 +1,132 @@
use rayon::prelude::*;
use sha2::{Digest, Sha256};
use std::collections::HashMap;
use std::fs;
use std::sync::{Arc, Mutex};
use std::time::Instant;

// Function to calculate the hash for the given modified file content
fn calculate_hash(file_lines: &[String], num_chars: usize) -> String {
    let concatenated = file_lines.join("\n");
    let hash = Sha256::digest(concatenated.as_bytes());
    format!("{:x}", hash)[64 - num_chars..].to_string()
}

// Function to generate a modified file by adding spaces based on the bit pattern
fn modify_and_hash(
    real_og: &[String],
    fake_og: &[String],
    num_chars: usize,
    bit_pattern: u64,
) -> (String, String, Vec<String>, Vec<String>) {
    let real_modified: Vec<String> = real_og
        .iter()
        .enumerate()
        .map(|(idx, line)| format!("{}{}", line, " ".repeat(((bit_pattern >> idx) & 1) as usize)))
        .collect();

    let fake_modified: Vec<String> = fake_og
        .iter()
        .enumerate()
        .map(|(idx, line)| format!("{}{}", line, " ".repeat(((bit_pattern >> idx) & 1) as usize)))
        .collect();

    let real_hash = calculate_hash(&real_modified, num_chars);
    let fake_hash = calculate_hash(&fake_modified, num_chars);

    (real_hash, fake_hash, real_modified, fake_modified)
}

fn main() {
    // Command line arguments
    let args: Vec<String> = std::env::args().collect();
    if args.len() != 4 {
        eprintln!("Usage: {} <real_file> <fake_file> <num_chars>", args[0]);
        std::process::exit(1);
    }

    let real_file = &args[1];
    let fake_file = &args[2];
    let num_chars: usize = args[3].parse().expect("Invalid number of characters");

    // Read the original files
    let real_og = fs::read_to_string(real_file)
        .expect("Failed to read real file")
        .lines()
        .map(|s| s.to_string())
        .collect::<Vec<String>>();

    let fake_og = fs::read_to_string(fake_file)
        .expect("Failed to read fake file")
        .lines()
        .map(|s| s.to_string())
        .collect::<Vec<String>>();

    // Shared data structures for hash tracking
    let all_real_hashes = Arc::new(Mutex::new(HashMap::new()));
    let all_fake_hashes = Arc::new(Mutex::new(HashMap::new()));

    let mut found_collision = false;
    let mut total_hashes = 0;
    let batch_size = 100; // Number of combinations to process in parallel
    let start_time = Instant::now();

    // Main loop
    let mut i = 0;
    while !found_collision {
        let bit_patterns: Vec<u64> = (i..i + batch_size).collect();

        // Parallel processing using Rayon
        let results: Vec<_> = bit_patterns
            .into_par_iter()
            .map(|bit_pattern| modify_and_hash(&real_og, &fake_og, num_chars, bit_pattern))
            .collect();

        total_hashes += results.len();

        // Process results and check for collisions
        for (real_hash, fake_hash, real_modified, fake_modified) in results {
            let mut real_hashes = all_real_hashes.lock().unwrap();
            let mut fake_hashes = all_fake_hashes.lock().unwrap();

            real_hashes.insert(real_hash.clone(), real_modified.clone());
            fake_hashes.insert(fake_hash.clone(), fake_modified.clone());

            if real_hashes.contains_key(&fake_hash) || fake_hashes.contains_key(&real_hash) {
                let collision_hash = if real_hashes.contains_key(&fake_hash) {
                    fake_hash.clone()
                } else {
                    real_hash.clone()
                };

                println!(
                    "\n[+] Collision found! {}.out and {}.out have the same hash: {}",
                    real_file, fake_file, collision_hash
                );

                // Write the output files with the collision
                let real_output = format!("{}.out", real_file);
                let fake_output = format!("{}.out", fake_file);

                fs::write(&real_output, real_hashes.get(&collision_hash).unwrap().join("\n"))
                    .expect("Failed to write real output file");
                fs::write(&fake_output, fake_hashes.get(&collision_hash).unwrap().join("\n"))
                    .expect("Failed to write fake output file");

                found_collision = true;
                break;
            }
        }

        // Update progress
        let elapsed = start_time.elapsed();
        let hashes_per_sec = total_hashes as f64 / elapsed.as_secs_f64();
        print!(
            "\rProcessed {} hashes in {:.2?}, {:.2} H/s",
            total_hashes, elapsed, hashes_per_sec
        );

        i += batch_size;
    }
}
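Either implementation should leave the two .out files agreeing on their last num_chars hex digits. A quick sanity check; the file names and digit count below are placeholders for whatever was passed on the command line:

from hashlib import sha256

def suffix(path, num_chars=6):
    # Both implementations write the modified lines joined by "\n",
    # so hashing the raw file bytes reproduces their digest input.
    return sha256(open(path, "rb").read()).hexdigest()[-num_chars:]

print(suffix("real.txt.out") == suffix("fake.txt.out"))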