I have set up the Docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout: {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1}. Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 different JSON dicts, one per line. I'll do the merging.
Certainly! Below is a self-contained Rust program that reads a text file located at `/tmp/file.txt`, splits it into four approximately equal parts without breaking words, counts the occurrence of each word in parallel across four threads, and prints out four separate JSON dictionaries, one per line, for each thread's result.

### **Program Overview**

1. **File Reading**: The program reads the entire content of `/tmp/file.txt` into a string.
2. **Chunk Division**: It calculates the approximate byte size for each of the four chunks and adjusts the boundaries to ensure words are not split between chunks.
3. **Parallel Processing**: Using Rust's standard library threads, the program spawns four threads, each processing its designated chunk to count word occurrences.
4. **JSON Serialization**: Each thread serializes its word count `HashMap` into a JSON-formatted string.
5. **Output**: The main thread collects the JSON strings from all threads and prints each on a separate line to `stdout`.
6. **Assumptions**:
   - Words are separated by whitespace.
   - Punctuation attached to words is considered part of the word (e.g., "is," is different from "is").
   - The file is encoded in UTF-8.

### **Complete Rust Program**
```rust
use std::collections::HashMap;
use std::fs::File;
use std::io::{self, Read};
use std::sync::mpsc;
use std::thread;

fn main() -> io::Result<()> {
    // Path to the input file
    let file_path = "/tmp/file.txt";

    // Read the entire file into a String
    let mut file = File::open(file_path)?;
    let mut contents = String::new();
    file.read_to_string(&mut contents)?;

    // Total number of bytes in the file
    let total_bytes = contents.len();

    // Calculate approximate chunk size
    let chunk_size = total_bytes / 4;

    // Find split indices to avoid breaking words
    let mut split_indices = Vec::new();
    split_indices.push(0); // Start of first chunk

    for i in 1..4 {
        let mut split = i * chunk_size;
        // Ensure we don't split in the middle of a UTF-8 character
        while !contents.is_char_boundary(split) && split < total_bytes {
            split += 1;
        }
        // Move to the next whitespace character to avoid splitting a word
        while split < total_bytes && !contents.chars().nth(split).unwrap().is_whitespace() {
            split += 1;
        }
        split_indices.push(split);
    }
    split_indices.push(total_bytes); // End of last chunk

    // Create a channel to collect results from threads
    let (tx, rx) = mpsc::channel();

    // Spawn 4 threads
    for i in 0..4 {
        let tx_clone = tx.clone();
        let start = split_indices[i];
        let end = split_indices[i + 1];
        let chunk = contents[start..end].to_string();

        thread::spawn(move || {
            let word_count = count_words(&chunk);
            let json = hashmap_to_json(&word_count);
            tx_clone.send(json).expect("Failed to send data to main thread");
        });
    }

    // Drop the original sender to avoid hanging
    drop(tx);

    // Collect and print the JSON results
    for received in rx {
        println!("{}", received);
    }

    Ok(())
}

/// Counts the occurrences of each word in the given text.
fn count_words(text: &str) -> HashMap<String, usize> {
    let mut map = HashMap::new();
    for word in text.split_whitespace() {
        *map.entry(word.to_string()).or_insert(0) += 1;
    }
    map
}

/// Converts a HashMap of word counts to a JSON-formatted string.
fn hashmap_to_json(map: &HashMap<String, usize>) -> String {
    let mut json = String::from("{");
    let mut first = true;
    for (key, value) in map {
        if !first {
            json.push_str(", ");
        }
        first = false;
        // Escape double quotes in keys
        let escaped_key = key.replace('"', "\\\"");
        json.push_str(&format!("\"{}\": {}", escaped_key, value));
    }
    json.push('}');
    json
}
```
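The boundary adjustment in step 2 is the subtle part of this approach. Here is a minimal standalone sketch of that idea; the `adjust_split` name is ours, and the sketch advances by `len_utf8()` byte widths rather than the `chars().nth()` lookup the program above uses:

```rust
// Sketch only: nudge a byte offset forward so it lands just past the
// current word. Assumes UTF-8 text and byte-offset indices throughout.
fn adjust_split(text: &str, mut idx: usize) -> usize {
    // First, step off a multi-byte character if idx landed inside one.
    while idx < text.len() && !text.is_char_boundary(idx) {
        idx += 1;
    }
    // Then skip the remainder of the current word so no word straddles
    // two chunks; advance by each character's UTF-8 byte width.
    while idx < text.len() {
        let ch = text[idx..].chars().next().unwrap();
        if ch.is_whitespace() {
            break;
        }
        idx += ch.len_utf8();
    }
    idx
}

fn main() {
    let text = "This is is is, an example";
    // Byte 6 falls inside the first "is"; the split moves past it to byte 7.
    assert_eq!(adjust_split(text, 6), 7);
    println!("ok");
}
```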
Compile:

```bash
rustc rust_word_count.rs
```
Run:

```bash
./rust_word_count
```
Sample `/tmp/file.txt`:

```
This is is is, an example
```
Sample output (one JSON dict per thread):

```
{"This": 1, "is": 2}
{"is,": 1}
{"an": 1}
{"example": 1}
```
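The prompt leaves the merging to the caller. For completeness, here is a minimal merge sketch; it assumes the restricted JSON format above (keys are whitespace-free words with no escaped quotes, so `", "` can only separate entries), and the `merge_counts` binary name in the usage line is hypothetical:

```rust
use std::collections::HashMap;
use std::io::{self, BufRead};

// Sketch of the merge step: sum the counts from the four JSON lines
// read on stdin. Assumes the restricted format produced above; real
// JSON with escaped quotes or spaces inside keys would break it.
fn main() {
    let mut totals: HashMap<String, usize> = HashMap::new();
    for line in io::stdin().lock().lines() {
        let line = line.expect("failed to read a line from stdin");
        let inner = line.trim().trim_start_matches('{').trim_end_matches('}');
        if inner.is_empty() {
            continue; // an empty {} dict contributes nothing
        }
        for entry in inner.split(", ") {
            // Each entry looks like: "word": 3
            if let Some((key, count)) = entry.rsplit_once(": ") {
                let word = key.trim_matches('"').to_string();
                let count: usize = count.parse().expect("count should be a number");
                *totals.entry(word).or_insert(0) += count;
            }
        }
    }
    println!("{:?}", totals); // Debug output, not JSON
}
```

Under those assumptions it can be piped directly: `./rust_word_count | ./merge_counts`.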
I extracted the following code from that output:
```rust
use std::collections::HashMap;
use std::fs::File;
use std::io::{self, Read};
use std::sync::mpsc;
use std::thread;

fn main() -> io::Result<()> {
    // Path to the input file
    let file_path = "/tmp/file.txt";

    // Read the entire file into a String
    let mut file = File::open(file_path)?;
    let mut contents = String::new();
    file.read_to_string(&mut contents)?;

    // Total number of bytes in the file
    let total_bytes = contents.len();

    // Calculate approximate chunk size
    let chunk_size = total_bytes / 4;

    // Find split indices to avoid breaking words
    let mut split_indices = Vec::new();
    split_indices.push(0); // Start of first chunk

    for i in 1..4 {
        let mut split = i * chunk_size;
        // Ensure we don't split in the middle of a UTF-8 character
        while !contents.is_char_boundary(split) && split < total_bytes {
            split += 1;
        }
        // Move to the next whitespace character to avoid splitting a word
        while split < total_bytes && !contents.chars().nth(split).unwrap().is_whitespace() {
            split += 1;
        }
        split_indices.push(split);
    }
    split_indices.push(total_bytes); // End of last chunk

    // Create a channel to collect results from threads
    let (tx, rx) = mpsc::channel();

    // Spawn 4 threads
    for i in 0..4 {
        let tx_clone = tx.clone();
        let start = split_indices[i];
        let end = split_indices[i + 1];
        let chunk = contents[start..end].to_string();

        thread::spawn(move || {
            let word_count = count_words(&chunk);
            let json = hashmap_to_json(&word_count);
            tx_clone.send(json).expect("Failed to send data to main thread");
        });
    }

    // Drop the original sender to avoid hanging
    drop(tx);

    // Collect and print the JSON results
    for received in rx {
        println!("{}", received);
    }

    Ok(())
}

/// Counts the occurrences of each word in the given text.
fn count_words(text: &str) -> HashMap<String, usize> {
    let mut map = HashMap::new();
    for word in text.split_whitespace() {
        *map.entry(word.to_string()).or_insert(0) += 1;
    }
    map
}

/// Converts a HashMap of word counts to a JSON-formatted string.
fn hashmap_to_json(map: &HashMap<String, usize>) -> String {
    let mut json = String::from("{");
    let mut first = true;
    for (key, value) in map {
        if !first {
            json.push_str(", ");
        }
        first = false;
        // Escape double quotes in keys
        let escaped_key = key.replace('"', "\\\"");
        json.push_str(&format!("\"{}\": {}", escaped_key, value));
    }
    json.push('}');
    json
}
```
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are syntax errors. Just tell me if they tried to parallelize it.

```rust
use std::collections::HashMap;
use std::fs::File;
use std::io::{self, Read};
use std::sync::mpsc;
use std::thread;

fn main() -> io::Result<()> {
    // Path to the input file
    let file_path = "/tmp/file.txt";

    // Read the entire file into a String
    let mut file = File::open(file_path)?;
    let mut contents = String::new();
    file.read_to_string(&mut contents)?;

    // Total number of bytes in the file
    let total_bytes = contents.len();

    // Calculate approximate chunk size
    let chunk_size = total_bytes / 4;

    // Find split indices to avoid breaking words
    let mut split_indices = Vec::new();
    split_indices.push(0); // Start of first chunk

    for i in 1..4 {
        let mut split = i * chunk_size;
        // Ensure we don't split in the middle of a UTF-8 character
        while !contents.is_char_boundary(split) && split < total_bytes {
            split += 1;
        }
        // Move to the next whitespace character to avoid splitting a word
        while split < total_bytes && !contents.chars().nth(split).unwrap().is_whitespace() {
            split += 1;
        }
        split_indices.push(split);
    }
    split_indices.push(total_bytes); // End of last chunk

    // Create a channel to collect results from threads
    let (tx, rx) = mpsc::channel();

    // Spawn 4 threads
    for i in 0..4 {
        let tx_clone = tx.clone();
        let start = split_indices[i];
        let end = split_indices[i + 1];
        let chunk = contents[start..end].to_string();

        thread::spawn(move || {
            let word_count = count_words(&chunk);
            let json = hashmap_to_json(&word_count);
            tx_clone.send(json).expect("Failed to send data to main thread");
        });
    }

    // Drop the original sender to avoid hanging
    drop(tx);

    // Collect and print the JSON results
    for received in rx {
        println!("{}", received);
    }

    Ok(())
}

/// Counts the occurrences of each word in the given text.
fn count_words(text: &str) -> HashMap<String, usize> {
    let mut map = HashMap::new();
    for word in text.split_whitespace() {
        *map.entry(word.to_string()).or_insert(0) += 1;
    }
    map
}

/// Converts a HashMap of word counts to a JSON-formatted string.
fn hashmap_to_json(map: &HashMap<String, usize>) -> String {
    let mut json = String::from("{");
    let mut first = true;
    for (key, value) in map {
        if !first {
            json.push_str(", ");
        }
        first = false;
        // Escape double quotes in keys
        let escaped_key = key.replace('"', "\\\"");
        json.push_str(&format!("\"{}\": {}", escaped_key, value));
    }
    json.push('}');
    json
}
```

Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes: True
Running the following program:
```rust
use std::collections::HashMap;
use std::fs::File;
use std::io::{self, Read};
use std::sync::mpsc;
use std::thread;

fn main() -> io::Result<()> {
    // Path to the input file
    let file_path = "/tmp/file.txt";

    // Read the entire file into a String
    let mut file = File::open(file_path)?;
    let mut contents = String::new();
    file.read_to_string(&mut contents)?;

    // Total number of bytes in the file
    let total_bytes = contents.len();

    // Calculate approximate chunk size
    let chunk_size = total_bytes / 4;

    // Find split indices to avoid breaking words
    let mut split_indices = Vec::new();
    split_indices.push(0); // Start of first chunk

    for i in 1..4 {
        let mut split = i * chunk_size;
        // Ensure we don't split in the middle of a UTF-8 character
        while !contents.is_char_boundary(split) && split < total_bytes {
            split += 1;
        }
        // Move to the next whitespace character to avoid splitting a word
        while split < total_bytes && !contents.chars().nth(split).unwrap().is_whitespace() {
            split += 1;
        }
        split_indices.push(split);
    }
    split_indices.push(total_bytes); // End of last chunk

    // Create a channel to collect results from threads
    let (tx, rx) = mpsc::channel();

    // Spawn 4 threads
    for i in 0..4 {
        let tx_clone = tx.clone();
        let start = split_indices[i];
        let end = split_indices[i + 1];
        let chunk = contents[start..end].to_string();

        thread::spawn(move || {
            let word_count = count_words(&chunk);
            let json = hashmap_to_json(&word_count);
            tx_clone.send(json).expect("Failed to send data to main thread");
        });
    }

    // Drop the original sender to avoid hanging
    drop(tx);

    // Collect and print the JSON results
    for received in rx {
        println!("{}", received);
    }

    Ok(())
}

/// Counts the occurrences of each word in the given text.
fn count_words(text: &str) -> HashMap<String, usize> {
    let mut map = HashMap::new();
    for word in text.split_whitespace() {
        *map.entry(word.to_string()).or_insert(0) += 1;
    }
    map
}

/// Converts a HashMap of word counts to a JSON-formatted string.
fn hashmap_to_json(map: &HashMap<String, usize>) -> String {
    let mut json = String::from("{");
    let mut first = true;
    for (key, value) in map {
        if !first {
            json.push_str(", ");
        }
        first = false;
        // Escape double quotes in keys
        let escaped_key = key.replace('"', "\\\"");
        json.push_str(&format!("\"{}\": {}", escaped_key, value));
    }
    json.push('}');
    json
}
```
{"the": 4, "was": 4, "of": 4, "times,": 2, "best": 1, "age": 2, "foolishness,": 1, "it": 4, "wisdom,": 1, "worst": 1}
{"the": 4, "it": 4, "epoch": 2, "season": 1, "Light,": 1, "incredulity,": 1, "was": 4, "of": 3, "belief,": 1}
{"had": 1, "nothing": 1, "we": 3, "us,": 1, "all": 2, "direct": 2, "to": 1, "Heaven,": 1, "the": 1, "before": 1, "were": 2, "going": 2, "way": 1, "other": 1}
{"was": 2, "us,": 1, "despair,": 1, "hope,": 1, "had": 1, "everything": 1, "of": 3, "the": 2, "spring": 1, "season": 1, "winter": 1, "we": 1, "it": 2, "before": 1, "Darkness,": 1}
Resulting in output: [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]: True
I have set up the Docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout: {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1}. Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 different JSON dicts, one per line. I'll do the merging.
Here's a self-contained Rust program that reads `/tmp/file.txt`, counts the frequency of each word using four parallel threads, and prints four separate JSON dictionaries (one per thread) to `stdout`. Each thread processes approximately one-fourth of the words in the file.
```rust
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::thread;

fn main() {
    // Path to the input file
    let file_path = "/tmp/file.txt";

    // Read the entire file into a string
    let mut file = match File::open(file_path) {
        Ok(f) => f,
        Err(e) => {
            eprintln!("Error opening file {}: {}", file_path, e);
            return;
        }
    };
    let mut contents = String::new();
    if let Err(e) = file.read_to_string(&mut contents) {
        eprintln!("Error reading file {}: {}", file_path, e);
        return;
    }

    // Split the contents into words based on whitespace
    let words: Vec<&str> = contents.split_whitespace().collect();
    let total_words = words.len();

    if total_words == 0 {
        println!("{{}}");
        println!("{{}}");
        println!("{{}}");
        println!("{{}}");
        return;
    }

    // Determine the number of words per thread
    let chunk_size = (total_words + 3) / 4; // Ceiling division

    // Prepare to hold thread handles
    let mut handles = Vec::with_capacity(4);

    for i in 0..4 {
        // Calculate the start and end indices for the current chunk
        let start = i * chunk_size;
        let end = if start + chunk_size > total_words {
            total_words
        } else {
            start + chunk_size
        };

        // Clone the slice of words for the thread
        let chunk = words[start..end].to_vec();

        // Spawn a thread to process the chunk
        let handle = thread::spawn(move || {
            let mut map: HashMap<String, usize> = HashMap::new();
            for word in chunk {
                *map.entry(word.to_string()).or_insert(0) += 1;
            }
            map
        });

        handles.push(handle);
    }

    // Collect the results from the threads
    for handle in handles {
        match handle.join() {
            Ok(map) => {
                // Convert the HashMap to a JSON string
                let json = hashmap_to_json(&map);
                println!("{}", json);
            }
            Err(_) => {
                eprintln!("A thread panicked.");
                println!("{{}}"); // Print empty JSON object in case of panic
            }
        }
    }
}

// Helper function to convert a HashMap<String, usize> to a JSON string
fn hashmap_to_json(map: &HashMap<String, usize>) -> String {
    let mut json = String::from("{");
    let mut first = true;
    for (key, value) in map {
        if !first {
            json.push_str(", ");
        }
        first = false;
        // Escape backslashes and double quotes in the key
        let escaped_key = key
            .replace("\\", "\\\\")
            .replace("\"", "\\\"");
        json.push_str(&format!("\"{}\": {}", escaped_key, value));
    }
    json.push('}');
    json
}
```
Compile and run:

```bash
rustc rust_word_count.rs -o rust_word_count
/path/to/rust_word_count
```
Sample `/tmp/file.txt`:

```
This is is is, an example
```
Sample output (one JSON dict per thread):

```
{"This": 1, "is": 2}
{"is,": 1, "an": 1}
{"example": 1}
{}
```
I extracted the following code from that output:
```rust
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::thread;

fn main() {
    // Path to the input file
    let file_path = "/tmp/file.txt";

    // Read the entire file into a string
    let mut file = match File::open(file_path) {
        Ok(f) => f,
        Err(e) => {
            eprintln!("Error opening file {}: {}", file_path, e);
            return;
        }
    };
    let mut contents = String::new();
    if let Err(e) = file.read_to_string(&mut contents) {
        eprintln!("Error reading file {}: {}", file_path, e);
        return;
    }

    // Split the contents into words based on whitespace
    let words: Vec<&str> = contents.split_whitespace().collect();
    let total_words = words.len();

    if total_words == 0 {
        println!("{{}}");
        println!("{{}}");
        println!("{{}}");
        println!("{{}}");
        return;
    }

    // Determine the number of words per thread
    let chunk_size = (total_words + 3) / 4; // Ceiling division

    // Prepare to hold thread handles
    let mut handles = Vec::with_capacity(4);

    for i in 0..4 {
        // Calculate the start and end indices for the current chunk
        let start = i * chunk_size;
        let end = if start + chunk_size > total_words {
            total_words
        } else {
            start + chunk_size
        };

        // Clone the slice of words for the thread
        let chunk = words[start..end].to_vec();

        // Spawn a thread to process the chunk
        let handle = thread::spawn(move || {
            let mut map: HashMap<String, usize> = HashMap::new();
            for word in chunk {
                *map.entry(word.to_string()).or_insert(0) += 1;
            }
            map
        });

        handles.push(handle);
    }

    // Collect the results from the threads
    for handle in handles {
        match handle.join() {
            Ok(map) => {
                // Convert the HashMap to a JSON string
                let json = hashmap_to_json(&map);
                println!("{}", json);
            }
            Err(_) => {
                eprintln!("A thread panicked.");
                println!("{{}}"); // Print empty JSON object in case of panic
            }
        }
    }
}

// Helper function to convert a HashMap<String, usize> to a JSON string
fn hashmap_to_json(map: &HashMap<String, usize>) -> String {
    let mut json = String::from("{");
    let mut first = true;
    for (key, value) in map {
        if !first {
            json.push_str(", ");
        }
        first = false;
        // Escape backslashes and double quotes in the key
        let escaped_key = key
            .replace("\\", "\\\\")
            .replace("\"", "\\\"");
        json.push_str(&format!("\"{}\": {}", escaped_key, value));
    }
    json.push('}');
    json
}
```
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are syntax errors. Just tell me if they tried to parallelize it.

```rust
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::thread;

fn main() {
    // Path to the input file
    let file_path = "/tmp/file.txt";

    // Read the entire file into a string
    let mut file = match File::open(file_path) {
        Ok(f) => f,
        Err(e) => {
            eprintln!("Error opening file {}: {}", file_path, e);
            return;
        }
    };
    let mut contents = String::new();
    if let Err(e) = file.read_to_string(&mut contents) {
        eprintln!("Error reading file {}: {}", file_path, e);
        return;
    }

    // Split the contents into words based on whitespace
    let words: Vec<&str> = contents.split_whitespace().collect();
    let total_words = words.len();

    if total_words == 0 {
        println!("{{}}");
        println!("{{}}");
        println!("{{}}");
        println!("{{}}");
        return;
    }

    // Determine the number of words per thread
    let chunk_size = (total_words + 3) / 4; // Ceiling division

    // Prepare to hold thread handles
    let mut handles = Vec::with_capacity(4);

    for i in 0..4 {
        // Calculate the start and end indices for the current chunk
        let start = i * chunk_size;
        let end = if start + chunk_size > total_words {
            total_words
        } else {
            start + chunk_size
        };

        // Clone the slice of words for the thread
        let chunk = words[start..end].to_vec();

        // Spawn a thread to process the chunk
        let handle = thread::spawn(move || {
            let mut map: HashMap<String, usize> = HashMap::new();
            for word in chunk {
                *map.entry(word.to_string()).or_insert(0) += 1;
            }
            map
        });

        handles.push(handle);
    }

    // Collect the results from the threads
    for handle in handles {
        match handle.join() {
            Ok(map) => {
                // Convert the HashMap to a JSON string
                let json = hashmap_to_json(&map);
                println!("{}", json);
            }
            Err(_) => {
                eprintln!("A thread panicked.");
                println!("{{}}"); // Print empty JSON object in case of panic
            }
        }
    }
}

// Helper function to convert a HashMap<String, usize> to a JSON string
fn hashmap_to_json(map: &HashMap<String, usize>) -> String {
    let mut json = String::from("{");
    let mut first = true;
    for (key, value) in map {
        if !first {
            json.push_str(", ");
        }
        first = false;
        // Escape backslashes and double quotes in the key
        let escaped_key = key
            .replace("\\", "\\\\")
            .replace("\"", "\\\"");
        json.push_str(&format!("\"{}\": {}", escaped_key, value));
    }
    json.push('}');
    json
}
```

Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes: True
Running the following program:
```rust
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::thread;

fn main() {
    // Path to the input file
    let file_path = "/tmp/file.txt";

    // Read the entire file into a string
    let mut file = match File::open(file_path) {
        Ok(f) => f,
        Err(e) => {
            eprintln!("Error opening file {}: {}", file_path, e);
            return;
        }
    };
    let mut contents = String::new();
    if let Err(e) = file.read_to_string(&mut contents) {
        eprintln!("Error reading file {}: {}", file_path, e);
        return;
    }

    // Split the contents into words based on whitespace
    let words: Vec<&str> = contents.split_whitespace().collect();
    let total_words = words.len();

    if total_words == 0 {
        println!("{{}}");
        println!("{{}}");
        println!("{{}}");
        println!("{{}}");
        return;
    }

    // Determine the number of words per thread
    let chunk_size = (total_words + 3) / 4; // Ceiling division

    // Prepare to hold thread handles
    let mut handles = Vec::with_capacity(4);

    for i in 0..4 {
        // Calculate the start and end indices for the current chunk
        let start = i * chunk_size;
        let end = if start + chunk_size > total_words {
            total_words
        } else {
            start + chunk_size
        };

        // Clone the slice of words for the thread
        let chunk = words[start..end].to_vec();

        // Spawn a thread to process the chunk
        let handle = thread::spawn(move || {
            let mut map: HashMap<String, usize> = HashMap::new();
            for word in chunk {
                *map.entry(word.to_string()).or_insert(0) += 1;
            }
            map
        });

        handles.push(handle);
    }

    // Collect the results from the threads
    for handle in handles {
        match handle.join() {
            Ok(map) => {
                // Convert the HashMap to a JSON string
                let json = hashmap_to_json(&map);
                println!("{}", json);
            }
            Err(_) => {
                eprintln!("A thread panicked.");
                println!("{{}}"); // Print empty JSON object in case of panic
            }
        }
    }
}

// Helper function to convert a HashMap<String, usize> to a JSON string
fn hashmap_to_json(map: &HashMap<String, usize>) -> String {
    let mut json = String::from("{");
    let mut first = true;
    for (key, value) in map {
        if !first {
            json.push_str(", ");
        }
        first = false;
        // Escape backslashes and double quotes in the key
        let escaped_key = key
            .replace("\\", "\\\\")
            .replace("\"", "\\\"");
        json.push_str(&format!("\"{}\": {}", escaped_key, value));
    }
    json.push('}');
    json
}
```
```
error[E0597]: `contents` does not live long enough
  --> main.rs:27:28
   |
20 |     let mut contents = String::new();
   |         ------------ binding `contents` declared here
...
27 |     let words: Vec<&str> = contents.split_whitespace().collect();
   |                            ^^^^^^^^ borrowed value does not live long enough
...
56 |           let handle = thread::spawn(move || {
   |  ______________________-
57 | |             let mut map: HashMap<String, usize> = HashMap::new();
58 | |             for word in chunk {
59 | |                 *map.entry(word.to_string()).or_insert(0) += 1;
60 | |             }
61 | |             map
62 | |         });
   | |__________- argument requires that `contents` is borrowed for `'static`
...
81 | }
   | - `contents` dropped here while still borrowed

error: aborting due to 1 previous error

For more information about this error, try `rustc --explain E0597`.
main.sh: line 2: ./a.out: No such file or directory
```
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]: False
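For the record, the E0597 failure is the classic `'static` requirement of `thread::spawn`: `words: Vec<&str>` borrows `contents`, and `words[start..end].to_vec()` still copies only the borrowed `&str`s, so the closures cannot be moved onto threads that might outlive `contents`. A minimal sketch of one possible fix, owning the words up front; this is our guess at the smallest change, not code from the transcript:

```rust
use std::collections::HashMap;
use std::thread;

fn main() {
    // Stand-in for the file contents; the real program reads /tmp/file.txt.
    let contents = String::from("This is is is, an example");

    // Owning the words removes the borrow of `contents`, so the chunks
    // below satisfy thread::spawn's 'static bound.
    let words: Vec<String> = contents
        .split_whitespace()
        .map(str::to_string)
        .collect();

    // Ceiling division as before; max(1) guards chunks() against zero.
    let chunk_size = ((words.len() + 3) / 4).max(1);
    let mut handles = Vec::new();

    // chunks() yields at most four non-empty chunks here.
    for chunk in words.chunks(chunk_size) {
        let chunk: Vec<String> = chunk.to_vec(); // owned, movable
        handles.push(thread::spawn(move || {
            let mut map: HashMap<String, usize> = HashMap::new();
            for word in chunk {
                *map.entry(word).or_insert(0) += 1;
            }
            map
        }));
    }

    for handle in handles {
        // Debug output; the real program would use its hashmap_to_json.
        println!("{:?}", handle.join().unwrap());
    }
}
```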