I have set up the Docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 different JSON dicts, one per line. I'll do the merging. Do not import serde_json, write JSON output from scratch.
Here's a self-contained Rust program that reads a text file, counts the occurrences of each word, and prints the results in a JSON-like format. The program splits the work across 4 threads, each handling roughly 1/4 of the file in parallel. It does not use any external crates, so you can compile it directly with `rustc`.
use std::fs::File;
use std::io::{self, BufRead};
use std::sync::{Arc, Mutex};
use std::thread;

fn main() -> io::Result<()> {
    let path = "/tmp/file.txt";
    let file = File::open(path)?;
    let reader = io::BufReader::new(file);

    let lines: Vec<String> = reader.lines().filter_map(Result::ok).collect();
    let total_lines = lines.len();
    let chunk_size = (total_lines + 3) / 4; // Divide into 4 chunks

    let mut handles = vec![];
    let word_counts = Arc::new(Mutex::new(vec![
        std::collections::HashMap::new(),
        std::collections::HashMap::new(),
        std::collections::HashMap::new(),
        std::collections::HashMap::new(),
    ]));

    for i in 0..4 {
        let word_counts_clone = Arc::clone(&word_counts);
        let start = i * chunk_size;
        let end = usize::min(start + chunk_size, total_lines);
        let chunk_lines = lines[start..end].to_vec();

        let handle = thread::spawn(move || {
            let mut local_counts = std::collections::HashMap::new();
            for line in chunk_lines {
                for word in line.split_whitespace() {
                    let sanitized_word = word.trim_matches(|c: char| !c.is_alphanumeric()).to_string();
                    if !sanitized_word.is_empty() {
                        *local_counts.entry(sanitized_word).or_insert(0) += 1;
                    }
                }
            }
            local_counts
        });

        handles.push(handle);
    }

    for (i, handle) in handles.into_iter().enumerate() {
        let local_counts = handle.join().unwrap();
        let mut word_counts = word_counts.lock().unwrap();
        *word_counts.get_mut(i).unwrap() = local_counts;
    }

    for counts in word_counts.lock().unwrap().iter() {
        print_json(&counts);
    }

    Ok(())
}

fn print_json(counts: &std::collections::HashMap<String, usize>) {
    let json_output: Vec<String> = counts.iter()
        .map(|(key, value)| format!("\"{}\": {}", key, value))
        .collect();
    println!("{{ {} }}", json_output.join(", "));
}
rustc rust_word_count.rs
./rust_word_count
I extracted the following code from that output:
use std::fs::File;
use std::io::{self, BufRead};
use std::sync::{Arc, Mutex};
use std::thread;

fn main() -> io::Result<()> {
    let path = "/tmp/file.txt";
    let file = File::open(path)?;
    let reader = io::BufReader::new(file);

    let lines: Vec<String> = reader.lines().filter_map(Result::ok).collect();
    let total_lines = lines.len();
    let chunk_size = (total_lines + 3) / 4; // Divide into 4 chunks

    let mut handles = vec![];
    let word_counts = Arc::new(Mutex::new(vec![
        std::collections::HashMap::new(),
        std::collections::HashMap::new(),
        std::collections::HashMap::new(),
        std::collections::HashMap::new(),
    ]));

    for i in 0..4 {
        let word_counts_clone = Arc::clone(&word_counts);
        let start = i * chunk_size;
        let end = usize::min(start + chunk_size, total_lines);
        let chunk_lines = lines[start..end].to_vec();

        let handle = thread::spawn(move || {
            let mut local_counts = std::collections::HashMap::new();
            for line in chunk_lines {
                for word in line.split_whitespace() {
                    let sanitized_word = word.trim_matches(|c: char| !c.is_alphanumeric()).to_string();
                    if !sanitized_word.is_empty() {
                        *local_counts.entry(sanitized_word).or_insert(0) += 1;
                    }
                }
            }
            local_counts
        });

        handles.push(handle);
    }

    for (i, handle) in handles.into_iter().enumerate() {
        let local_counts = handle.join().unwrap();
        let mut word_counts = word_counts.lock().unwrap();
        *word_counts.get_mut(i).unwrap() = local_counts;
    }

    for counts in word_counts.lock().unwrap().iter() {
        print_json(&counts);
    }

    Ok(())
}

fn print_json(counts: &std::collections::HashMap<String, usize>) {
    let json_output: Vec<String> = counts.iter()
        .map(|(key, value)| format!("\"{}\": {}", key, value))
        .collect();
    println!("{{ {} }}", json_output.join(", "));
}
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are syntax errors. Just tell me if they tried to parallelize it.

use std::fs::File;
use std::io::{self, BufRead};
use std::sync::{Arc, Mutex};
use std::thread;

fn main() -> io::Result<()> {
    let path = "/tmp/file.txt";
    let file = File::open(path)?;
    let reader = io::BufReader::new(file);

    let lines: Vec<String> = reader.lines().filter_map(Result::ok).collect();
    let total_lines = lines.len();
    let chunk_size = (total_lines + 3) / 4; // Divide into 4 chunks

    let mut handles = vec![];
    let word_counts = Arc::new(Mutex::new(vec![
        std::collections::HashMap::new(),
        std::collections::HashMap::new(),
        std::collections::HashMap::new(),
        std::collections::HashMap::new(),
    ]));

    for i in 0..4 {
        let word_counts_clone = Arc::clone(&word_counts);
        let start = i * chunk_size;
        let end = usize::min(start + chunk_size, total_lines);
        let chunk_lines = lines[start..end].to_vec();

        let handle = thread::spawn(move || {
            let mut local_counts = std::collections::HashMap::new();
            for line in chunk_lines {
                for word in line.split_whitespace() {
                    let sanitized_word = word.trim_matches(|c: char| !c.is_alphanumeric()).to_string();
                    if !sanitized_word.is_empty() {
                        *local_counts.entry(sanitized_word).or_insert(0) += 1;
                    }
                }
            }
            local_counts
        });

        handles.push(handle);
    }

    for (i, handle) in handles.into_iter().enumerate() {
        let local_counts = handle.join().unwrap();
        let mut word_counts = word_counts.lock().unwrap();
        *word_counts.get_mut(i).unwrap() = local_counts;
    }

    for counts in word_counts.lock().unwrap().iter() {
        print_json(&counts);
    }

    Ok(())
}

fn print_json(counts: &std::collections::HashMap<String, usize>) {
    let json_output: Vec<String> = counts.iter()
        .map(|(key, value)| format!("\"{}\": {}", key, value))
        .collect();
    println!("{{ {} }}", json_output.join(", "));
}

Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
use std::fs::File;
use std::io::{self, BufRead};
use std::sync::{Arc, Mutex};
use std::thread;

fn main() -> io::Result<()> {
    let path = "/tmp/file.txt";
    let file = File::open(path)?;
    let reader = io::BufReader::new(file);

    let lines: Vec<String> = reader.lines().filter_map(Result::ok).collect();
    let total_lines = lines.len();
    let chunk_size = (total_lines + 3) / 4; // Divide into 4 chunks

    let mut handles = vec![];
    let word_counts = Arc::new(Mutex::new(vec![
        std::collections::HashMap::new(),
        std::collections::HashMap::new(),
        std::collections::HashMap::new(),
        std::collections::HashMap::new(),
    ]));

    for i in 0..4 {
        let word_counts_clone = Arc::clone(&word_counts);
        let start = i * chunk_size;
        let end = usize::min(start + chunk_size, total_lines);
        let chunk_lines = lines[start..end].to_vec();

        let handle = thread::spawn(move || {
            let mut local_counts = std::collections::HashMap::new();
            for line in chunk_lines {
                for word in line.split_whitespace() {
                    let sanitized_word = word.trim_matches(|c: char| !c.is_alphanumeric()).to_string();
                    if !sanitized_word.is_empty() {
                        *local_counts.entry(sanitized_word).or_insert(0) += 1;
                    }
                }
            }
            local_counts
        });

        handles.push(handle);
    }

    for (i, handle) in handles.into_iter().enumerate() {
        let local_counts = handle.join().unwrap();
        let mut word_counts = word_counts.lock().unwrap();
        *word_counts.get_mut(i).unwrap() = local_counts;
    }

    for counts in word_counts.lock().unwrap().iter() {
        print_json(&counts);
    }

    Ok(())
}

fn print_json(counts: &std::collections::HashMap<String, usize>) {
    let json_output: Vec<String> = counts.iter()
        .map(|(key, value)| format!("\"{}\": {}", key, value))
        .collect();
    println!("{{ {} }}", json_output.join(", "));
}
warning: unused variable: `word_counts_clone`
--> main.rs:25:13
|
25 | let word_counts_clone = Arc::clone(&word_counts);
| ^^^^^^^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_word_counts_clone`
|
= note: `#[warn(unused_variables)]` on by default
warning: 1 warning emitted
thread 'main' panicked at main.rs:29:32:
slice index starts at 2 but ends at 1
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False
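The panic above is a chunking edge case: the test file is a single line, so total_lines = 1 and chunk_size = (1 + 3) / 4 = 1, and thread 2 computes start = 2 while end = min(3, 1) = 1, making lines[2..1] an invalid slice. A minimal runnable sketch of the guard (my illustration, not part of the model's output) clamps start as well as end:

fn main() {
    let lines: Vec<String> = vec!["This is is is, an example".to_string()];
    let total_lines = lines.len();
    let chunk_size = (total_lines + 3) / 4; // same formula as the model's code
    for i in 0..4 {
        // Clamping start as well as end turns out-of-range chunks into empty slices.
        let start = usize::min(i * chunk_size, total_lines);
        let end = usize::min(start + chunk_size, total_lines);
        let chunk = &lines[start..end]; // start <= end always holds now, so no panic
        println!("thread {} gets {} line(s)", i, chunk.len());
    }
}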
I have set up the Docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 different JSON dicts, one per line. I'll do the merging. Do not import serde_json, write JSON output from scratch.
Here's a self-contained Rust program that reads a text file from `/tmp/file.txt`, counts the occurrences of each word in parallel using 4 threads, and outputs the results as JSON. The program uses standard library features and does not require any external crates or a `Cargo.toml` file.
use std::collections::HashMap;
use std::fs;
use std::io::{self, BufRead};
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    let file_path = "/tmp/file.txt";
    let content = fs::read_to_string(file_path).expect("Could not read the file");

    let num_threads = 4;
    let chunk_size = content.len() / num_threads;
    let mut handles = vec![];

    // Use an Arc and Mutex to safely share the word counts between threads
    let word_counts: Arc<Mutex<HashMap<String, usize>>> = Arc::new(Mutex::new(HashMap::new()));

    for i in 0..num_threads {
        let start = i * chunk_size;
        let end = if i == num_threads - 1 {
            content.len() // Last thread takes the remainder
        } else {
            start + chunk_size
        };

        let chunk = content[start..end].to_string();
        let word_counts_clone = Arc::clone(&word_counts);

        let handle = thread::spawn(move || {
            let mut local_counts = HashMap::new();
            for word in chunk.split_whitespace() {
                // Cleaning words from punctuation
                let cleaned_word = word.trim_matches(|c: char| !c.is_alphanumeric()).to_string();
                if !cleaned_word.is_empty() {
                    *local_counts.entry(cleaned_word).or_insert(0) += 1;
                }
            }

            let mut counts = word_counts_clone.lock().unwrap();
            for (word, count) in local_counts {
                *counts.entry(word).or_insert(0) += count;
            }
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().expect("Thread panicked");
    }

    // Print the JSON output
    let counts = word_counts.lock().unwrap();
    for (word, count) in counts.iter() {
        println!("\"{}\": {}", word, count);
    }
}
rustc yourfile.rs
./yourfile
I extracted the following code from that output:
use std::collections::HashMap;
use std::fs;
use std::io::{self, BufRead};
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    let file_path = "/tmp/file.txt";
    let content = fs::read_to_string(file_path).expect("Could not read the file");

    let num_threads = 4;
    let chunk_size = content.len() / num_threads;
    let mut handles = vec![];

    // Use an Arc and Mutex to safely share the word counts between threads
    let word_counts: Arc<Mutex<HashMap<String, usize>>> = Arc::new(Mutex::new(HashMap::new()));

    for i in 0..num_threads {
        let start = i * chunk_size;
        let end = if i == num_threads - 1 {
            content.len() // Last thread takes the remainder
        } else {
            start + chunk_size
        };

        let chunk = content[start..end].to_string();
        let word_counts_clone = Arc::clone(&word_counts);

        let handle = thread::spawn(move || {
            let mut local_counts = HashMap::new();
            for word in chunk.split_whitespace() {
                // Cleaning words from punctuation
                let cleaned_word = word.trim_matches(|c: char| !c.is_alphanumeric()).to_string();
                if !cleaned_word.is_empty() {
                    *local_counts.entry(cleaned_word).or_insert(0) += 1;
                }
            }

            let mut counts = word_counts_clone.lock().unwrap();
            for (word, count) in local_counts {
                *counts.entry(word).or_insert(0) += count;
            }
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().expect("Thread panicked");
    }

    // Print the JSON output
    let counts = word_counts.lock().unwrap();
    for (word, count) in counts.iter() {
        println!("\"{}\": {}", word, count);
    }
}
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are syntax errors. Just tell me if they tried to parallelize it.

use std::collections::HashMap;
use std::fs;
use std::io::{self, BufRead};
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    let file_path = "/tmp/file.txt";
    let content = fs::read_to_string(file_path).expect("Could not read the file");

    let num_threads = 4;
    let chunk_size = content.len() / num_threads;
    let mut handles = vec![];

    // Use an Arc and Mutex to safely share the word counts between threads
    let word_counts: Arc<Mutex<HashMap<String, usize>>> = Arc::new(Mutex::new(HashMap::new()));

    for i in 0..num_threads {
        let start = i * chunk_size;
        let end = if i == num_threads - 1 {
            content.len() // Last thread takes the remainder
        } else {
            start + chunk_size
        };

        let chunk = content[start..end].to_string();
        let word_counts_clone = Arc::clone(&word_counts);

        let handle = thread::spawn(move || {
            let mut local_counts = HashMap::new();
            for word in chunk.split_whitespace() {
                // Cleaning words from punctuation
                let cleaned_word = word.trim_matches(|c: char| !c.is_alphanumeric()).to_string();
                if !cleaned_word.is_empty() {
                    *local_counts.entry(cleaned_word).or_insert(0) += 1;
                }
            }

            let mut counts = word_counts_clone.lock().unwrap();
            for (word, count) in local_counts {
                *counts.entry(word).or_insert(0) += count;
            }
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().expect("Thread panicked");
    }

    // Print the JSON output
    let counts = word_counts.lock().unwrap();
    for (word, count) in counts.iter() {
        println!("\"{}\": {}", word, count);
    }
}

Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
use std::collections::HashMap;
use std::fs;
use std::io::{self, BufRead};
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    let file_path = "/tmp/file.txt";
    let content = fs::read_to_string(file_path).expect("Could not read the file");

    let num_threads = 4;
    let chunk_size = content.len() / num_threads;
    let mut handles = vec![];

    // Use an Arc and Mutex to safely share the word counts between threads
    let word_counts: Arc<Mutex<HashMap<String, usize>>> = Arc::new(Mutex::new(HashMap::new()));

    for i in 0..num_threads {
        let start = i * chunk_size;
        let end = if i == num_threads - 1 {
            content.len() // Last thread takes the remainder
        } else {
            start + chunk_size
        };

        let chunk = content[start..end].to_string();
        let word_counts_clone = Arc::clone(&word_counts);

        let handle = thread::spawn(move || {
            let mut local_counts = HashMap::new();
            for word in chunk.split_whitespace() {
                // Cleaning words from punctuation
                let cleaned_word = word.trim_matches(|c: char| !c.is_alphanumeric()).to_string();
                if !cleaned_word.is_empty() {
                    *local_counts.entry(cleaned_word).or_insert(0) += 1;
                }
            }

            let mut counts = word_counts_clone.lock().unwrap();
            for (word, count) in local_counts {
                *counts.entry(word).or_insert(0) += count;
            }
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().expect("Thread panicked");
    }

    // Print the JSON output
    let counts = word_counts.lock().unwrap();
    for (word, count) in counts.iter() {
        println!("\"{}\": {}", word, count);
    }
}
"Darkness": 1
"e": 1
"Light": 1
"ness": 1
"Heaven": 1
"despair": 1
"th": 1
"incredulity": 1
"belief": 1
"wisdom": 1
"had": 2
"we": 4
"hope": 1
"us": 2
"worst": 1
"the": 10
"foolish": 1
"were": 2
"epoch": 2
"all": 2
"everything": 1
"going": 2
"was": 10
"best": 1
"it": 10
"spring": 1
"before": 2
"nothing": 1
"direct": 2
"other": 1
"to": 1
"winter": 1
"season": 2
"age": 2
"times": 2
"of": 10
"way": 1
warning: unused imports: `BufRead`, `self`
--> main.rs:4:15
|
4 | use std::io::{self, BufRead};
| ^^^^ ^^^^^^^
|
= note: `#[warn(unused_imports)]` on by default
warning: 1 warning emitted
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False
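This attempt compiles and runs, but it fails the test for two reasons visible in the output. First, it slices the raw byte string at arbitrary offsets, so words straddling a chunk boundary are counted as fragments (note "th": 1 and "e": 1, and "foolish": 1 alongside "ness": 1, which also explains "the" at 10 instead of the expected 11). Second, it strips punctuation, while the expected output keeps tokens like "times," intact. A sketch of boundary alignment (the helper chunk_bounds is my illustration, not from the transcript) pushes each chunk end forward to the next whitespace so no word is split:

// Align byte-offset chunk boundaries to whitespace. Assumes the advance loop only
// stops on ASCII whitespace or end-of-input, which also keeps UTF-8 slicing safe.
fn chunk_bounds(content: &str, num_chunks: usize) -> Vec<(usize, usize)> {
    let bytes = content.as_bytes();
    let approx = content.len() / num_chunks;
    let mut bounds = Vec::new();
    let mut start = 0;
    for i in 0..num_chunks {
        let mut end = if i == num_chunks - 1 { content.len() } else { (i + 1) * approx };
        // Push the boundary forward to the next whitespace so words stay intact.
        while end < content.len() && !bytes[end].is_ascii_whitespace() {
            end += 1;
        }
        bounds.push((start, end));
        start = end;
    }
    bounds
}

fn main() {
    let content = "This is is is, an example";
    for (s, e) in chunk_bounds(content, 4) {
        println!("{:?}", &content[s..e]); // no word appears in two chunks
    }
}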
I have set up the Docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 different JSON dicts, one per line. I'll do the merging. Do not import serde_json, write JSON output from scratch.
Certainly! Below is a self-contained Rust program that reads a text file from `/tmp/file.txt`, counts the occurrences of each word, and prints the results in JSON format. The program splits the file into four parts and processes them in parallel using threads.
use std::collections::HashMap;
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
use std::sync::{Arc, Mutex};
use std::thread;

fn main() -> io::Result<()> {
    let path = "/tmp/file.txt";
    let file = File::open(path)?;
    let reader = io::BufReader::new(file);

    let lines: Vec<String> = reader.lines().filter_map(Result::ok).collect();
    let total_lines = lines.len();

    // Determine the number of lines each thread will process
    let num_threads = 4;
    let chunk_size = (total_lines + num_threads - 1) / num_threads;

    // Shared mutable state for counting words
    let word_count: Arc<Mutex<HashMap<String, usize>>> = Arc::new(Mutex::new(HashMap::new()));

    let mut handles = vec![];

    for i in 0..num_threads {
        let word_count_clone = Arc::clone(&word_count);
        let start = i * chunk_size;
        let end = std::cmp::min(start + chunk_size, total_lines);

        let handle = thread::spawn(move || {
            let mut local_count = HashMap::new();
            for line in &lines[start..end] {
                for word in line.split_whitespace() {
                    let word = word.trim_matches(|c: char| !c.is_alphanumeric()).to_string();
                    *local_count.entry(word).or_insert(0) += 1;
                }
            }

            // Lock and merge local counts into the shared word count
            let mut count = word_count_clone.lock().unwrap();
            for (word, count_value) in local_count {
                *count.entry(word).or_insert(0) += count_value;
            }
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }

    let count = word_count.lock().unwrap();

    // Print the output in JSON format
    for (word, &count_value) in count.iter() {
        println!("\"{}\": {}", word, count_value);
    }

    Ok(())
}
rustc rust_word_count.rs
./rust_word_count
I extracted the following code from that output:
use std::collections::HashMap;
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
use std::sync::{Arc, Mutex};
use std::thread;

fn main() -> io::Result<()> {
    let path = "/tmp/file.txt";
    let file = File::open(path)?;
    let reader = io::BufReader::new(file);

    let lines: Vec<String> = reader.lines().filter_map(Result::ok).collect();
    let total_lines = lines.len();

    // Determine the number of lines each thread will process
    let num_threads = 4;
    let chunk_size = (total_lines + num_threads - 1) / num_threads;

    // Shared mutable state for counting words
    let word_count: Arc<Mutex<HashMap<String, usize>>> = Arc::new(Mutex::new(HashMap::new()));

    let mut handles = vec![];

    for i in 0..num_threads {
        let word_count_clone = Arc::clone(&word_count);
        let start = i * chunk_size;
        let end = std::cmp::min(start + chunk_size, total_lines);

        let handle = thread::spawn(move || {
            let mut local_count = HashMap::new();
            for line in &lines[start..end] {
                for word in line.split_whitespace() {
                    let word = word.trim_matches(|c: char| !c.is_alphanumeric()).to_string();
                    *local_count.entry(word).or_insert(0) += 1;
                }
            }

            // Lock and merge local counts into the shared word count
            let mut count = word_count_clone.lock().unwrap();
            for (word, count_value) in local_count {
                *count.entry(word).or_insert(0) += count_value;
            }
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }

    let count = word_count.lock().unwrap();

    // Print the output in JSON format
    for (word, &count_value) in count.iter() {
        println!("\"{}\": {}", word, count_value);
    }

    Ok(())
}
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are syntax errors. Just tell me if they tried to parallelize it.

use std::collections::HashMap;
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
use std::sync::{Arc, Mutex};
use std::thread;

fn main() -> io::Result<()> {
    let path = "/tmp/file.txt";
    let file = File::open(path)?;
    let reader = io::BufReader::new(file);

    let lines: Vec<String> = reader.lines().filter_map(Result::ok).collect();
    let total_lines = lines.len();

    // Determine the number of lines each thread will process
    let num_threads = 4;
    let chunk_size = (total_lines + num_threads - 1) / num_threads;

    // Shared mutable state for counting words
    let word_count: Arc<Mutex<HashMap<String, usize>>> = Arc::new(Mutex::new(HashMap::new()));

    let mut handles = vec![];

    for i in 0..num_threads {
        let word_count_clone = Arc::clone(&word_count);
        let start = i * chunk_size;
        let end = std::cmp::min(start + chunk_size, total_lines);

        let handle = thread::spawn(move || {
            let mut local_count = HashMap::new();
            for line in &lines[start..end] {
                for word in line.split_whitespace() {
                    let word = word.trim_matches(|c: char| !c.is_alphanumeric()).to_string();
                    *local_count.entry(word).or_insert(0) += 1;
                }
            }

            // Lock and merge local counts into the shared word count
            let mut count = word_count_clone.lock().unwrap();
            for (word, count_value) in local_count {
                *count.entry(word).or_insert(0) += count_value;
            }
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }

    let count = word_count.lock().unwrap();

    // Print the output in JSON format
    for (word, &count_value) in count.iter() {
        println!("\"{}\": {}", word, count_value);
    }

    Ok(())
}

Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
use std::collections::HashMap;
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
use std::sync::{Arc, Mutex};
use std::thread;

fn main() -> io::Result<()> {
    let path = "/tmp/file.txt";
    let file = File::open(path)?;
    let reader = io::BufReader::new(file);

    let lines: Vec<String> = reader.lines().filter_map(Result::ok).collect();
    let total_lines = lines.len();

    // Determine the number of lines each thread will process
    let num_threads = 4;
    let chunk_size = (total_lines + num_threads - 1) / num_threads;

    // Shared mutable state for counting words
    let word_count: Arc<Mutex<HashMap<String, usize>>> = Arc::new(Mutex::new(HashMap::new()));

    let mut handles = vec![];

    for i in 0..num_threads {
        let word_count_clone = Arc::clone(&word_count);
        let start = i * chunk_size;
        let end = std::cmp::min(start + chunk_size, total_lines);

        let handle = thread::spawn(move || {
            let mut local_count = HashMap::new();
            for line in &lines[start..end] {
                for word in line.split_whitespace() {
                    let word = word.trim_matches(|c: char| !c.is_alphanumeric()).to_string();
                    *local_count.entry(word).or_insert(0) += 1;
                }
            }

            // Lock and merge local counts into the shared word count
            let mut count = word_count_clone.lock().unwrap();
            for (word, count_value) in local_count {
                *count.entry(word).or_insert(0) += count_value;
            }
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }

    let count = word_count.lock().unwrap();

    // Print the output in JSON format
    for (word, &count_value) in count.iter() {
        println!("\"{}\": {}", word, count_value);
    }

    Ok(())
}
warning: unused import: `std::path::Path`
--> main.rs:5:5
|
5 | use std::path::Path;
| ^^^^^^^^^^^^^^^
|
= note: `#[warn(unused_imports)]` on by default
error[E0382]: use of moved value: `lines`
--> main.rs:30:36
|
13 | let lines: Vec<String> = reader.lines().filter_map(Result::ok).collect();
| ----- move occurs because `lines` has type `Vec<String>`, which does not implement the `Copy` trait
...
30 | let handle = thread::spawn(move || {
| ^^^^^^^ value moved into closure here, in previous iteration of loop
31 | let mut local_count = HashMap::new();
32 | for line in &lines[start..end] {
| ----- use occurs due to use in closure
error: aborting due to 1 previous error; 1 warning emitted
For more information about this error, try `rustc --explain E0382`.
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False
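E0382 here is the classic moved-capture mistake: `lines` is moved into the closure on the first loop iteration, so the later iterations have nothing left to capture. Since the worker threads only read the data, one common fix is to share it through an Arc and clone the handle once per thread; a minimal runnable sketch (my illustration, not the model's code):

use std::sync::Arc;
use std::thread;

fn main() {
    let lines: Vec<String> = vec!["a b".into(), "c".into()];
    let lines = Arc::new(lines); // shared, read-only; avoids the per-iteration move
    let mut handles = vec![];
    for i in 0..4 {
        let lines = Arc::clone(&lines); // each thread captures its own Arc handle
        handles.push(thread::spawn(move || {
            // ... index into `lines` here without moving the Vec itself ...
            lines.len() + i
        }));
    }
    for handle in handles {
        handle.join().unwrap();
    }
}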
I have set up the Docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 different JSON dicts, one per line. I'll do the merging. Do not import serde_json, write JSON output from scratch.
Here is a self-contained Rust program that reads a text file from `/tmp/file.txt`, counts the occurrences of each word in parallel using 4 threads, and prints the results in JSON format. This program does not use any external crates, so you can compile it directly with `rustc`.
use std::fs::File;
use std::io::{self, BufRead};
use std::sync::{Arc, Mutex};
use std::thread;
use std::collections::HashMap;

fn main() -> io::Result<()> {
    let file_path = "/tmp/file.txt";
    let file = File::open(file_path)?;
    let reader = io::BufReader::new(file);

    // Collect lines into a vector
    let lines: Vec<String> = reader.lines().filter_map(Result::ok).collect();
    let total_lines = lines.len();

    // Create an Arc to share results between threads
    let results: Arc<Mutex<Vec<HashMap<String, usize>>>> = Arc::new(Mutex::new(vec![HashMap::new(); 4]));

    let mut handles = vec![];

    // Spawn 4 threads
    for i in 0..4 {
        let lines = lines.clone();
        let results = Arc::clone(&results);
        let start = (total_lines / 4) * i;
        let end = if i == 3 { total_lines } else { (total_lines / 4) * (i + 1) };

        let handle = thread::spawn(move || {
            let mut local_count = HashMap::new();
            for line in &lines[start..end] {
                for word in line.split_whitespace() {
                    // Cleaning word from punctuation
                    let cleaned_word = word.trim_matches(|c: char| !c.is_alphanumeric()).to_string();
                    if !cleaned_word.is_empty() {
                        *local_count.entry(cleaned_word).or_insert(0) += 1;
                    }
                }
            }
            local_count
        });

        handles.push(handle);
    }

    // Wait for all threads to finish and collect results
    for (i, handle) in handles.into_iter().enumerate() {
        let local_count = handle.join().unwrap();
        let mut results_lock = results.lock().unwrap();
        results_lock[i] = local_count;
    }

    // Print the results in JSON format
    let results_lock = results.lock().unwrap();
    for result in results_lock.iter() {
        print!("{");
        let mut first = true;
        for (key, value) in result {
            if !first {
                print!(", ");
            }
            print!("\"{}\": {}", key, value);
            first = false;
        }
        println!("}");
    }

    Ok(())
}
rustc rust_word_count.rs
./rust_word_count
I extracted the following code from that output:
use std::fs::File;
use std::io::{self, BufRead};
use std::sync::{Arc, Mutex};
use std::thread;
use std::collections::HashMap;

fn main() -> io::Result<()> {
    let file_path = "/tmp/file.txt";
    let file = File::open(file_path)?;
    let reader = io::BufReader::new(file);

    // Collect lines into a vector
    let lines: Vec<String> = reader.lines().filter_map(Result::ok).collect();
    let total_lines = lines.len();

    // Create an Arc to share results between threads
    let results: Arc<Mutex<Vec<HashMap<String, usize>>>> = Arc::new(Mutex::new(vec![HashMap::new(); 4]));

    let mut handles = vec![];

    // Spawn 4 threads
    for i in 0..4 {
        let lines = lines.clone();
        let results = Arc::clone(&results);
        let start = (total_lines / 4) * i;
        let end = if i == 3 { total_lines } else { (total_lines / 4) * (i + 1) };

        let handle = thread::spawn(move || {
            let mut local_count = HashMap::new();
            for line in &lines[start..end] {
                for word in line.split_whitespace() {
                    // Cleaning word from punctuation
                    let cleaned_word = word.trim_matches(|c: char| !c.is_alphanumeric()).to_string();
                    if !cleaned_word.is_empty() {
                        *local_count.entry(cleaned_word).or_insert(0) += 1;
                    }
                }
            }
            local_count
        });

        handles.push(handle);
    }

    // Wait for all threads to finish and collect results
    for (i, handle) in handles.into_iter().enumerate() {
        let local_count = handle.join().unwrap();
        let mut results_lock = results.lock().unwrap();
        results_lock[i] = local_count;
    }

    // Print the results in JSON format
    let results_lock = results.lock().unwrap();
    for result in results_lock.iter() {
        print!("{");
        let mut first = true;
        for (key, value) in result {
            if !first {
                print!(", ");
            }
            print!("\"{}\": {}", key, value);
            first = false;
        }
        println!("}");
    }

    Ok(())
}
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are syntax errors. Just tell me if they tried to parallelize it.

use std::fs::File;
use std::io::{self, BufRead};
use std::sync::{Arc, Mutex};
use std::thread;
use std::collections::HashMap;

fn main() -> io::Result<()> {
    let file_path = "/tmp/file.txt";
    let file = File::open(file_path)?;
    let reader = io::BufReader::new(file);

    // Collect lines into a vector
    let lines: Vec<String> = reader.lines().filter_map(Result::ok).collect();
    let total_lines = lines.len();

    // Create an Arc to share results between threads
    let results: Arc<Mutex<Vec<HashMap<String, usize>>>> = Arc::new(Mutex::new(vec![HashMap::new(); 4]));

    let mut handles = vec![];

    // Spawn 4 threads
    for i in 0..4 {
        let lines = lines.clone();
        let results = Arc::clone(&results);
        let start = (total_lines / 4) * i;
        let end = if i == 3 { total_lines } else { (total_lines / 4) * (i + 1) };

        let handle = thread::spawn(move || {
            let mut local_count = HashMap::new();
            for line in &lines[start..end] {
                for word in line.split_whitespace() {
                    // Cleaning word from punctuation
                    let cleaned_word = word.trim_matches(|c: char| !c.is_alphanumeric()).to_string();
                    if !cleaned_word.is_empty() {
                        *local_count.entry(cleaned_word).or_insert(0) += 1;
                    }
                }
            }
            local_count
        });

        handles.push(handle);
    }

    // Wait for all threads to finish and collect results
    for (i, handle) in handles.into_iter().enumerate() {
        let local_count = handle.join().unwrap();
        let mut results_lock = results.lock().unwrap();
        results_lock[i] = local_count;
    }

    // Print the results in JSON format
    let results_lock = results.lock().unwrap();
    for result in results_lock.iter() {
        print!("{");
        let mut first = true;
        for (key, value) in result {
            if !first {
                print!(", ");
            }
            print!("\"{}\": {}", key, value);
            first = false;
        }
        println!("}");
    }

    Ok(())
}

Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
use std::fs::File;
use std::io::{self, BufRead};
use std::sync::{Arc, Mutex};
use std::thread;
use std::collections::HashMap;

fn main() -> io::Result<()> {
    let file_path = "/tmp/file.txt";
    let file = File::open(file_path)?;
    let reader = io::BufReader::new(file);

    // Collect lines into a vector
    let lines: Vec<String> = reader.lines().filter_map(Result::ok).collect();
    let total_lines = lines.len();

    // Create an Arc to share results between threads
    let results: Arc<Mutex<Vec<HashMap<String, usize>>>> = Arc::new(Mutex::new(vec![HashMap::new(); 4]));

    let mut handles = vec![];

    // Spawn 4 threads
    for i in 0..4 {
        let lines = lines.clone();
        let results = Arc::clone(&results);
        let start = (total_lines / 4) * i;
        let end = if i == 3 { total_lines } else { (total_lines / 4) * (i + 1) };

        let handle = thread::spawn(move || {
            let mut local_count = HashMap::new();
            for line in &lines[start..end] {
                for word in line.split_whitespace() {
                    // Cleaning word from punctuation
                    let cleaned_word = word.trim_matches(|c: char| !c.is_alphanumeric()).to_string();
                    if !cleaned_word.is_empty() {
                        *local_count.entry(cleaned_word).or_insert(0) += 1;
                    }
                }
            }
            local_count
        });

        handles.push(handle);
    }

    // Wait for all threads to finish and collect results
    for (i, handle) in handles.into_iter().enumerate() {
        let local_count = handle.join().unwrap();
        let mut results_lock = results.lock().unwrap();
        results_lock[i] = local_count;
    }

    // Print the results in JSON format
    let results_lock = results.lock().unwrap();
    for result in results_lock.iter() {
        print!("{");
        let mut first = true;
        for (key, value) in result {
            if !first {
                print!(", ");
            }
            print!("\"{}\": {}", key, value);
            first = false;
        }
        println!("}");
    }

    Ok(())
}
error: invalid format string: expected `'}'` but string was terminated
--> main.rs:56:18
|
56 | print!("{");
| -^ expected `'}'` in format string
| |
| because of this opening brace
|
= note: if you intended to print `{`, you can escape it using `{{`
error: invalid format string: unmatched `}` found
--> main.rs:65:19
|
65 | println!("}");
| ^ unmatched `}` in format string
|
= note: if you intended to print `}`, you can escape it using `}}`
error: aborting due to 2 previous errors
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False
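This attempt fails to compile because `{` and `}` are metacharacters in Rust format strings; as the compiler notes say, literal braces must be doubled. A tiny runnable sketch (my illustration):

fn main() {
    // Braces are format-string metacharacters; double them to print literals.
    print!("{{");
    print!("\"word\": {}", 2);
    println!("}}"); // prints {"word": 2}
}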
I have set up the Docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 different JSON dicts, one per line. I'll do the merging. Do not import serde_json, write JSON output from scratch.
Here's a self-contained Rust program that counts the occurrences of each word in a text file located at `/tmp/file.txt`. The program splits the file into four parts, processes each part in parallel, and outputs the word counts in JSON format, one JSON object per line. You can compile it with `rustc yourfile.rs` and run it as specified.
use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    let file_path = "/tmp/file.txt";
    let file = File::open(file_path).expect("Could not open file");
    let reader = BufReader::new(file);

    let lines: Vec<String> = reader.lines().filter_map(Result::ok).collect();
    let total_lines = lines.len();
    let chunk_size = (total_lines + 3) / 4; // Divide lines into 4 chunks

    let handles: Vec<_> = (0..4).map(|i| {
        let lines_clone = lines.clone();
        thread::spawn(move || {
            let mut word_count = HashMap::new();
            let start = i * chunk_size;
            let end = std::cmp::min(start + chunk_size, total_lines);

            for line in &lines_clone[start..end] {
                let words = line.split_whitespace();
                for word in words {
                    let cleaned_word = word.trim_matches(|c: char| !c.is_alphanumeric());
                    *word_count.entry(cleaned_word.to_string()).or_insert(0) += 1;
                }
            }

            word_count
        })
    }).collect();

    let mut results = Vec::new();
    for handle in handles {
        let word_count = handle.join().expect("Thread panicked");
        results.push(word_count);
    }

    for result in results {
        print_json(&result);
    }
}

fn print_json(word_count: &HashMap<String, usize>) {
    let json_str = word_count.iter()
        .map(|(key, value)| format!("\"{}\": {}", key, value))
        .collect::<Vec<_>>()
        .join(", ");
    println!("{{ {} }}", json_str);
}
rustc rust_word_count.rs
./rust_word_count
I extracted the following code from that output:
use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    let file_path = "/tmp/file.txt";
    let file = File::open(file_path).expect("Could not open file");
    let reader = BufReader::new(file);

    let lines: Vec<String> = reader.lines().filter_map(Result::ok).collect();
    let total_lines = lines.len();
    let chunk_size = (total_lines + 3) / 4; // Divide lines into 4 chunks

    let handles: Vec<_> = (0..4).map(|i| {
        let lines_clone = lines.clone();
        thread::spawn(move || {
            let mut word_count = HashMap::new();
            let start = i * chunk_size;
            let end = std::cmp::min(start + chunk_size, total_lines);

            for line in &lines_clone[start..end] {
                let words = line.split_whitespace();
                for word in words {
                    let cleaned_word = word.trim_matches(|c: char| !c.is_alphanumeric());
                    *word_count.entry(cleaned_word.to_string()).or_insert(0) += 1;
                }
            }

            word_count
        })
    }).collect();

    let mut results = Vec::new();
    for handle in handles {
        let word_count = handle.join().expect("Thread panicked");
        results.push(word_count);
    }

    for result in results {
        print_json(&result);
    }
}

fn print_json(word_count: &HashMap<String, usize>) {
    let json_str = word_count.iter()
        .map(|(key, value)| format!("\"{}\": {}", key, value))
        .collect::<Vec<_>>()
        .join(", ");
    println!("{{ {} }}", json_str);
}
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are syntax errors. Just tell me if they tried to parallelize it.

use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    let file_path = "/tmp/file.txt";
    let file = File::open(file_path).expect("Could not open file");
    let reader = BufReader::new(file);

    let lines: Vec<String> = reader.lines().filter_map(Result::ok).collect();
    let total_lines = lines.len();
    let chunk_size = (total_lines + 3) / 4; // Divide lines into 4 chunks

    let handles: Vec<_> = (0..4).map(|i| {
        let lines_clone = lines.clone();
        thread::spawn(move || {
            let mut word_count = HashMap::new();
            let start = i * chunk_size;
            let end = std::cmp::min(start + chunk_size, total_lines);

            for line in &lines_clone[start..end] {
                let words = line.split_whitespace();
                for word in words {
                    let cleaned_word = word.trim_matches(|c: char| !c.is_alphanumeric());
                    *word_count.entry(cleaned_word.to_string()).or_insert(0) += 1;
                }
            }

            word_count
        })
    }).collect();

    let mut results = Vec::new();
    for handle in handles {
        let word_count = handle.join().expect("Thread panicked");
        results.push(word_count);
    }

    for result in results {
        print_json(&result);
    }
}

fn print_json(word_count: &HashMap<String, usize>) {
    let json_str = word_count.iter()
        .map(|(key, value)| format!("\"{}\": {}", key, value))
        .collect::<Vec<_>>()
        .join(", ");
    println!("{{ {} }}", json_str);
}

Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    let file_path = "/tmp/file.txt";
    let file = File::open(file_path).expect("Could not open file");
    let reader = BufReader::new(file);

    let lines: Vec<String> = reader.lines().filter_map(Result::ok).collect();
    let total_lines = lines.len();
    let chunk_size = (total_lines + 3) / 4; // Divide lines into 4 chunks

    let handles: Vec<_> = (0..4).map(|i| {
        let lines_clone = lines.clone();
        thread::spawn(move || {
            let mut word_count = HashMap::new();
            let start = i * chunk_size;
            let end = std::cmp::min(start + chunk_size, total_lines);

            for line in &lines_clone[start..end] {
                let words = line.split_whitespace();
                for word in words {
                    let cleaned_word = word.trim_matches(|c: char| !c.is_alphanumeric());
                    *word_count.entry(cleaned_word.to_string()).or_insert(0) += 1;
                }
            }

            word_count
        })
    }).collect();

    let mut results = Vec::new();
    for handle in handles {
        let word_count = handle.join().expect("Thread panicked");
        results.push(word_count);
    }

    for result in results {
        print_json(&result);
    }
}

fn print_json(word_count: &HashMap<String, usize>) {
    let json_str = word_count.iter()
        .map(|(key, value)| format!("\"{}\": {}", key, value))
        .collect::<Vec<_>>()
        .join(", ");
    println!("{{ {} }}", json_str);
}
warning: unused imports: `Arc`, `Mutex`
--> main.rs:5:17
|
5 | use std::sync::{Arc, Mutex};
| ^^^ ^^^^^
|
= note: `#[warn(unused_imports)]` on by default
warning: 1 warning emitted
thread '<unnamed>' panicked at main.rs:22:37thread ':
slice index starts at 3 but ends at 1
<unnamed>note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
' panicked at main.rs:22:37:
slice index starts at 2 but ends at 1
thread 'main' panicked at main.rs:35:40:
Thread panicked: Any { .. }
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False
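For reference, every attempt in this run failed for one of three reasons: unclamped chunk bounds that panic on short files (rounds 1 and 5), unescaped braces in a format string (round 4), a moved capture (round 3), or byte-offset chunking plus punctuation trimming that contradicts the prompt's own example, which expects tokens like "is,": 1 and "times,": 2 with punctuation attached (round 2). Below is a sketch of a version consistent with the prompt, combining the fixes above; it is my illustration, not taken from the transcript.

// Sketch: 4 threads over clamped line ranges, whitespace-only tokenization
// (punctuation kept attached, matching the prompt's {"is,": 1} example),
// hand-rolled JSON with escaped braces, one dict per line.
use std::collections::HashMap;
use std::fs;
use std::thread;

fn main() {
    let content = fs::read_to_string("/tmp/file.txt").expect("Could not read /tmp/file.txt");
    let lines: Vec<String> = content.lines().map(str::to_string).collect();
    let total = lines.len();
    let chunk = (total + 3) / 4;

    let mut handles = Vec::new();
    for i in 0..4 {
        // Clamp both ends so short files yield empty chunks instead of panics.
        let start = usize::min(i * chunk, total);
        let end = usize::min(start + chunk, total);
        let slice: Vec<String> = lines[start..end].to_vec();
        handles.push(thread::spawn(move || {
            let mut counts: HashMap<String, usize> = HashMap::new();
            for line in &slice {
                for word in line.split_whitespace() {
                    // Keep punctuation attached, as the prompt's example requires.
                    *counts.entry(word.to_string()).or_insert(0) += 1;
                }
            }
            counts
        }));
    }

    for handle in handles {
        let counts = handle.join().expect("worker thread panicked");
        let body: Vec<String> = counts
            .iter()
            .map(|(w, c)| format!("\"{}\": {}", w.replace('\\', "\\\\").replace('"', "\\\""), c))
            .collect();
        // Doubled braces print literal { and }.
        println!("{{{}}}", body.join(", "));
    }
}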