I have setup the docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Here's a self-contained Rust program that meets your requirements:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 | use std::env; use std::fs::File; use std::io::{BufRead, BufReader}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file_path = "/tmp/file.txt"; let mut threads = Vec::new(); let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); let num_lines = reader.lines().count(); let chunk_size = (num_lines + 3) / 4; // Divide the lines into 4 chunks // Create a shared map for each thread to store word counts let shared_map: Arc<Mutex<HashMap<String, u32>>> = Arc::new(Mutex::new(HashMap::new())); for chunk_start in (0..num_lines).step_by(chunk_size) { let chunk_end = std::cmp::min(chunk_start + chunk_size, num_lines); let shared_map_clone = shared_map.clone(); threads.push(thread::spawn(move || { let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); let mut line_counter = 0; let mut word_counts = HashMap::new(); for line in reader.lines().skip(chunk_start).take(chunk_end - chunk_start) { let line = line.expect("Failed to read line"); let words: Vec<_> = line.split_whitespace().collect(); for word in words { *word_counts.entry(word.to_string()).or_insert(0) += 1; } line_counter += 1; } // Merge word counts into the shared map let mut shared_map = shared_map_clone.lock().unwrap(); for (word, count) in word_counts { *shared_map.entry(word).or_insert(0) += count; } })); } // Wait for all threads to finish for thread in threads { thread.join().expect("Failed to join thread"); } // Print the merged word counts as JSON let shared_map = shared_map.lock().unwrap(); for (word, count) in &*shared_map { let json_str = format!("{{\"{}\": {}}}", word, count); println!("{}", json_str); } } |
I extracted the following code from that output:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 | use std::env; use std::fs::File; use std::io::{BufRead, BufReader}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file_path = "/tmp/file.txt"; let mut threads = Vec::new(); let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); let num_lines = reader.lines().count(); let chunk_size = (num_lines + 3) / 4; // Divide the lines into 4 chunks // Create a shared map for each thread to store word counts let shared_map: Arc<Mutex<HashMap<String, u32>>> = Arc::new(Mutex::new(HashMap::new())); for chunk_start in (0..num_lines).step_by(chunk_size) { let chunk_end = std::cmp::min(chunk_start + chunk_size, num_lines); let shared_map_clone = shared_map.clone(); threads.push(thread::spawn(move || { let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); let mut line_counter = 0; let mut word_counts = HashMap::new(); for line in reader.lines().skip(chunk_start).take(chunk_end - chunk_start) { let line = line.expect("Failed to read line"); let words: Vec<_> = line.split_whitespace().collect(); for word in words { *word_counts.entry(word.to_string()).or_insert(0) += 1; } line_counter += 1; } // Merge word counts into the shared map let mut shared_map = shared_map_clone.lock().unwrap(); for (word, count) in word_counts { *shared_map.entry(word).or_insert(0) += count; } })); } // Wait for all threads to finish for thread in threads { thread.join().expect("Failed to join thread"); } // Print the merged word counts as JSON let shared_map = shared_map.lock().unwrap(); for (word, count) in &*shared_map { let json_str = format!("{{\"{}\": {}}}", word, count); println!("{}", json_str); } } |
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are synatx errors. Just tell me if they tried to parallelize it. use std::env; use std::fs::File; use std::io::{BufRead, BufReader}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file_path = "/tmp/file.txt"; let mut threads = Vec::new(); let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); let num_lines = reader.lines().count(); let chunk_size = (num_lines + 3) / 4; // Divide the lines into 4 chunks // Create a shared map for each thread to store word counts let shared_map: Arc<Mutex<HashMap<String, u32>>> = Arc::new(Mutex::new(HashMap::new())); for chunk_start in (0..num_lines).step_by(chunk_size) { let chunk_end = std::cmp::min(chunk_start + chunk_size, num_lines); let shared_map_clone = shared_map.clone(); threads.push(thread::spawn(move || { let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); let mut line_counter = 0; let mut word_counts = HashMap::new(); for line in reader.lines().skip(chunk_start).take(chunk_end - chunk_start) { let line = line.expect("Failed to read line"); let words: Vec<_> = line.split_whitespace().collect(); for word in words { *word_counts.entry(word.to_string()).or_insert(0) += 1; } line_counter += 1; } // Merge word counts into the shared map let mut shared_map = shared_map_clone.lock().unwrap(); for (word, count) in word_counts { *shared_map.entry(word).or_insert(0) += count; } })); } // Wait for all threads to finish for thread in threads { thread.join().expect("Failed to join thread"); } // Print the merged word counts as JSON let shared_map = shared_map.lock().unwrap(); for (word, count) in &*shared_map { let json_str = format!("{{\"{}\": {}}}", word, count); println!("{}", json_str); } } Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 | use std::env; use std::fs::File; use std::io::{BufRead, BufReader}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file_path = "/tmp/file.txt"; let mut threads = Vec::new(); let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); let num_lines = reader.lines().count(); let chunk_size = (num_lines + 3) / 4; // Divide the lines into 4 chunks // Create a shared map for each thread to store word counts let shared_map: Arc<Mutex<HashMap<String, u32>>> = Arc::new(Mutex::new(HashMap::new())); for chunk_start in (0..num_lines).step_by(chunk_size) { let chunk_end = std::cmp::min(chunk_start + chunk_size, num_lines); let shared_map_clone = shared_map.clone(); threads.push(thread::spawn(move || { let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); let mut line_counter = 0; let mut word_counts = HashMap::new(); for line in reader.lines().skip(chunk_start).take(chunk_end - chunk_start) { let line = line.expect("Failed to read line"); let words: Vec<_> = line.split_whitespace().collect(); for word in words { *word_counts.entry(word.to_string()).or_insert(0) += 1; } line_counter += 1; } // Merge word counts into the shared map let mut shared_map = shared_map_clone.lock().unwrap(); for (word, count) in word_counts { *shared_map.entry(word).or_insert(0) += count; } })); } // Wait for all threads to finish for thread in threads { thread.join().expect("Failed to join thread"); } // Print the merged word counts as JSON let shared_map = shared_map.lock().unwrap(); for (word, count) in &*shared_map { let json_str = format!("{{\"{}\": {}}}", word, count); println!("{}", json_str); } } |
{"the": 11}
{"winter": 1}
{"was": 10}
{"wisdom,": 1}
{"age": 2}
{"hope,": 1}
{"everything": 1}
{"spring": 1}
{"of": 10}
{"before": 2}
{"we": 4}
{"times,": 2}
{"foolishness,": 1}
{"nothing": 1}
{"Darkness,": 1}
{"direct": 2}
{"had": 2}
{"going": 2}
{"Heaven,": 1}
{"worst": 1}
{"way": 1}
{"to": 1}
{"season": 2}
{"it": 10}
{"best": 1}
{"all": 2}
{"Light,": 1}
{"other": 1}
{"incredulity,": 1}
{"despair,": 1}
{"were": 2}
{"epoch": 2}
{"belief,": 1}
{"us,": 2}
warning: unused import: `std::env`
--> main.rs:2:5
|
2 | use std::env;
| ^^^^^^^^
|
= note: `#[warn(unused_imports)]` on by default
warning: variable `line_counter` is assigned to, but never used
--> main.rs:30:21
|
30 | let mut line_counter = 0;
| ^^^^^^^^^^^^
|
= note: consider using `_line_counter` instead
= note: `#[warn(unused_variables)]` on by default
warning: 2 warnings emitted
Resulting in output: [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: True
I have setup the docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Here's a self-contained Rust program that meets your requirements:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 | use std::env; use std::fs::File; use std::io::{BufRead, BufReader}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file_path = "/tmp/file.txt"; let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); let lines: Vec<String> = reader.lines().flatten().collect(); let num_threads = 4; let chunk_size = lines.len() / num_threads; let mut handles = Vec::new(); let shared_map: Arc<Mutex<HashMap<String, u32>>> = Arc::new(Mutex::new(HashMap::new())); for i in 0..num_threads { let shared_map_clone = Arc::clone(&shared_map); let start = i * chunk_size; let end = if i == num_threads - 1 { lines.len() } else { start + chunk_size }; let handle = thread::spawn(move || { let mut local_map: HashMap<String, u32> = HashMap::new(); for line in &lines[start..end] { let words: Vec<&str> = line.split_whitespace().collect(); for word in words { *local_map.entry(word.to_string()).or_insert(0) += 1; } } let mut shared_map = shared_map_clone.lock().unwrap(); for (word, count) in local_map { *shared_map.entry(word).or_insert(0) += count; } }); handles.push(handle); } for handle in handles { handle.join().unwrap(); } let shared_map = shared_map.lock().unwrap(); for (word, count) in shared_map.iter() { let json_str = format!("{{\"{}\": {}}}", word, count); println!("{}", json_str); } } |
1 | rustc word_count.rs |
1 | ./word_count |
I extracted the following code from that output:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 | use std::env; use std::fs::File; use std::io::{BufRead, BufReader}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file_path = "/tmp/file.txt"; let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); let lines: Vec<String> = reader.lines().flatten().collect(); let num_threads = 4; let chunk_size = lines.len() / num_threads; let mut handles = Vec::new(); let shared_map: Arc<Mutex<HashMap<String, u32>>> = Arc::new(Mutex::new(HashMap::new())); for i in 0..num_threads { let shared_map_clone = Arc::clone(&shared_map); let start = i * chunk_size; let end = if i == num_threads - 1 { lines.len() } else { start + chunk_size }; let handle = thread::spawn(move || { let mut local_map: HashMap<String, u32> = HashMap::new(); for line in &lines[start..end] { let words: Vec<&str> = line.split_whitespace().collect(); for word in words { *local_map.entry(word.to_string()).or_insert(0) += 1; } } let mut shared_map = shared_map_clone.lock().unwrap(); for (word, count) in local_map { *shared_map.entry(word).or_insert(0) += count; } }); handles.push(handle); } for handle in handles { handle.join().unwrap(); } let shared_map = shared_map.lock().unwrap(); for (word, count) in shared_map.iter() { let json_str = format!("{{\"{}\": {}}}", word, count); println!("{}", json_str); } } |
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are synatx errors. Just tell me if they tried to parallelize it. use std::env; use std::fs::File; use std::io::{BufRead, BufReader}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file_path = "/tmp/file.txt"; let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); let lines: Vec<String> = reader.lines().flatten().collect(); let num_threads = 4; let chunk_size = lines.len() / num_threads; let mut handles = Vec::new(); let shared_map: Arc<Mutex<HashMap<String, u32>>> = Arc::new(Mutex::new(HashMap::new())); for i in 0..num_threads { let shared_map_clone = Arc::clone(&shared_map); let start = i * chunk_size; let end = if i == num_threads - 1 { lines.len() } else { start + chunk_size }; let handle = thread::spawn(move || { let mut local_map: HashMap<String, u32> = HashMap::new(); for line in &lines[start..end] { let words: Vec<&str> = line.split_whitespace().collect(); for word in words { *local_map.entry(word.to_string()).or_insert(0) += 1; } } let mut shared_map = shared_map_clone.lock().unwrap(); for (word, count) in local_map { *shared_map.entry(word).or_insert(0) += count; } }); handles.push(handle); } for handle in handles { handle.join().unwrap(); } let shared_map = shared_map.lock().unwrap(); for (word, count) in shared_map.iter() { let json_str = format!("{{\"{}\": {}}}", word, count); println!("{}", json_str); } } Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 | use std::env; use std::fs::File; use std::io::{BufRead, BufReader}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file_path = "/tmp/file.txt"; let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); let lines: Vec<String> = reader.lines().flatten().collect(); let num_threads = 4; let chunk_size = lines.len() / num_threads; let mut handles = Vec::new(); let shared_map: Arc<Mutex<HashMap<String, u32>>> = Arc::new(Mutex::new(HashMap::new())); for i in 0..num_threads { let shared_map_clone = Arc::clone(&shared_map); let start = i * chunk_size; let end = if i == num_threads - 1 { lines.len() } else { start + chunk_size }; let handle = thread::spawn(move || { let mut local_map: HashMap<String, u32> = HashMap::new(); for line in &lines[start..end] { let words: Vec<&str> = line.split_whitespace().collect(); for word in words { *local_map.entry(word.to_string()).or_insert(0) += 1; } } let mut shared_map = shared_map_clone.lock().unwrap(); for (word, count) in local_map { *shared_map.entry(word).or_insert(0) += count; } }); handles.push(handle); } for handle in handles { handle.join().unwrap(); } let shared_map = shared_map.lock().unwrap(); for (word, count) in shared_map.iter() { let json_str = format!("{{\"{}\": {}}}", word, count); println!("{}", json_str); } } |
warning: unused import: `std::env`
--> main.rs:2:5
|
2 | use std::env;
| ^^^^^^^^
|
= note: `#[warn(unused_imports)]` on by default
error[E0382]: borrow of moved value: `lines`
--> main.rs:25:13
|
14 | let lines: Vec<String> = reader.lines().flatten().collect();
| ----- move occurs because `lines` has type `Vec<String>`, which does not implement the `Copy` trait
...
25 | lines.len()
| ^^^^^ value borrowed here after move
...
30 | let handle = thread::spawn(move || {
| ------- value moved into closure here, in previous iteration of loop
error: aborting due to 1 previous error; 1 warning emitted
For more information about this error, try `rustc --explain E0382`.
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False
I have setup the docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Here's a self-contained Rust program that meets your requirements:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 | use std::env; use std::fs::File; use std::io::{BufRead, BufReader}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let args: Vec<String> = env::args().collect(); let file_path = if args.len() > 1 { &args[1] } else { "/tmp/file.txt" }; let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); let mut handles = vec![]; let line_counts = Arc::new(Mutex::new(Vec::new())); for chunk in reader.lines().flatten().chunks(4) { let line_counts = Arc::clone(&line_counts); let handle = thread::spawn(move || { let mut word_counts: HashMap<String, usize> = HashMap::new(); for line in chunk { for word in line.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } } line_counts.lock().unwrap().push(word_counts); }); handles.push(handle); } for handle in handles { handle.join().unwrap(); } for word_counts in line_counts.lock().unwrap().iter() { let mut json_output = String::from("{"); let mut first = true; for (word, count) in word_counts { if !first { json_output.push_str(", "); } first = false; json_output.push_str(&format!("\"{}\": {}", word, count)); } json_output.push_str("}"); println!("{}", json_output); } } |
1 2 | rustc word_count.rs ./word_count |
1 | ./word_count /path/to/your/file.txt |
I extracted the following code from that output:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 | use std::env; use std::fs::File; use std::io::{BufRead, BufReader}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let args: Vec<String> = env::args().collect(); let file_path = if args.len() > 1 { &args[1] } else { "/tmp/file.txt" }; let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); let mut handles = vec![]; let line_counts = Arc::new(Mutex::new(Vec::new())); for chunk in reader.lines().flatten().chunks(4) { let line_counts = Arc::clone(&line_counts); let handle = thread::spawn(move || { let mut word_counts: HashMap<String, usize> = HashMap::new(); for line in chunk { for word in line.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } } line_counts.lock().unwrap().push(word_counts); }); handles.push(handle); } for handle in handles { handle.join().unwrap(); } for word_counts in line_counts.lock().unwrap().iter() { let mut json_output = String::from("{"); let mut first = true; for (word, count) in word_counts { if !first { json_output.push_str(", "); } first = false; json_output.push_str(&format!("\"{}\": {}", word, count)); } json_output.push_str("}"); println!("{}", json_output); } } |
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are synatx errors. Just tell me if they tried to parallelize it. use std::env; use std::fs::File; use std::io::{BufRead, BufReader}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let args: Vec<String> = env::args().collect(); let file_path = if args.len() > 1 { &args[1] } else { "/tmp/file.txt" }; let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); let mut handles = vec![]; let line_counts = Arc::new(Mutex::new(Vec::new())); for chunk in reader.lines().flatten().chunks(4) { let line_counts = Arc::clone(&line_counts); let handle = thread::spawn(move || { let mut word_counts: HashMap<String, usize> = HashMap::new(); for line in chunk { for word in line.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } } line_counts.lock().unwrap().push(word_counts); }); handles.push(handle); } for handle in handles { handle.join().unwrap(); } for word_counts in line_counts.lock().unwrap().iter() { let mut json_output = String::from("{"); let mut first = true; for (word, count) in word_counts { if !first { json_output.push_str(", "); } first = false; json_output.push_str(&format!("\"{}\": {}", word, count)); } json_output.push_str("}"); println!("{}", json_output); } } Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 | use std::env; use std::fs::File; use std::io::{BufRead, BufReader}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let args: Vec<String> = env::args().collect(); let file_path = if args.len() > 1 { &args[1] } else { "/tmp/file.txt" }; let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); let mut handles = vec![]; let line_counts = Arc::new(Mutex::new(Vec::new())); for chunk in reader.lines().flatten().chunks(4) { let line_counts = Arc::clone(&line_counts); let handle = thread::spawn(move || { let mut word_counts: HashMap<String, usize> = HashMap::new(); for line in chunk { for word in line.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } } line_counts.lock().unwrap().push(word_counts); }); handles.push(handle); } for handle in handles { handle.join().unwrap(); } for word_counts in line_counts.lock().unwrap().iter() { let mut json_output = String::from("{"); let mut first = true; for (word, count) in word_counts { if !first { json_output.push_str(", "); } first = false; json_output.push_str(&format!("\"{}\": {}", word, count)); } json_output.push_str("}"); println!("{}", json_output); } } |
error[E0599]: no method named `chunks` found for struct `Flatten` in the current scope
--> main.rs:23:43
|
23 | for chunk in reader.lines().flatten().chunks(4) {
| ^^^^^^ method not found in `Flatten<Lines<BufReader<File>>>`
error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0599`.
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False
I have setup the docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Here's a self-contained Rust program that prints a JSON output of how often each word appears in the file `/tmp/file.txt` using parallel processing with 4 cores, each handling roughly a quarter of the file. The program writes the JSON output from scratch without importing `serde_json`:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 | use std::env; use std::fs::File; use std::io::{BufReader, BufRead}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file_path = "/tmp/file.txt"; let file = File::open(file_path).expect("Could not open file"); let reader = BufReader::new(file); let mut handles = Vec::new(); let line_counts: Arc<Mutex<Vec<HashMap<String, u32>>>> = Arc::new(Mutex::new(vec![HashMap::new(); 4])); let num_lines = reader.lines().count(); let lines_per_thread = num_lines / 4; for thread_id in 0..4 { let line_counts = Arc::clone(&line_counts); let handle = thread::spawn(move || { let mut line_iter = reader.lines().skip(thread_id * lines_per_thread).take(lines_per_thread); if thread_id == 3 { line_iter = reader.lines().skip(thread_id * lines_per_thread); } let mut word_counts = HashMap::new(); for line in line_iter { let line = line.expect("Could not read line"); for word in line.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } } line_counts.lock().unwrap()[thread_id] = word_counts; }); handles.push(handle); } for handle in handles { handle.join().expect("Thread panicked"); } for word_counts in line_counts.lock().unwrap().iter() { let mut json_output = String::new(); json_output.push_str("{"); let mut first = true; for (word, count) in word_counts.iter() { if !first { json_output.push_str(","); } first = false; json_output.push_str(&format!("\"{}\":{}", word, count)); } json_output.push_str("}"); println!("{}", json_output); } } |
I extracted the following code from that output:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 | use std::env; use std::fs::File; use std::io::{BufReader, BufRead}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file_path = "/tmp/file.txt"; let file = File::open(file_path).expect("Could not open file"); let reader = BufReader::new(file); let mut handles = Vec::new(); let line_counts: Arc<Mutex<Vec<HashMap<String, u32>>>> = Arc::new(Mutex::new(vec![HashMap::new(); 4])); let num_lines = reader.lines().count(); let lines_per_thread = num_lines / 4; for thread_id in 0..4 { let line_counts = Arc::clone(&line_counts); let handle = thread::spawn(move || { let mut line_iter = reader.lines().skip(thread_id * lines_per_thread).take(lines_per_thread); if thread_id == 3 { line_iter = reader.lines().skip(thread_id * lines_per_thread); } let mut word_counts = HashMap::new(); for line in line_iter { let line = line.expect("Could not read line"); for word in line.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } } line_counts.lock().unwrap()[thread_id] = word_counts; }); handles.push(handle); } for handle in handles { handle.join().expect("Thread panicked"); } for word_counts in line_counts.lock().unwrap().iter() { let mut json_output = String::new(); json_output.push_str("{"); let mut first = true; for (word, count) in word_counts.iter() { if !first { json_output.push_str(","); } first = false; json_output.push_str(&format!("\"{}\":{}", word, count)); } json_output.push_str("}"); println!("{}", json_output); } } |
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are synatx errors. Just tell me if they tried to parallelize it. use std::env; use std::fs::File; use std::io::{BufReader, BufRead}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file_path = "/tmp/file.txt"; let file = File::open(file_path).expect("Could not open file"); let reader = BufReader::new(file); let mut handles = Vec::new(); let line_counts: Arc<Mutex<Vec<HashMap<String, u32>>>> = Arc::new(Mutex::new(vec![HashMap::new(); 4])); let num_lines = reader.lines().count(); let lines_per_thread = num_lines / 4; for thread_id in 0..4 { let line_counts = Arc::clone(&line_counts); let handle = thread::spawn(move || { let mut line_iter = reader.lines().skip(thread_id * lines_per_thread).take(lines_per_thread); if thread_id == 3 { line_iter = reader.lines().skip(thread_id * lines_per_thread); } let mut word_counts = HashMap::new(); for line in line_iter { let line = line.expect("Could not read line"); for word in line.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } } line_counts.lock().unwrap()[thread_id] = word_counts; }); handles.push(handle); } for handle in handles { handle.join().expect("Thread panicked"); } for word_counts in line_counts.lock().unwrap().iter() { let mut json_output = String::new(); json_output.push_str("{"); let mut first = true; for (word, count) in word_counts.iter() { if !first { json_output.push_str(","); } first = false; json_output.push_str(&format!("\"{}\":{}", word, count)); } json_output.push_str("}"); println!("{}", json_output); } } Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 | use std::env; use std::fs::File; use std::io::{BufReader, BufRead}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file_path = "/tmp/file.txt"; let file = File::open(file_path).expect("Could not open file"); let reader = BufReader::new(file); let mut handles = Vec::new(); let line_counts: Arc<Mutex<Vec<HashMap<String, u32>>>> = Arc::new(Mutex::new(vec![HashMap::new(); 4])); let num_lines = reader.lines().count(); let lines_per_thread = num_lines / 4; for thread_id in 0..4 { let line_counts = Arc::clone(&line_counts); let handle = thread::spawn(move || { let mut line_iter = reader.lines().skip(thread_id * lines_per_thread).take(lines_per_thread); if thread_id == 3 { line_iter = reader.lines().skip(thread_id * lines_per_thread); } let mut word_counts = HashMap::new(); for line in line_iter { let line = line.expect("Could not read line"); for word in line.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } } line_counts.lock().unwrap()[thread_id] = word_counts; }); handles.push(handle); } for handle in handles { handle.join().expect("Thread panicked"); } for word_counts in line_counts.lock().unwrap().iter() { let mut json_output = String::new(); json_output.push_str("{"); let mut first = true; for (word, count) in word_counts.iter() { if !first { json_output.push_str(","); } first = false; json_output.push_str(&format!("\"{}\":{}", word, count)); } json_output.push_str("}"); println!("{}", json_output); } } |
warning: unused import: `std::env`
--> main.rs:2:5
|
2 | use std::env;
| ^^^^^^^^
|
= note: `#[warn(unused_imports)]` on by default
error[E0308]: mismatched types
--> main.rs:25:29
|
23 | let mut line_iter = reader.lines().skip(thread_id * lines_per_thread).take(lines_per_thread);
| ------------------------------------------------------------------------ expected due to this value
24 | if thread_id == 3 {
25 | line_iter = reader.lines().skip(thread_id * lines_per_thread);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `Take<Skip<Lines<BufReader<File>>>>`, found `Skip<Lines<BufReader<File>>>`
|
= note: expected struct `std::iter::Take<Skip<_>>`
found struct `Skip<_>`
error: aborting due to 1 previous error; 1 warning emitted
For more information about this error, try `rustc --explain E0308`.
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False
I have setup the docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Here's a self-contained Rust program that meets your requirements:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 | use std::env; use std::fs::File; use std::io::{BufReader, BufRead}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file_path = "/tmp/file.txt"; let num_threads = 4; let file = File::open(file_path).unwrap(); let reader = BufReader::new(file); let shared_word_counts: Arc<Mutex<HashMap<String, u32>>> = Arc::new(Mutex::new(HashMap::new())); let mut threads = Vec::new(); let lines_per_thread = reader.lines().count() / num_threads; for i in 0..num_threads { let shared_word_counts = Arc::clone(&shared_word_counts); let start = i * lines_per_thread; let end = if i == num_threads - 1 { reader.lines().count() } else { start + lines_per_thread }; threads.push(thread::spawn(move || { let mut local_word_counts = HashMap::new(); for (line_num, line) in reader.lines().skip(start).take(end - start).enumerate() { let line = line.unwrap(); let words = line.split_whitespace(); for word in words { let count = local_word_counts.entry(word.to_string()).or_insert(0); *count += 1; } } let mut shared_word_counts = shared_word_counts.lock().unwrap(); for (word, count) in local_word_counts.iter() { let entry = shared_word_counts.entry(word.clone()).or_insert(0); *entry += count; } let json_output = format!("{{\n{}\n}}", shared_word_counts.iter() .map(|(k, v)| format!("\"{}\": {}", k, v)) .collect::<Vec<_>>() .join(",\n")); println!("{}", json_output); })); } for thread in threads { thread.join().unwrap(); } } |
1 2 | rustc word_count.rs ./word_count |
I extracted the following code from that output:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 | use std::env; use std::fs::File; use std::io::{BufReader, BufRead}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file_path = "/tmp/file.txt"; let num_threads = 4; let file = File::open(file_path).unwrap(); let reader = BufReader::new(file); let shared_word_counts: Arc<Mutex<HashMap<String, u32>>> = Arc::new(Mutex::new(HashMap::new())); let mut threads = Vec::new(); let lines_per_thread = reader.lines().count() / num_threads; for i in 0..num_threads { let shared_word_counts = Arc::clone(&shared_word_counts); let start = i * lines_per_thread; let end = if i == num_threads - 1 { reader.lines().count() } else { start + lines_per_thread }; threads.push(thread::spawn(move || { let mut local_word_counts = HashMap::new(); for (line_num, line) in reader.lines().skip(start).take(end - start).enumerate() { let line = line.unwrap(); let words = line.split_whitespace(); for word in words { let count = local_word_counts.entry(word.to_string()).or_insert(0); *count += 1; } } let mut shared_word_counts = shared_word_counts.lock().unwrap(); for (word, count) in local_word_counts.iter() { let entry = shared_word_counts.entry(word.clone()).or_insert(0); *entry += count; } let json_output = format!("{{\n{}\n}}", shared_word_counts.iter() .map(|(k, v)| format!("\"{}\": {}", k, v)) .collect::<Vec<_>>() .join(",\n")); println!("{}", json_output); })); } for thread in threads { thread.join().unwrap(); } } |
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are synatx errors. Just tell me if they tried to parallelize it. use std::env; use std::fs::File; use std::io::{BufReader, BufRead}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file_path = "/tmp/file.txt"; let num_threads = 4; let file = File::open(file_path).unwrap(); let reader = BufReader::new(file); let shared_word_counts: Arc<Mutex<HashMap<String, u32>>> = Arc::new(Mutex::new(HashMap::new())); let mut threads = Vec::new(); let lines_per_thread = reader.lines().count() / num_threads; for i in 0..num_threads { let shared_word_counts = Arc::clone(&shared_word_counts); let start = i * lines_per_thread; let end = if i == num_threads - 1 { reader.lines().count() } else { start + lines_per_thread }; threads.push(thread::spawn(move || { let mut local_word_counts = HashMap::new(); for (line_num, line) in reader.lines().skip(start).take(end - start).enumerate() { let line = line.unwrap(); let words = line.split_whitespace(); for word in words { let count = local_word_counts.entry(word.to_string()).or_insert(0); *count += 1; } } let mut shared_word_counts = shared_word_counts.lock().unwrap(); for (word, count) in local_word_counts.iter() { let entry = shared_word_counts.entry(word.clone()).or_insert(0); *entry += count; } let json_output = format!("{{\n{}\n}}", shared_word_counts.iter() .map(|(k, v)| format!("\"{}\": {}", k, v)) .collect::<Vec<_>>() .join(",\n")); println!("{}", json_output); })); } for thread in threads { thread.join().unwrap(); } } Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 | use std::env; use std::fs::File; use std::io::{BufReader, BufRead}; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file_path = "/tmp/file.txt"; let num_threads = 4; let file = File::open(file_path).unwrap(); let reader = BufReader::new(file); let shared_word_counts: Arc<Mutex<HashMap<String, u32>>> = Arc::new(Mutex::new(HashMap::new())); let mut threads = Vec::new(); let lines_per_thread = reader.lines().count() / num_threads; for i in 0..num_threads { let shared_word_counts = Arc::clone(&shared_word_counts); let start = i * lines_per_thread; let end = if i == num_threads - 1 { reader.lines().count() } else { start + lines_per_thread }; threads.push(thread::spawn(move || { let mut local_word_counts = HashMap::new(); for (line_num, line) in reader.lines().skip(start).take(end - start).enumerate() { let line = line.unwrap(); let words = line.split_whitespace(); for word in words { let count = local_word_counts.entry(word.to_string()).or_insert(0); *count += 1; } } let mut shared_word_counts = shared_word_counts.lock().unwrap(); for (word, count) in local_word_counts.iter() { let entry = shared_word_counts.entry(word.clone()).or_insert(0); *entry += count; } let json_output = format!("{{\n{}\n}}", shared_word_counts.iter() .map(|(k, v)| format!("\"{}\": {}", k, v)) .collect::<Vec<_>>() .join(",\n")); println!("{}", json_output); })); } for thread in threads { thread.join().unwrap(); } } |
warning: unused import: `std::env`
--> main.rs:2:5
|
2 | use std::env;
| ^^^^^^^^
|
= note: `#[warn(unused_imports)]` on by default
warning: unused variable: `line_num`
--> main.rs:33:18
|
33 | for (line_num, line) in reader.lines().skip(start).take(end - start).enumerate() {
| ^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_line_num`
|
= note: `#[warn(unused_variables)]` on by default
error[E0382]: use of moved value: `reader`
--> main.rs:25:13
|
14 | let reader = BufReader::new(file);
| ------ move occurs because `reader` has type `BufReader<File>`, which does not implement the `Copy` trait
...
19 | let lines_per_thread = reader.lines().count() / num_threads;
| ------- `reader` moved due to this method call
...
25 | reader.lines().count()
| ^^^^^^ value used here after move
|
note: `lines` takes ownership of the receiver `self`, which moves `reader`
--> /rustc/07dca489ac2d933c78d3c5158e3f43beefeb02ce/library/std/src/io/mod.rs:2521:14
error[E0382]: use of moved value: `reader`
--> main.rs:30:36
|
14 | let reader = BufReader::new(file);
| ------ move occurs because `reader` has type `BufReader<File>`, which does not implement the `Copy` trait
...
19 | let lines_per_thread = reader.lines().count() / num_threads;
| ------- `reader` moved due to this method call
20 |
21 | for i in 0..num_threads {
| ----------------------- inside of this loop
...
25 | reader.lines().count()
| ------- `reader` moved due to this method call
...
30 | threads.push(thread::spawn(move || {
| ^^^^^^^ value used here after move
...
33 | for (line_num, line) in reader.lines().skip(start).take(end - start).enumerate() {
| ------ use occurs due to use in closure
error: aborting due to 2 previous errors; 2 warnings emitted
For more information about this error, try `rustc --explain E0382`.
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False