I have set up the Docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout: {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1}. Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 different JSON dicts, one per line. I'll do the merging. Do not import serde_json; write the JSON output from scratch.
Here's a self-contained Rust program that meets your requirements:
use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    let file_path = "/tmp/file.txt";
    let num_threads = 4;

    let file = File::open(file_path).expect("Failed to open file");
    let file_size = file.metadata().expect("Failed to get file metadata").len();
    let chunk_size = (file_size / num_threads as u64) + 1;

    let word_counts = Arc::new(Mutex::new(Vec::new()));
    let mut threads = Vec::new();

    for i in 0..num_threads {
        let start = i * chunk_size;
        let end = std::cmp::min((i + 1) * chunk_size, file_size);
        let file = File::open(file_path).expect("Failed to open file");
        let word_counts = word_counts.clone();

        threads.push(thread::spawn(move || {
            let mut counts = HashMap::new();
            let reader = BufReader::new(file);

            for line in reader.lines().skip(start as usize).take((end - start) as usize) {
                let line = line.expect("Failed to read line");
                for word in line.split_whitespace() {
                    *counts.entry(word.to_string()).or_insert(0) += 1;
                }
            }

            word_counts.lock().unwrap().push(counts);
        }));
    }

    for thread in threads {
        thread.join().expect("Thread panicked");
    }

    for counts in word_counts.lock().unwrap().iter() {
        print_json(&counts);
    }
}

fn print_json(counts: &HashMap<String, u32>) {
    print!("{{");
    let mut first = true;
    for (word, count) in counts {
        if !first {
            print!(", ");
        }
        print!("\"{}\": {}", word, count);
        first = false;
    }
    println!("}}");
}
rustc rust_word_count.rs
./rust_word_count
I extracted the following code from that output:
(Identical to the program above.)
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are syntax errors. Just tell me if they tried to parallelize it.

(The extracted program above is included here verbatim.)

Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes: True
Running the following program:
(The extracted program above, verbatim.)
{}
{}
{"direct": 2, "way": 1, "best": 1, "age": 2, "other": 1, "worst": 1, "hope,": 1, "incredulity,": 1, "Darkness,": 1, "were": 2, "we": 4, "had": 2, "to": 1, "us,": 2, "everything": 1, "it": 10, "times,": 2, "despair,": 1, "was": 10, "epoch": 2, "season": 2, "going": 2, "wisdom,": 1, "all": 2, "winter": 1, "of": 10, "nothing": 1, "the": 11, "foolishness,": 1, "belief,": 1, "Heaven,": 1, "before": 2, "spring": 1, "Light,": 1}
{}
Resulting in output: [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]: True
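Note that this first program passes only by accident: `skip(start)` and `take((end - start))` treat the byte offsets `start` and `end` as line counts, so thread 0 takes every line of the small input and threads 1-3 skip past the end of the file, which is exactly why three of the four printed dicts are `{}`. The merge still comes out right because the one non-empty dict covers the whole file. A minimal sketch of chunking by line count instead, assuming the file fits in memory (illustrative code, not the graded program):

use std::collections::HashMap;
use std::fs;
use std::thread;

// Sketch: split the *lines* (not bytes) of the file into 4 chunks,
// count words per chunk in a thread, and collect the maps via join().
fn main() {
    let text = fs::read_to_string("/tmp/file.txt").expect("Failed to read file");
    let lines: Vec<String> = text.lines().map(String::from).collect();
    let num_threads = 4;
    // Ceiling division so the chunks cover every line.
    let chunk_size = (lines.len() + num_threads - 1) / num_threads;

    let handles: Vec<_> = (0..num_threads)
        .map(|i| {
            let start = (i * chunk_size).min(lines.len());
            let end = ((i + 1) * chunk_size).min(lines.len());
            let chunk = lines[start..end].to_vec();
            thread::spawn(move || {
                let mut counts: HashMap<String, u64> = HashMap::new();
                for line in &chunk {
                    for word in line.split_whitespace() {
                        *counts.entry(word.to_string()).or_insert(0) += 1;
                    }
                }
                counts
            })
        })
        .collect();

    for handle in handles {
        // Returning the map from the closure removes the Arc<Mutex<Vec<_>>>.
        let counts = handle.join().expect("Thread panicked");
        println!("{:?}", counts); // a real solution would emit hand-built JSON here
    }
}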
I have set up the Docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout: {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1}. Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 different JSON dicts, one per line. I'll do the merging. Do not import serde_json; write the JSON output from scratch.
Here's a self-contained Rust program that meets your requirements:
use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    let file_path = "/tmp/file.txt";
    let num_threads = 4;

    let file = File::open(file_path).expect("Failed to open file");
    let file_size = file.metadata().expect("Failed to get file metadata").len();
    let chunk_size = (file_size / num_threads as u64) + 1;

    let shared_map = Arc::new(Mutex::new(Vec::new()));
    let mut threads = Vec::new();

    for i in 0..num_threads {
        let start_pos = i as u64 * chunk_size;
        let end_pos = std::cmp::min((i + 1) as u64 * chunk_size, file_size);
        let file = File::open(file_path).expect("Failed to open file");
        let shared_map = Arc::clone(&shared_map);

        threads.push(thread::spawn(move || {
            let mut reader = BufReader::new(file);
            reader.seek(std::io::SeekFrom::Start(start_pos)).expect("Failed to seek");

            let mut word_count = HashMap::new();
            let mut buffer = String::new();

            while reader.read_line(&mut buffer).expect("Failed to read line") > 0 {
                if reader.stream_position().expect("Failed to get stream position") >= end_pos {
                    break;
                }

                let words: Vec<&str> = buffer.split_whitespace().collect();
                for word in words {
                    *word_count.entry(word.to_string()).or_insert(0) += 1;
                }

                buffer.clear();
            }

            let mut shared_map = shared_map.lock().expect("Failed to acquire lock");
            shared_map.push(word_count);
        }));
    }

    for thread in threads {
        thread.join().expect("Failed to join thread");
    }

    let shared_map = shared_map.lock().expect("Failed to acquire lock");
    for word_count in shared_map.iter() {
        println!("{}", to_json(word_count));
    }
}

fn to_json(map: &HashMap<String, u32>) -> String {
    let mut json = String::from("{");
    let mut first = true;

    for (key, value) in map {
        if !first {
            json.push(',');
        }
        first = false;

        json.push('"');
        json.push_str(key);
        json.push_str("\":");
        json.push_str(&value.to_string());
    }

    json.push('}');
    json
}
rustc rust_word_count.rs && ./rust_word_count
I extracted the following code from that output:
(Identical to the program above.)
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are syntax errors. Just tell me if they tried to parallelize it.

(The extracted program above is included here verbatim.)

Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes: True
Running the following program:
(The extracted program above, verbatim.)
error[E0599]: no method named `seek` found for struct `BufReader` in the current scope
--> main.rs:28:20
|
28 | reader.seek(std::io::SeekFrom::Start(start_pos)).expect("Failed to seek");
| ^^^^ method not found in `BufReader<File>`
--> /rustc/07dca489ac2d933c78d3c5158e3f43beefeb02ce/library/std/src/io/mod.rs:1897:8
|
= note: the method is available for `BufReader<File>` here
|
= help: items from traits can only be used if the trait is in scope
help: the following trait is implemented but not in scope; perhaps add a `use` for it:
|
2 + use std::io::Seek;
|
error[E0599]: no method named `stream_position` found for struct `BufReader` in the current scope
--> main.rs:34:27
|
34 | if reader.stream_position().expect("Failed to get stream position") >= end_pos {
| ^^^^^^^^^^^^^^^ method not found in `BufReader<File>`
--> /rustc/07dca489ac2d933c78d3c5158e3f43beefeb02ce/library/std/src/io/mod.rs:2005:8
|
= note: the method is available for `BufReader<File>` here
|
= help: items from traits can only be used if the trait is in scope
help: the following trait is implemented but not in scope; perhaps add a `use` for it:
|
2 + use std::io::Seek;
|
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0599`.
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]: False
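The two E0599s are the same trait-scope problem: `seek` and `stream_position` are methods of the `std::io::Seek` trait, and Rust only resolves trait methods when the trait is in scope, exactly as the compiler's `use std::io::Seek;` hint says. A minimal sketch of that fix in isolation (the seek-based chunking would still have boundary issues, since a thread can land mid-line, but that is a separate logic question):

use std::fs::File;
use std::io::{BufRead, BufReader, Seek, SeekFrom}; // Seek brings seek()/stream_position() into scope

fn main() -> std::io::Result<()> {
    let file = File::open("/tmp/file.txt")?;
    let mut reader = BufReader::new(file);
    reader.seek(SeekFrom::Start(0))?;     // resolves now that Seek is imported
    let pos = reader.stream_position()?;  // likewise
    let mut line = String::new();
    reader.read_line(&mut line)?;
    println!("position {}: {}", pos, line.trim_end());
    Ok(())
}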
I have set up the Docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout: {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1}. Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 different JSON dicts, one per line. I'll do the merging. Do not import serde_json; write the JSON output from scratch.
Here's a self-contained Rust program that reads a text file located at "/tmp/file.txt", counts the frequency of each word using parallel processing on 4 cores, and prints 4 different JSON dictionaries (one per line) to stdout:
use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    let file_path = "/tmp/file.txt";
    let num_threads = 4;

    let file = File::open(file_path).expect("Unable to open file");
    let file_size = file.metadata().expect("Unable to get file metadata").len();
    let chunk_size = (file_size / num_threads as u64) + 1;

    let file_reader = BufReader::new(file);
    let lines: Vec<String> = file_reader.lines().map(|line| line.expect("Unable to read line")).collect();

    let shared_map = Arc::new(Mutex::new(Vec::new()));
    let mut handles = vec![];

    for i in 0..num_threads {
        let start_index = i * chunk_size as usize;
        let end_index = std::cmp::min((i + 1) * chunk_size as usize, lines.len());
        let lines_chunk = lines[start_index..end_index].to_vec();
        let shared_map_clone = Arc::clone(&shared_map);

        let handle = thread::spawn(move || {
            let mut word_counts = HashMap::new();

            for line in lines_chunk {
                let words: Vec<&str> = line.split_whitespace().collect();
                for word in words {
                    *word_counts.entry(word).or_insert(0) += 1;
                }
            }

            shared_map_clone.lock().unwrap().push(word_counts);
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().expect("Thread panicked");
    }

    let results = shared_map.lock().unwrap();
    for word_counts in results.iter() {
        let json_output = format_json_output(word_counts);
        println!("{}", json_output);
    }
}

fn format_json_output(word_counts: &HashMap<&str, u32>) -> String {
    let mut json_output = String::from("{");
    let mut iter = word_counts.iter();

    if let Some((word, count)) = iter.next() {
        json_output.push_str(&format!("\"{}\": {}", word, count));
    }

    for (word, count) in iter {
        json_output.push_str(&format!(", \"{}\": {}", word, count));
    }

    json_output.push('}');
    json_output
}
rustc rust_word_count.rs
./rust_word_count
I extracted the following code from that output:
(Identical to the program above.)
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are syntax errors. Just tell me if they tried to parallelize it.

(The extracted program above is included here verbatim.)

Answer either 'The student passes' or 'The student fails'.
The student passes
Testing if the previous output contains the string student passes: True
Running the following program:
(The extracted program above, verbatim.)
error[E0597]: `line` does not live long enough
--> main.rs:31:40
|
26 | let shared_map_clone = Arc::clone(&shared_map);
| ---------------- lifetime `'1` appears in the type of `shared_map_clone`
...
30 | for line in lines_chunk {
| ---- binding `line` declared here
31 | let words: Vec<&str> = line.split_whitespace().collect();
| ^^^^ borrowed value does not live long enough
...
35 | }
| - `line` dropped here while still borrowed
36 | shared_map_clone.lock().unwrap().push(word_counts);
| -------------------------------------------------- argument requires that `line` is borrowed for `'1`
error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0597`.
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]: False
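Here the borrow checker rejects the program because `word_counts` stores `&str` keys borrowed from `line`, an owned `String` dropped at the end of each loop iteration, while the map itself is pushed into the shared Vec and outlives the loop (E0597). The usual fix, which the first attempt already used, is to let the map own its keys via `to_string()`. A small self-contained sketch of that pattern, with hypothetical helper names:

use std::collections::HashMap;

// Sketch: the map owns String keys, so nothing borrows from the
// short-lived `line` binding and E0597 goes away.
fn count_words(lines: &[String]) -> HashMap<String, u32> {
    let mut word_counts = HashMap::new();
    for line in lines {
        for word in line.split_whitespace() {
            *word_counts.entry(word.to_string()).or_insert(0) += 1;
        }
    }
    word_counts
}

fn main() {
    let lines = vec!["This is is is, an example".to_string()];
    println!("{:?}", count_words(&lines));
}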
I have set up the Docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout: {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1}. Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 different JSON dicts, one per line. I'll do the merging. Do not import serde_json; write the JSON output from scratch.
Here's a self-contained Rust program that counts word frequencies in a text file located at `/tmp/file.txt` using parallel processing on 4 cores:
use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    let file_path = "/tmp/file.txt";
    let num_threads = 4;

    let file = File::open(file_path).expect("Failed to open file");
    let file_size = file.metadata().expect("Failed to get file metadata").len();
    let chunk_size = (file_size / num_threads as u64) + 1;

    let counter = Arc::new(Mutex::new(Vec::new()));
    let mut threads = Vec::new();

    for i in 0..num_threads {
        let start = i * chunk_size;
        let end = std::cmp::min((i + 1) * chunk_size, file_size);
        let file = File::open(file_path).expect("Failed to open file");
        let counter = Arc::clone(&counter);

        threads.push(thread::spawn(move || {
            let mut word_counts = HashMap::new();
            let reader = BufReader::new(file);

            for line in reader.lines().skip(start as usize).take((end - start) as usize) {
                let line = line.expect("Failed to read line");
                let words = line.split_whitespace();

                for word in words {
                    *word_counts.entry(word.to_string()).or_insert(0) += 1;
                }
            }

            let mut counter = counter.lock().unwrap();
            counter.push(word_counts);
        }));
    }

    for thread in threads {
        thread.join().expect("Failed to join thread");
    }

    let counter = counter.lock().unwrap();
    for word_counts in counter.iter() {
        print_json(word_counts);
    }
}

fn print_json(word_counts: &HashMap<String, u32>) {
    print!("{{");

    let mut iter = word_counts.iter();
    if let Some((word, count)) = iter.next() {
        print!("\"{}\": {}", escape_json(word), count);
    }

    for (word, count) in iter {
        print!(", \"{}\": {}", escape_json(word), count);
    }

    println!("}}");
}

fn escape_json(s: &str) -> String {
    s.replace('\\', "\\\\")
        .replace('"', "\\\"")
        .replace('\n', "\\n")
        .replace('\r', "\\r")
        .replace('\t', "\\t")
}
rustc rust_word_count.rs
./rust_word_count
I extracted the following code from that output:
(Identical to the program above.)
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are syntax errors. Just tell me if they tried to parallelize it.

(The extracted program above is included here verbatim.)

Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes: True
Running the following program:
(The extracted program above, verbatim.)
{}
{}
{}
{"we": 4, "before": 2, "foolishness,": 1, "were": 2, "times,": 2, "other": 1, "belief,": 1, "it": 10, "best": 1, "the": 11, "hope,": 1, "all": 2, "Heaven,": 1, "season": 2, "Darkness,": 1, "epoch": 2, "Light,": 1, "spring": 1, "of": 10, "incredulity,": 1, "worst": 1, "had": 2, "everything": 1, "us,": 2, "way": 1, "winter": 1, "despair,": 1, "wisdom,": 1, "to": 1, "was": 10, "age": 2, "going": 2, "direct": 2, "nothing": 1}
Resulting in output: [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]: True
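This fourth attempt is the first clean pass: owned `String` keys, per-thread maps, and a hand-rolled `escape_json` in place of serde_json. The prompt leaves merging the four printed dicts to the caller; if one wanted it in Rust as well, a simple fold over the per-thread maps would do. A hypothetical sketch, not part of the graded program:

use std::collections::HashMap;

// Hypothetical helper: fold several per-thread word-count maps into one total.
fn merge_counts(parts: Vec<HashMap<String, u64>>) -> HashMap<String, u64> {
    let mut total = HashMap::new();
    for part in parts {
        for (word, count) in part {
            *total.entry(word).or_insert(0) += count;
        }
    }
    total
}

fn main() {
    let a = HashMap::from([("is".to_string(), 2u64)]);
    let b = HashMap::from([("is".to_string(), 1u64), ("an".to_string(), 1u64)]);
    let merged = merge_counts(vec![a, b]);
    assert_eq!(merged["is"], 3);
    println!("{:?}", merged);
}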
I have set up the Docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout: {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1}. Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 different JSON dicts, one per line. I'll do the merging. Do not import serde_json; write the JSON output from scratch.
Here's a self-contained Rust program that meets your requirements:
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::collections::HashMap;
use std::thread;
use std::sync::Arc;

fn main() {
    let file_path = "/tmp/file.txt";
    let num_threads = 4;

    let file = File::open(file_path).expect("Failed to open file");
    let file_size = file.metadata().expect("Failed to get file metadata").len();
    let chunk_size = (file_size as usize + num_threads - 1) / num_threads;

    let reader = BufReader::new(file);
    let lines: Vec<String> = reader.lines().map(|line| line.expect("Failed to read line")).collect();
    let shared_lines = Arc::new(lines);

    let mut handles = Vec::new();

    for i in 0..num_threads {
        let lines = Arc::clone(&shared_lines);

        let handle = thread::spawn(move || {
            let start = i * chunk_size;
            let end = std::cmp::min((i + 1) * chunk_size, lines.len());
            let mut word_count = HashMap::new();

            for line in lines[start..end].iter() {
                for word in line.split_whitespace() {
                    *word_count.entry(word.to_string()).or_insert(0) += 1;
                }
            }

            println!("{}", to_json(&word_count));
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().expect("Thread panicked");
    }
}

fn to_json(word_count: &HashMap<String, usize>) -> String {
    let mut json = String::from("{");

    for (i, (word, count)) in word_count.iter().enumerate() {
        json.push_str(&format!("\"{}\": {}", word, count));
        if i < word_count.len() - 1 {
            json.push_str(", ");
        }
    }

    json.push('}');
    json
}
rustc rust_word_count.rs
./rust_word_count
I extracted the following code from that output:
(Identical to the program above.)
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are syntax errors. Just tell me if they tried to parallelize it.

(The extracted program above is included here verbatim.)

Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes: True
Running the following program:
(The extracted program above, verbatim.)
{"were": 2, "Heaven,": 1, "way": 1, "Darkness,": 1, "age": 2, "us,": 2, "incredulity,": 1, "despair,": 1, "other": 1, "times,": 2, "Light,": 1, "epoch": 2, "wisdom,": 1, "spring": 1, "going": 2, "all": 2, "foolishness,": 1, "worst": 1, "hope,": 1, "it": 10, "best": 1, "everything": 1, "we": 4, "direct": 2, "was": 10, "of": 10, "had": 2, "nothing": 1, "before": 2, "to": 1, "belief,": 1, "winter": 1, "season": 2, "the": 11}
thread '<unnamed>' panicked at main.rs:31:30:
slice index starts at 104 but ends at 1
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
thread '<unnamed>' panicked at main.rs:31:30:
slice index starts at 208 but ends at 1
thread '<unnamed>' panicked at main.rs:31:30:
slice index starts at 312 but ends at 1
thread 'main' panicked at main.rs:44:23:
Thread panicked: Any { .. }
Resulting in output: [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]: True
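This last variant also passes, but only because the input is a single line: thread 0 prints the complete count before the other threads panic on the unclamped slice `lines[start..end]`. "slice index starts at 104 but ends at 1" is `start = 104` (derived from the byte-based `chunk_size`) against `end = min(208, lines.len()) = 1`. Clamping both bounds to the line count would avoid the panic; a minimal sketch with illustrative names:

// Sketch: saturate both slice bounds at the number of lines, so
// start <= end <= len and `&lines[start..end]` cannot panic.
fn chunk_bounds(i: usize, chunk_size: usize, len: usize) -> (usize, usize) {
    let start = (i * chunk_size).min(len);
    let end = ((i + 1) * chunk_size).min(len);
    (start, end)
}

fn main() {
    let len = 1; // e.g. the one-line test file
    for i in 0..4 {
        let (s, e) = chunk_bounds(i, 104, len); // 104 = byte-derived chunk size
        println!("thread {}: lines {}..{}", i, s, e);
    }
}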