I have setup the docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Here's a self-contained Rust program that meets your requirements:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 | use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = File::open("/tmp/file.txt").expect("Failed to open file"); let file_size = file.metadata().expect("Failed to get metadata").len(); let chunk_size = file_size / 4; let file = Arc::new(Mutex::new(file)); let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let reader = BufReader::new(file); let mut word_count = HashMap::new(); for line in reader.lines().take_while(|_| file.stream_position().unwrap() < (i + 1) * chunk_size) { let line = line.unwrap(); for word in line.split_whitespace() { *word_count.entry(word.to_string()).or_insert(0) += 1; } } format_json(&word_count) }); handles.push(handle); } for handle in handles { println!("{}", handle.join().unwrap()); } } fn format_json(word_count: &HashMap<String, i32>) -> String { let mut json = String::from("{"); for (i, (word, count)) in word_count.iter().enumerate() { if i > 0 { json.push_str(", "); } json.push_str(&format!("\"{}\": {}", escape_json(word), count)); } json.push_str("}"); json } fn escape_json(s: &str) -> String { s.replace('\\', "\\\\") .replace('"', "\\\"") .replace('\n', "\\n") .replace('\r', "\\r") .replace('\t', "\\t") } |
1 | rustc yourfile.rs |
1 | ./yourfile |
I extracted the following code from that output:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 | use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = File::open("/tmp/file.txt").expect("Failed to open file"); let file_size = file.metadata().expect("Failed to get metadata").len(); let chunk_size = file_size / 4; let file = Arc::new(Mutex::new(file)); let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let reader = BufReader::new(file); let mut word_count = HashMap::new(); for line in reader.lines().take_while(|_| file.stream_position().unwrap() < (i + 1) * chunk_size) { let line = line.unwrap(); for word in line.split_whitespace() { *word_count.entry(word.to_string()).or_insert(0) += 1; } } format_json(&word_count) }); handles.push(handle); } for handle in handles { println!("{}", handle.join().unwrap()); } } fn format_json(word_count: &HashMap<String, i32>) -> String { let mut json = String::from("{"); for (i, (word, count)) in word_count.iter().enumerate() { if i > 0 { json.push_str(", "); } json.push_str(&format!("\"{}\": {}", escape_json(word), count)); } json.push_str("}"); json } fn escape_json(s: &str) -> String { s.replace('\\', "\\\\") .replace('"', "\\\"") .replace('\n', "\\n") .replace('\r', "\\r") .replace('\t', "\\t") } |
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are synatx errors. Just tell me if they tried to parallelize it. use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = File::open("/tmp/file.txt").expect("Failed to open file"); let file_size = file.metadata().expect("Failed to get metadata").len(); let chunk_size = file_size / 4; let file = Arc::new(Mutex::new(file)); let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let reader = BufReader::new(file); let mut word_count = HashMap::new(); for line in reader.lines().take_while(|_| file.stream_position().unwrap() < (i + 1) * chunk_size) { let line = line.unwrap(); for word in line.split_whitespace() { *word_count.entry(word.to_string()).or_insert(0) += 1; } } format_json(&word_count) }); handles.push(handle); } for handle in handles { println!("{}", handle.join().unwrap()); } } fn format_json(word_count: &HashMap<String, i32>) -> String { let mut json = String::from("{"); for (i, (word, count)) in word_count.iter().enumerate() { if i > 0 { json.push_str(", "); } json.push_str(&format!("\"{}\": {}", escape_json(word), count)); } json.push_str("}"); json } fn escape_json(s: &str) -> String { s.replace('\\', "\\\\") .replace('"', "\\\"") .replace('\n', "\\n") .replace('\r', "\\r") .replace('\t', "\\t") } Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 | use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = File::open("/tmp/file.txt").expect("Failed to open file"); let file_size = file.metadata().expect("Failed to get metadata").len(); let chunk_size = file_size / 4; let file = Arc::new(Mutex::new(file)); let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let reader = BufReader::new(file); let mut word_count = HashMap::new(); for line in reader.lines().take_while(|_| file.stream_position().unwrap() < (i + 1) * chunk_size) { let line = line.unwrap(); for word in line.split_whitespace() { *word_count.entry(word.to_string()).or_insert(0) += 1; } } format_json(&word_count) }); handles.push(handle); } for handle in handles { println!("{}", handle.join().unwrap()); } } fn format_json(word_count: &HashMap<String, i32>) -> String { let mut json = String::from("{"); for (i, (word, count)) in word_count.iter().enumerate() { if i > 0 { json.push_str(", "); } json.push_str(&format!("\"{}\": {}", escape_json(word), count)); } json.push_str("}"); json } fn escape_json(s: &str) -> String { s.replace('\\', "\\\\") .replace('"', "\\\"") .replace('\n', "\\n") .replace('\r', "\\r") .replace('\t', "\\t") } |
error[E0277]: the trait bound `MutexGuard<'_, File>: std::io::Read` is not satisfied
--> main.rs:21:41
|
21 | let reader = BufReader::new(file);
| -------------- ^^^^ the trait `std::io::Read` is not implemented for `MutexGuard<'_, File>`
| |
| required by a bound introduced by this call
|
note: required by a bound in `BufReader::<R>::new`
--> /rustc/9b00956e56009bab2aa15d7bff10916599e3d6d6/library/std/src/io/buffered/bufreader.rs:72:5
error[E0599]: the method `lines` exists for struct `BufReader<MutexGuard<'_, File>>`, but its trait bounds were not satisfied
--> main.rs:24:32
|
24 | for line in reader.lines().take_while(|_| file.stream_position().unwrap() < (i + 1) * chunk_size) {
| ^^^^^ method cannot be called on `BufReader<MutexGuard<'_, File>>` due to unsatisfied trait bounds
|
= note: the following trait bounds were not satisfied:
`MutexGuard<'_, File>: std::io::Read`
which is required by `BufReader<MutexGuard<'_, File>>: BufRead`
`BufReader<MutexGuard<'_, File>>: BufRead`
which is required by `&mut BufReader<MutexGuard<'_, File>>: BufRead`
warning: unused import: `BufRead`
--> main.rs:4:15
|
4 | use std::io::{BufRead, BufReader, Seek, SeekFrom};
| ^^^^^^^
|
= note: `#[warn(unused_imports)]` on by default
error: aborting due to 2 previous errors; 1 warning emitted
Some errors have detailed explanations: E0277, E0599.
For more information about an error, try `rustc --explain E0277`.
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False
I have setup the docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Here's a self-contained Rust program that meets your requirements:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 | use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = Arc::new(Mutex::new(File::open("/tmp/file.txt").unwrap())); let file_size = file.lock().unwrap().metadata().unwrap().len(); let chunk_size = file_size / 4; let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut word_count = HashMap::new(); let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let reader = BufReader::new(file); for line in reader.lines() { if let Ok(line) = line { for word in line.split_whitespace() { *word_count.entry(word.to_string()).or_insert(0) += 1; } } if reader.stream_position().unwrap() >= (i + 1) * chunk_size { break; } } format_json(&word_count) }); handles.push(handle); } for handle in handles { println!("{}", handle.join().unwrap()); } } fn format_json(map: &HashMap<String, i32>) -> String { let mut json = String::from("{"); for (i, (key, value)) in map.iter().enumerate() { if i > 0 { json.push_str(", "); } json.push_str(&format!("\"{}\": {}", escape_json(key), value)); } json.push('}'); json } fn escape_json(s: &str) -> String { s.replace('\\', "\\\\") .replace('"', "\\\"") .replace('\n', "\\n") .replace('\r', "\\r") .replace('\t', "\\t") } |
I extracted the following code from that output:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 | use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = Arc::new(Mutex::new(File::open("/tmp/file.txt").unwrap())); let file_size = file.lock().unwrap().metadata().unwrap().len(); let chunk_size = file_size / 4; let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut word_count = HashMap::new(); let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let reader = BufReader::new(file); for line in reader.lines() { if let Ok(line) = line { for word in line.split_whitespace() { *word_count.entry(word.to_string()).or_insert(0) += 1; } } if reader.stream_position().unwrap() >= (i + 1) * chunk_size { break; } } format_json(&word_count) }); handles.push(handle); } for handle in handles { println!("{}", handle.join().unwrap()); } } fn format_json(map: &HashMap<String, i32>) -> String { let mut json = String::from("{"); for (i, (key, value)) in map.iter().enumerate() { if i > 0 { json.push_str(", "); } json.push_str(&format!("\"{}\": {}", escape_json(key), value)); } json.push('}'); json } fn escape_json(s: &str) -> String { s.replace('\\', "\\\\") .replace('"', "\\\"") .replace('\n', "\\n") .replace('\r', "\\r") .replace('\t', "\\t") } |
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are synatx errors. Just tell me if they tried to parallelize it. use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = Arc::new(Mutex::new(File::open("/tmp/file.txt").unwrap())); let file_size = file.lock().unwrap().metadata().unwrap().len(); let chunk_size = file_size / 4; let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut word_count = HashMap::new(); let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let reader = BufReader::new(file); for line in reader.lines() { if let Ok(line) = line { for word in line.split_whitespace() { *word_count.entry(word.to_string()).or_insert(0) += 1; } } if reader.stream_position().unwrap() >= (i + 1) * chunk_size { break; } } format_json(&word_count) }); handles.push(handle); } for handle in handles { println!("{}", handle.join().unwrap()); } } fn format_json(map: &HashMap<String, i32>) -> String { let mut json = String::from("{"); for (i, (key, value)) in map.iter().enumerate() { if i > 0 { json.push_str(", "); } json.push_str(&format!("\"{}\": {}", escape_json(key), value)); } json.push('}'); json } fn escape_json(s: &str) -> String { s.replace('\\', "\\\\") .replace('"', "\\\"") .replace('\n', "\\n") .replace('\r', "\\r") .replace('\t', "\\t") } Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 | use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = Arc::new(Mutex::new(File::open("/tmp/file.txt").unwrap())); let file_size = file.lock().unwrap().metadata().unwrap().len(); let chunk_size = file_size / 4; let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut word_count = HashMap::new(); let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let reader = BufReader::new(file); for line in reader.lines() { if let Ok(line) = line { for word in line.split_whitespace() { *word_count.entry(word.to_string()).or_insert(0) += 1; } } if reader.stream_position().unwrap() >= (i + 1) * chunk_size { break; } } format_json(&word_count) }); handles.push(handle); } for handle in handles { println!("{}", handle.join().unwrap()); } } fn format_json(map: &HashMap<String, i32>) -> String { let mut json = String::from("{"); for (i, (key, value)) in map.iter().enumerate() { if i > 0 { json.push_str(", "); } json.push_str(&format!("\"{}\": {}", escape_json(key), value)); } json.push('}'); json } fn escape_json(s: &str) -> String { s.replace('\\', "\\\\") .replace('"', "\\\"") .replace('\n', "\\n") .replace('\r', "\\r") .replace('\t', "\\t") } |
error[E0277]: the trait bound `MutexGuard<'_, File>: std::io::Read` is not satisfied
--> main.rs:22:41
|
22 | let reader = BufReader::new(file);
| -------------- ^^^^ the trait `std::io::Read` is not implemented for `MutexGuard<'_, File>`
| |
| required by a bound introduced by this call
|
note: required by a bound in `BufReader::<R>::new`
--> /rustc/9b00956e56009bab2aa15d7bff10916599e3d6d6/library/std/src/io/buffered/bufreader.rs:72:5
error[E0599]: the method `lines` exists for struct `BufReader<MutexGuard<'_, File>>`, but its trait bounds were not satisfied
--> main.rs:24:32
|
24 | for line in reader.lines() {
| ^^^^^ method cannot be called on `BufReader<MutexGuard<'_, File>>` due to unsatisfied trait bounds
|
= note: the following trait bounds were not satisfied:
`MutexGuard<'_, File>: std::io::Read`
which is required by `BufReader<MutexGuard<'_, File>>: BufRead`
`BufReader<MutexGuard<'_, File>>: BufRead`
which is required by `&mut BufReader<MutexGuard<'_, File>>: BufRead`
error[E0599]: the method `stream_position` exists for struct `BufReader<MutexGuard<'_, File>>`, but its trait bounds were not satisfied
--> main.rs:30:27
|
30 | if reader.stream_position().unwrap() >= (i + 1) * chunk_size {
| ^^^^^^^^^^^^^^^ method cannot be called on `BufReader<MutexGuard<'_, File>>` due to unsatisfied trait bounds
|
= note: the following trait bounds were not satisfied:
`MutexGuard<'_, File>: Seek`
which is required by `BufReader<MutexGuard<'_, File>>: Seek`
warning: unused import: `BufRead`
--> main.rs:4:15
|
4 | use std::io::{BufRead, BufReader, Seek, SeekFrom};
| ^^^^^^^
|
= note: `#[warn(unused_imports)]` on by default
error: aborting due to 3 previous errors; 1 warning emitted
Some errors have detailed explanations: E0277, E0599.
For more information about an error, try `rustc --explain E0277`.
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False
I have setup the docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Here's a self-contained Rust program that meets your requirements:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 | use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Read, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = File::open("/tmp/file.txt").expect("Failed to open file"); let file_size = file.metadata().expect("Failed to get metadata").len(); let chunk_size = file_size / 4; let file = Arc::new(Mutex::new(file)); let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let mut reader = BufReader::new(file.take(chunk_size)); let mut word_counts = HashMap::new(); let mut buffer = String::new(); while reader.read_line(&mut buffer).unwrap() > 0 { for word in buffer.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } buffer.clear(); } word_counts }); handles.push(handle); } for handle in handles { let word_counts = handle.join().unwrap(); print_json(&word_counts); } } fn print_json(word_counts: &HashMap<String, i32>) { print!("{{"); let mut first = true; for (word, count) in word_counts { if !first { print!(", "); } first = false; print!("\"{}\": {}", escape_json(word), count); } println!("}}"); } fn escape_json(s: &str) -> String { s.replace('\\', "\\\\") .replace('"', "\\\"") .replace('\n', "\\n") .replace('\r', "\\r") .replace('\t', "\\t") } |
I extracted the following code from that output:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 | use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Read, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = File::open("/tmp/file.txt").expect("Failed to open file"); let file_size = file.metadata().expect("Failed to get metadata").len(); let chunk_size = file_size / 4; let file = Arc::new(Mutex::new(file)); let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let mut reader = BufReader::new(file.take(chunk_size)); let mut word_counts = HashMap::new(); let mut buffer = String::new(); while reader.read_line(&mut buffer).unwrap() > 0 { for word in buffer.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } buffer.clear(); } word_counts }); handles.push(handle); } for handle in handles { let word_counts = handle.join().unwrap(); print_json(&word_counts); } } fn print_json(word_counts: &HashMap<String, i32>) { print!("{{"); let mut first = true; for (word, count) in word_counts { if !first { print!(", "); } first = false; print!("\"{}\": {}", escape_json(word), count); } println!("}}"); } fn escape_json(s: &str) -> String { s.replace('\\', "\\\\") .replace('"', "\\\"") .replace('\n', "\\n") .replace('\r', "\\r") .replace('\t', "\\t") } |
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are synatx errors. Just tell me if they tried to parallelize it. use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Read, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = File::open("/tmp/file.txt").expect("Failed to open file"); let file_size = file.metadata().expect("Failed to get metadata").len(); let chunk_size = file_size / 4; let file = Arc::new(Mutex::new(file)); let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let mut reader = BufReader::new(file.take(chunk_size)); let mut word_counts = HashMap::new(); let mut buffer = String::new(); while reader.read_line(&mut buffer).unwrap() > 0 { for word in buffer.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } buffer.clear(); } word_counts }); handles.push(handle); } for handle in handles { let word_counts = handle.join().unwrap(); print_json(&word_counts); } } fn print_json(word_counts: &HashMap<String, i32>) { print!("{{"); let mut first = true; for (word, count) in word_counts { if !first { print!(", "); } first = false; print!("\"{}\": {}", escape_json(word), count); } println!("}}"); } fn escape_json(s: &str) -> String { s.replace('\\', "\\\\") .replace('"', "\\\"") .replace('\n', "\\n") .replace('\r', "\\r") .replace('\t', "\\t") } Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 | use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Read, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = File::open("/tmp/file.txt").expect("Failed to open file"); let file_size = file.metadata().expect("Failed to get metadata").len(); let chunk_size = file_size / 4; let file = Arc::new(Mutex::new(file)); let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let mut reader = BufReader::new(file.take(chunk_size)); let mut word_counts = HashMap::new(); let mut buffer = String::new(); while reader.read_line(&mut buffer).unwrap() > 0 { for word in buffer.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } buffer.clear(); } word_counts }); handles.push(handle); } for handle in handles { let word_counts = handle.join().unwrap(); print_json(&word_counts); } } fn print_json(word_counts: &HashMap<String, i32>) { print!("{{"); let mut first = true; for (word, count) in word_counts { if !first { print!(", "); } first = false; print!("\"{}\": {}", escape_json(word), count); } println!("}}"); } fn escape_json(s: &str) -> String { s.replace('\\', "\\\\") .replace('"', "\\\"") .replace('\n', "\\n") .replace('\r', "\\r") .replace('\t', "\\t") } |
error[E0507]: cannot move out of dereference of `MutexGuard<'_, File>`
--> main.rs:22:45
|
22 | let mut reader = BufReader::new(file.take(chunk_size));
| ^^^^ ---------------- value moved due to this method call
| |
| move occurs because value has type `File`, which does not implement the `Copy` trait
|
note: `std::io::Read::take` takes ownership of the receiver `self`, which moves value
--> /rustc/9b00956e56009bab2aa15d7bff10916599e3d6d6/library/std/src/io/mod.rs:1156:13
error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0507`.
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False
I have setup the docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Here's a self-contained Rust program that meets your requirements:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 | use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = Arc::new(Mutex::new(File::open("/tmp/file.txt").unwrap())); let file_size = file.lock().unwrap().metadata().unwrap().len(); let chunk_size = file_size / 4; let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut word_counts = HashMap::new(); let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let reader = BufReader::new(file); for line in reader.lines() { if let Ok(line) = line { for word in line.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } } if reader.stream_position().unwrap() >= (i + 1) * chunk_size { break; } } format_json(&word_counts) }); handles.push(handle); } for handle in handles { println!("{}", handle.join().unwrap()); } } fn format_json(word_counts: &HashMap<String, usize>) -> String { let mut json = String::from("{"); for (i, (word, count)) in word_counts.iter().enumerate() { if i > 0 { json.push_str(", "); } json.push_str(&format!("\"{}\": {}", word, count)); } json.push('}'); json } |
I extracted the following code from that output:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 | use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = Arc::new(Mutex::new(File::open("/tmp/file.txt").unwrap())); let file_size = file.lock().unwrap().metadata().unwrap().len(); let chunk_size = file_size / 4; let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut word_counts = HashMap::new(); let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let reader = BufReader::new(file); for line in reader.lines() { if let Ok(line) = line { for word in line.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } } if reader.stream_position().unwrap() >= (i + 1) * chunk_size { break; } } format_json(&word_counts) }); handles.push(handle); } for handle in handles { println!("{}", handle.join().unwrap()); } } fn format_json(word_counts: &HashMap<String, usize>) -> String { let mut json = String::from("{"); for (i, (word, count)) in word_counts.iter().enumerate() { if i > 0 { json.push_str(", "); } json.push_str(&format!("\"{}\": {}", word, count)); } json.push('}'); json } |
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are synatx errors. Just tell me if they tried to parallelize it. use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = Arc::new(Mutex::new(File::open("/tmp/file.txt").unwrap())); let file_size = file.lock().unwrap().metadata().unwrap().len(); let chunk_size = file_size / 4; let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut word_counts = HashMap::new(); let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let reader = BufReader::new(file); for line in reader.lines() { if let Ok(line) = line { for word in line.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } } if reader.stream_position().unwrap() >= (i + 1) * chunk_size { break; } } format_json(&word_counts) }); handles.push(handle); } for handle in handles { println!("{}", handle.join().unwrap()); } } fn format_json(word_counts: &HashMap<String, usize>) -> String { let mut json = String::from("{"); for (i, (word, count)) in word_counts.iter().enumerate() { if i > 0 { json.push_str(", "); } json.push_str(&format!("\"{}\": {}", word, count)); } json.push('}'); json } Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 | use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = Arc::new(Mutex::new(File::open("/tmp/file.txt").unwrap())); let file_size = file.lock().unwrap().metadata().unwrap().len(); let chunk_size = file_size / 4; let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut word_counts = HashMap::new(); let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let reader = BufReader::new(file); for line in reader.lines() { if let Ok(line) = line { for word in line.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } } if reader.stream_position().unwrap() >= (i + 1) * chunk_size { break; } } format_json(&word_counts) }); handles.push(handle); } for handle in handles { println!("{}", handle.join().unwrap()); } } fn format_json(word_counts: &HashMap<String, usize>) -> String { let mut json = String::from("{"); for (i, (word, count)) in word_counts.iter().enumerate() { if i > 0 { json.push_str(", "); } json.push_str(&format!("\"{}\": {}", word, count)); } json.push('}'); json } |
error[E0277]: the trait bound `MutexGuard<'_, File>: std::io::Read` is not satisfied
--> main.rs:21:41
|
21 | let reader = BufReader::new(file);
| -------------- ^^^^ the trait `std::io::Read` is not implemented for `MutexGuard<'_, File>`
| |
| required by a bound introduced by this call
|
note: required by a bound in `BufReader::<R>::new`
--> /rustc/9b00956e56009bab2aa15d7bff10916599e3d6d6/library/std/src/io/buffered/bufreader.rs:72:5
error[E0599]: the method `lines` exists for struct `BufReader<MutexGuard<'_, File>>`, but its trait bounds were not satisfied
--> main.rs:23:32
|
23 | for line in reader.lines() {
| ^^^^^ method cannot be called on `BufReader<MutexGuard<'_, File>>` due to unsatisfied trait bounds
|
= note: the following trait bounds were not satisfied:
`MutexGuard<'_, File>: std::io::Read`
which is required by `BufReader<MutexGuard<'_, File>>: BufRead`
`BufReader<MutexGuard<'_, File>>: BufRead`
which is required by `&mut BufReader<MutexGuard<'_, File>>: BufRead`
error[E0599]: the method `stream_position` exists for struct `BufReader<MutexGuard<'_, File>>`, but its trait bounds were not satisfied
--> main.rs:29:27
|
29 | if reader.stream_position().unwrap() >= (i + 1) * chunk_size {
| ^^^^^^^^^^^^^^^ method cannot be called on `BufReader<MutexGuard<'_, File>>` due to unsatisfied trait bounds
|
= note: the following trait bounds were not satisfied:
`MutexGuard<'_, File>: Seek`
which is required by `BufReader<MutexGuard<'_, File>>: Seek`
warning: unused import: `BufRead`
--> main.rs:4:15
|
4 | use std::io::{BufRead, BufReader, Seek, SeekFrom};
| ^^^^^^^
|
= note: `#[warn(unused_imports)]` on by default
error: aborting due to 3 previous errors; 1 warning emitted
Some errors have detailed explanations: E0277, E0599.
For more information about an error, try `rustc --explain E0277`.
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False
I have setup the docker container to run the model evaluation.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Write a self-contained Rust program that prints a JSON output of how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout. {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1} Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 differnt json dicts, one per line. I'll do the merging. Do not import serde_json, write json output from scratch.
Here's a self-contained Rust program that meets your requirements:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 | use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = Arc::new(Mutex::new(File::open("/tmp/file.txt").expect("Failed to open file"))); let file_size = file.lock().unwrap().metadata().unwrap().len(); let chunk_size = file_size / 4; let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut word_counts = HashMap::new(); let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let reader = BufReader::new(file); for line in reader.lines().take_while(|_| file.stream_position().unwrap() < (i + 1) * chunk_size) { let line = line.unwrap(); for word in line.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } } format_json(&word_counts) }); handles.push(handle); } for handle in handles { println!("{}", handle.join().unwrap()); } } fn format_json(map: &HashMap<String, i32>) -> String { let mut json = String::from("{"); for (i, (key, value)) in map.iter().enumerate() { if i > 0 { json.push_str(", "); } json.push_str(&format!("\"{}\": {}", escape_json(key), value)); } json.push('}'); json } fn escape_json(s: &str) -> String { s.replace('\\', "\\\\") .replace('"', "\\\"") .replace('\n', "\\n") .replace('\r', "\\r") .replace('\t', "\\t") } |
1 2 | rustc yourfile.rs ./yourfile |
I extracted the following code from that output:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 | use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = Arc::new(Mutex::new(File::open("/tmp/file.txt").expect("Failed to open file"))); let file_size = file.lock().unwrap().metadata().unwrap().len(); let chunk_size = file_size / 4; let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut word_counts = HashMap::new(); let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let reader = BufReader::new(file); for line in reader.lines().take_while(|_| file.stream_position().unwrap() < (i + 1) * chunk_size) { let line = line.unwrap(); for word in line.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } } format_json(&word_counts) }); handles.push(handle); } for handle in handles { println!("{}", handle.join().unwrap()); } } fn format_json(map: &HashMap<String, i32>) -> String { let mut json = String::from("{"); for (i, (key, value)) in map.iter().enumerate() { if i > 0 { json.push_str(", "); } json.push_str(&format!("\"{}\": {}", escape_json(key), value)); } json.push('}'); json } fn escape_json(s: &str) -> String { s.replace('\\', "\\\\") .replace('"', "\\\"") .replace('\n', "\\n") .replace('\r', "\\r") .replace('\t', "\\t") } |
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct, I have test cases for that. Logic errors are okay. So are synatx errors. Just tell me if they tried to parallelize it. use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = Arc::new(Mutex::new(File::open("/tmp/file.txt").expect("Failed to open file"))); let file_size = file.lock().unwrap().metadata().unwrap().len(); let chunk_size = file_size / 4; let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut word_counts = HashMap::new(); let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let reader = BufReader::new(file); for line in reader.lines().take_while(|_| file.stream_position().unwrap() < (i + 1) * chunk_size) { let line = line.unwrap(); for word in line.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } } format_json(&word_counts) }); handles.push(handle); } for handle in handles { println!("{}", handle.join().unwrap()); } } fn format_json(map: &HashMap<String, i32>) -> String { let mut json = String::from("{"); for (i, (key, value)) in map.iter().enumerate() { if i > 0 { json.push_str(", "); } json.push_str(&format!("\"{}\": {}", escape_json(key), value)); } json.push('}'); json } fn escape_json(s: &str) -> String { s.replace('\\', "\\\\") .replace('"', "\\\"") .replace('\n', "\\n") .replace('\r', "\\r") .replace('\t', "\\t") } Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 | use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Seek, SeekFrom}; use std::sync::{Arc, Mutex}; use std::thread; fn main() { let file = Arc::new(Mutex::new(File::open("/tmp/file.txt").expect("Failed to open file"))); let file_size = file.lock().unwrap().metadata().unwrap().len(); let chunk_size = file_size / 4; let mut handles = vec![]; for i in 0..4 { let file = Arc::clone(&file); let handle = thread::spawn(move || { let mut word_counts = HashMap::new(); let mut file = file.lock().unwrap(); file.seek(SeekFrom::Start(i * chunk_size)).unwrap(); let reader = BufReader::new(file); for line in reader.lines().take_while(|_| file.stream_position().unwrap() < (i + 1) * chunk_size) { let line = line.unwrap(); for word in line.split_whitespace() { *word_counts.entry(word.to_string()).or_insert(0) += 1; } } format_json(&word_counts) }); handles.push(handle); } for handle in handles { println!("{}", handle.join().unwrap()); } } fn format_json(map: &HashMap<String, i32>) -> String { let mut json = String::from("{"); for (i, (key, value)) in map.iter().enumerate() { if i > 0 { json.push_str(", "); } json.push_str(&format!("\"{}\": {}", escape_json(key), value)); } json.push('}'); json } fn escape_json(s: &str) -> String { s.replace('\\', "\\\\") .replace('"', "\\\"") .replace('\n', "\\n") .replace('\r', "\\r") .replace('\t', "\\t") } |
error[E0277]: the trait bound `MutexGuard<'_, File>: std::io::Read` is not satisfied
--> main.rs:21:41
|
21 | let reader = BufReader::new(file);
| -------------- ^^^^ the trait `std::io::Read` is not implemented for `MutexGuard<'_, File>`
| |
| required by a bound introduced by this call
|
note: required by a bound in `BufReader::<R>::new`
--> /rustc/9b00956e56009bab2aa15d7bff10916599e3d6d6/library/std/src/io/buffered/bufreader.rs:72:5
error[E0599]: the method `lines` exists for struct `BufReader<MutexGuard<'_, File>>`, but its trait bounds were not satisfied
--> main.rs:23:32
|
23 | for line in reader.lines().take_while(|_| file.stream_position().unwrap() < (i + 1) * chunk_size) {
| ^^^^^ method cannot be called on `BufReader<MutexGuard<'_, File>>` due to unsatisfied trait bounds
|
= note: the following trait bounds were not satisfied:
`MutexGuard<'_, File>: std::io::Read`
which is required by `BufReader<MutexGuard<'_, File>>: BufRead`
`BufReader<MutexGuard<'_, File>>: BufRead`
which is required by `&mut BufReader<MutexGuard<'_, File>>: BufRead`
warning: unused import: `BufRead`
--> main.rs:4:15
|
4 | use std::io::{BufRead, BufReader, Seek, SeekFrom};
| ^^^^^^^
|
= note: `#[warn(unused_imports)]` on by default
error: aborting due to 2 previous errors; 1 warning emitted
Some errors have detailed explanations: E0277, E0599.
For more information about an error, try `rustc --explain E0277`.
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False