I have set up the Docker container to run the model evaluation.
Write a self-contained Rust program that prints JSON output showing how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout: {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1}. Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 different JSON dicts, one per line. I'll do the merging. Do not import serde_json; write the JSON output from scratch.
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().expect("Failed to get metadata").len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size * i as u64) / num_threads as u64;
        let end = (file_size * (i + 1) as u64) / num_threads as u64;
        let filename = filename.to_string();

        let handle = thread::spawn(move || {
            let file = File::open(filename).expect("Failed to open file in thread");
            let mut reader = BufReader::new(file);

            // Seek to the starting position for this thread
            reader.seek_relative(start as i64).expect("Failed to seek");

            let mut word_counts = HashMap::new();
            let mut current_pos = start;
            let mut line = String::new();

            while current_pos < end && reader.read_line(&mut line).expect("Failed to read line") > 0 {
                let words: Vec<&str> = line.split_whitespace().collect();
                for word in words {
                    *word_counts.entry(word).or_insert(0) += 1;
                }
                current_pos += line.len() as u64;
                line.clear();
            }

            print_json(&word_counts);
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}

fn print_json(word_counts: &HashMap<&str, u32>) {
    print!("{{");
    let mut first = true;
    for (word, count) in word_counts {
        if !first {
            print!(",");
        }
        print!("\"{}\":{}", word, count);
        first = false;
    }
    println!("}}");
}
I extracted the following code from that output:
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().expect("Failed to get metadata").len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size * i as u64) / num_threads as u64;
        let end = (file_size * (i + 1) as u64) / num_threads as u64;
        let filename = filename.to_string();

        let handle = thread::spawn(move || {
            let file = File::open(filename).expect("Failed to open file in thread");
            let mut reader = BufReader::new(file);

            // Seek to the starting position for this thread
            reader.seek_relative(start as i64).expect("Failed to seek");

            let mut word_counts = HashMap::new();
            let mut current_pos = start;
            let mut line = String::new();

            while current_pos < end && reader.read_line(&mut line).expect("Failed to read line") > 0 {
                let words: Vec<&str> = line.split_whitespace().collect();
                for word in words {
                    *word_counts.entry(word).or_insert(0) += 1;
                }
                current_pos += line.len() as u64;
                line.clear();
            }

            print_json(&word_counts);
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}

fn print_json(word_counts: &HashMap<&str, u32>) {
    print!("{{");
    let mut first = true;
    for (word, count) in word_counts {
        if !first {
            print!(",");
        }
        print!("\"{}\":{}", word, count);
        first = false;
    }
    println!("}}");
}
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct; I have test cases for that. Logic errors are okay. So are syntax errors. Just tell me if they tried to parallelize it.

use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().expect("Failed to get metadata").len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size * i as u64) / num_threads as u64;
        let end = (file_size * (i + 1) as u64) / num_threads as u64;
        let filename = filename.to_string();

        let handle = thread::spawn(move || {
            let file = File::open(filename).expect("Failed to open file in thread");
            let mut reader = BufReader::new(file);

            // Seek to the starting position for this thread
            reader.seek_relative(start as i64).expect("Failed to seek");

            let mut word_counts = HashMap::new();
            let mut current_pos = start;
            let mut line = String::new();

            while current_pos < end && reader.read_line(&mut line).expect("Failed to read line") > 0 {
                let words: Vec<&str> = line.split_whitespace().collect();
                for word in words {
                    *word_counts.entry(word).or_insert(0) += 1;
                }
                current_pos += line.len() as u64;
                line.clear();
            }

            print_json(&word_counts);
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}

fn print_json(word_counts: &HashMap<&str, u32>) {
    print!("{{");
    let mut first = true;
    for (word, count) in word_counts {
        if !first {
            print!(",");
        }
        print!("\"{}\":{}", word, count);
        first = false;
    }
    println!("}}");
}

Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().expect("Failed to get metadata").len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size * i as u64) / num_threads as u64;
        let end = (file_size * (i + 1) as u64) / num_threads as u64;
        let filename = filename.to_string();

        let handle = thread::spawn(move || {
            let file = File::open(filename).expect("Failed to open file in thread");
            let mut reader = BufReader::new(file);

            // Seek to the starting position for this thread
            reader.seek_relative(start as i64).expect("Failed to seek");

            let mut word_counts = HashMap::new();
            let mut current_pos = start;
            let mut line = String::new();

            while current_pos < end && reader.read_line(&mut line).expect("Failed to read line") > 0 {
                let words: Vec<&str> = line.split_whitespace().collect();
                for word in words {
                    *word_counts.entry(word).or_insert(0) += 1;
                }
                current_pos += line.len() as u64;
                line.clear();
            }

            print_json(&word_counts);
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}

fn print_json(word_counts: &HashMap<&str, u32>) {
    print!("{{");
    let mut first = true;
    for (word, count) in word_counts {
        if !first {
            print!(",");
        }
        print!("\"{}\":{}", word, count);
        first = false;
    }
    println!("}}");
}
warning: unused import: `std::env`
--> main.rs:3:5
|
3 | use std::env;
| ^^^^^^^^
|
= note: `#[warn(unused_imports)]` on by default
error[E0502]: cannot borrow `line` as mutable because it is also borrowed as immutable
--> main.rs:32:57
|
32 | while current_pos < end && reader.read_line(&mut line).expect("Failed to read line") > 0 {
| ^^^^^^^^^ mutable borrow occurs here
33 | let words: Vec<&str> = line.split_whitespace().collect();
| ---- immutable borrow occurs here
...
41 | print_json(&word_counts);
| ------------ immutable borrow later used here
error[E0502]: cannot borrow `line` as mutable because it is also borrowed as immutable
--> main.rs:38:17
|
33 | let words: Vec<&str> = line.split_whitespace().collect();
| ---- immutable borrow occurs here
...
38 | line.clear();
| ^^^^^^^^^^^^ mutable borrow occurs here
...
41 | print_json(&word_counts);
| ------------ immutable borrow later used here
error: aborting due to 2 previous errors; 1 warning emitted
For more information about this error, try `rustc --explain E0502`.
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False
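For reference, the E0502 above arises because `word_counts` stores `&str` keys borrowed from `line`, so `line` can be neither re-read into nor cleared while the map is alive. A minimal sketch of one possible fix, assuming owned `String` keys are acceptable for this workload (the function and names below are illustrative, not taken from the run above):

use std::collections::HashMap;

// Owning the keys (String, not &str) means the map no longer borrows
// `line`, so the caller is free to clear and reuse the buffer.
fn count_words(line: &str, word_counts: &mut HashMap<String, u32>) {
    for word in line.split_whitespace() {
        *word_counts.entry(word.to_string()).or_insert(0) += 1;
    }
}

fn main() {
    let mut word_counts: HashMap<String, u32> = HashMap::new();
    let mut line = String::from("This is is is, an example");
    count_words(&line, &mut word_counts);
    line.clear(); // no E0502: word_counts holds owned Strings
    assert_eq!(word_counts["is"], 2);
    println!("{:?}", word_counts);
}

The cost is one allocation per counted word, which is usually the simplest trade to satisfy the borrow checker in this pattern.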
I have set up the Docker container to run the model evaluation.
Write a self-contained Rust program that prints JSON output showing how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout: {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1}. Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 different JSON dicts, one per line. I'll do the merging. Do not import serde_json; write the JSON output from scratch.
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().unwrap().len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size * i as u64) / num_threads as u64;
        let end = (file_size * (i + 1) as u64) / num_threads as u64;
        let filename = filename.to_string(); // Move ownership into the closure

        let handle = thread::spawn(move || {
            let file = File::open(filename).expect("Failed to open file");
            let mut reader = BufReader::new(file);

            // Skip to the starting offset. Not perfectly accurate byte-wise, but good enough for this example
            reader.seek_relative(start as i64).expect("Failed to seek");

            let mut word_counts = HashMap::new();
            let mut current_line = String::new();

            loop {
                let bytes_read = reader.read_line(&mut current_line).expect("Failed to read line");
                if bytes_read == 0 || reader.stream_position().unwrap() >= end {
                    break; // End of file or chunk
                }
                for word in current_line.split_whitespace() {
                    *word_counts.entry(word.to_string()).or_insert(0) += 1;
                }
                current_line.clear();
            }

            // Manual JSON serialization
            print!("{{");
            let mut first = true;
            for (word, count) in word_counts {
                if !first {
                    print!(",");
                }
                print!("\"{}\":{}", word, count);
                first = false;
            }
            println!("}}");
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}
I extracted the following code from that output:
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().unwrap().len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size * i as u64) / num_threads as u64;
        let end = (file_size * (i + 1) as u64) / num_threads as u64;
        let filename = filename.to_string(); // Move ownership into the closure

        let handle = thread::spawn(move || {
            let file = File::open(filename).expect("Failed to open file");
            let mut reader = BufReader::new(file);

            // Skip to the starting offset. Not perfectly accurate byte-wise, but good enough for this example
            reader.seek_relative(start as i64).expect("Failed to seek");

            let mut word_counts = HashMap::new();
            let mut current_line = String::new();

            loop {
                let bytes_read = reader.read_line(&mut current_line).expect("Failed to read line");
                if bytes_read == 0 || reader.stream_position().unwrap() >= end {
                    break; // End of file or chunk
                }
                for word in current_line.split_whitespace() {
                    *word_counts.entry(word.to_string()).or_insert(0) += 1;
                }
                current_line.clear();
            }

            // Manual JSON serialization
            print!("{{");
            let mut first = true;
            for (word, count) in word_counts {
                if !first {
                    print!(",");
                }
                print!("\"{}\":{}", word, count);
                first = false;
            }
            println!("}}");
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct; I have test cases for that. Logic errors are okay. So are syntax errors. Just tell me if they tried to parallelize it.

use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().unwrap().len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size * i as u64) / num_threads as u64;
        let end = (file_size * (i + 1) as u64) / num_threads as u64;
        let filename = filename.to_string(); // Move ownership into the closure

        let handle = thread::spawn(move || {
            let file = File::open(filename).expect("Failed to open file");
            let mut reader = BufReader::new(file);

            // Skip to the starting offset. Not perfectly accurate byte-wise, but good enough for this example
            reader.seek_relative(start as i64).expect("Failed to seek");

            let mut word_counts = HashMap::new();
            let mut current_line = String::new();

            loop {
                let bytes_read = reader.read_line(&mut current_line).expect("Failed to read line");
                if bytes_read == 0 || reader.stream_position().unwrap() >= end {
                    break; // End of file or chunk
                }
                for word in current_line.split_whitespace() {
                    *word_counts.entry(word.to_string()).or_insert(0) += 1;
                }
                current_line.clear();
            }

            // Manual JSON serialization
            print!("{{");
            let mut first = true;
            for (word, count) in word_counts {
                if !first {
                    print!(",");
                }
                print!("\"{}\":{}", word, count);
                first = false;
            }
            println!("}}");
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}

Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().unwrap().len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size * i as u64) / num_threads as u64;
        let end = (file_size * (i + 1) as u64) / num_threads as u64;
        let filename = filename.to_string(); // Move ownership into the closure

        let handle = thread::spawn(move || {
            let file = File::open(filename).expect("Failed to open file");
            let mut reader = BufReader::new(file);

            // Skip to the starting offset. Not perfectly accurate byte-wise, but good enough for this example
            reader.seek_relative(start as i64).expect("Failed to seek");

            let mut word_counts = HashMap::new();
            let mut current_line = String::new();

            loop {
                let bytes_read = reader.read_line(&mut current_line).expect("Failed to read line");
                if bytes_read == 0 || reader.stream_position().unwrap() >= end {
                    break; // End of file or chunk
                }
                for word in current_line.split_whitespace() {
                    *word_counts.entry(word.to_string()).or_insert(0) += 1;
                }
                current_line.clear();
            }

            // Manual JSON serialization
            print!("{{");
            let mut first = true;
            for (word, count) in word_counts {
                if !first {
                    print!(",");
                }
                print!("\"{}\":{}", word, count);
                first = false;
            }
            println!("}}");
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}
warning: unused import: `std::env`
--> main.rs:3:5
|
3 | use std::env;
| ^^^^^^^^
|
= note: `#[warn(unused_imports)]` on by default
error[E0599]: no method named `stream_position` found for struct `BufReader` in the current scope
--> main.rs:36:46
|
36 | if bytes_read == 0 || reader.stream_position().unwrap() >= end {
| ^^^^^^^^^^^^^^^ method not found in `BufReader<File>`
--> /rustc/f6e511eec7342f59a25f7c0534f1dbea00d01b14/library/std/src/io/mod.rs:2022:8
|
= note: the method is available for `BufReader<File>` here
|
= help: items from traits can only be used if the trait is in scope
help: trait `Seek` which provides `stream_position` is implemented but not in scope; perhaps you want to import it
|
2 + use std::io::Seek;
|
error: aborting due to 1 previous error; 1 warning emitted
For more information about this error, try `rustc --explain E0599`.
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False
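This E0599 is just a missing trait import: `stream_position` lives on the `std::io::Seek` trait, which `BufReader<File>` implements but which has to be in scope, exactly as the compiler hint says. A minimal sketch of the corrected pattern, assuming /tmp/file.txt exists:

use std::fs::File;
use std::io::{BufRead, BufReader, Seek}; // Seek brings stream_position into scope

fn main() -> std::io::Result<()> {
    let file = File::open("/tmp/file.txt")?;
    let mut reader = BufReader::new(file);
    let mut line = String::new();
    reader.read_line(&mut line)?;
    // With Seek imported, this now compiles:
    let pos = reader.stream_position()?;
    println!("read {} bytes, cursor at byte {}", line.len(), pos);
    Ok(())
}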
I have set up the Docker container to run the model evaluation.
Write a self-contained Rust program that prints JSON output showing how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout: {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1}. Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 different JSON dicts, one per line. I'll do the merging. Do not import serde_json; write the JSON output from scratch.
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().expect("Failed to get metadata").len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size * i as u64) / num_threads as u64;
        let end = (file_size * (i + 1) as u64) / num_threads as u64;
        let filename = filename.to_string();

        let handle = thread::spawn(move || {
            let file = File::open(filename).expect("Failed to open file");
            let mut reader = BufReader::new(file);

            // Seek to the starting offset for this thread.
            reader.seek_relative(start as i64).unwrap();

            let mut word_counts = HashMap::new();
            let mut buffer = String::new();

            loop {
                let bytes_read = reader.read_line(&mut buffer).expect("Failed to read line");
                if bytes_read == 0 {
                    break;
                }
                let current_position = reader.stream_position().unwrap();
                if current_position >= end {
                    let last_line = buffer.clone();
                    // Split the last line at the end boundary.
                    let mut split_point = 0;
                    if current_position > end {
                        split_point = (current_position - end) as usize;
                        if split_point < buffer.len() {
                            let (kept_part, _) = buffer.split_at(split_point);
                            buffer = kept_part.to_string();
                        }
                    }
                    for word in buffer.split_whitespace() {
                        *word_counts.entry(word.to_string()).or_insert(0) += 1;
                    }
                    break;
                }
                for word in buffer.split_whitespace() {
                    *word_counts.entry(word.to_string()).or_insert(0) += 1;
                }
                buffer.clear();
            }

            print!("{{");
            let mut first = true;
            for (word, count) in word_counts {
                if !first {
                    print!(",");
                }
                print!("\"{}\":{}", word, count);
                first = false;
            }
            println!("}}");
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}
I extracted the following code from that output:
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().expect("Failed to get metadata").len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size * i as u64) / num_threads as u64;
        let end = (file_size * (i + 1) as u64) / num_threads as u64;
        let filename = filename.to_string();

        let handle = thread::spawn(move || {
            let file = File::open(filename).expect("Failed to open file");
            let mut reader = BufReader::new(file);

            // Seek to the starting offset for this thread.
            reader.seek_relative(start as i64).unwrap();

            let mut word_counts = HashMap::new();
            let mut buffer = String::new();

            loop {
                let bytes_read = reader.read_line(&mut buffer).expect("Failed to read line");
                if bytes_read == 0 {
                    break;
                }
                let current_position = reader.stream_position().unwrap();
                if current_position >= end {
                    let last_line = buffer.clone();
                    // Split the last line at the end boundary.
                    let mut split_point = 0;
                    if current_position > end {
                        split_point = (current_position - end) as usize;
                        if split_point < buffer.len() {
                            let (kept_part, _) = buffer.split_at(split_point);
                            buffer = kept_part.to_string();
                        }
                    }
                    for word in buffer.split_whitespace() {
                        *word_counts.entry(word.to_string()).or_insert(0) += 1;
                    }
                    break;
                }
                for word in buffer.split_whitespace() {
                    *word_counts.entry(word.to_string()).or_insert(0) += 1;
                }
                buffer.clear();
            }

            print!("{{");
            let mut first = true;
            for (word, count) in word_counts {
                if !first {
                    print!(",");
                }
                print!("\"{}\":{}", word, count);
                first = false;
            }
            println!("}}");
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct; I have test cases for that. Logic errors are okay. So are syntax errors. Just tell me if they tried to parallelize it.

use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().expect("Failed to get metadata").len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size * i as u64) / num_threads as u64;
        let end = (file_size * (i + 1) as u64) / num_threads as u64;
        let filename = filename.to_string();

        let handle = thread::spawn(move || {
            let file = File::open(filename).expect("Failed to open file");
            let mut reader = BufReader::new(file);

            // Seek to the starting offset for this thread.
            reader.seek_relative(start as i64).unwrap();

            let mut word_counts = HashMap::new();
            let mut buffer = String::new();

            loop {
                let bytes_read = reader.read_line(&mut buffer).expect("Failed to read line");
                if bytes_read == 0 {
                    break;
                }
                let current_position = reader.stream_position().unwrap();
                if current_position >= end {
                    let last_line = buffer.clone();
                    // Split the last line at the end boundary.
                    let mut split_point = 0;
                    if current_position > end {
                        split_point = (current_position - end) as usize;
                        if split_point < buffer.len() {
                            let (kept_part, _) = buffer.split_at(split_point);
                            buffer = kept_part.to_string();
                        }
                    }
                    for word in buffer.split_whitespace() {
                        *word_counts.entry(word.to_string()).or_insert(0) += 1;
                    }
                    break;
                }
                for word in buffer.split_whitespace() {
                    *word_counts.entry(word.to_string()).or_insert(0) += 1;
                }
                buffer.clear();
            }

            print!("{{");
            let mut first = true;
            for (word, count) in word_counts {
                if !first {
                    print!(",");
                }
                print!("\"{}\":{}", word, count);
                first = false;
            }
            println!("}}");
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}

Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().expect("Failed to get metadata").len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size * i as u64) / num_threads as u64;
        let end = (file_size * (i + 1) as u64) / num_threads as u64;
        let filename = filename.to_string();

        let handle = thread::spawn(move || {
            let file = File::open(filename).expect("Failed to open file");
            let mut reader = BufReader::new(file);

            // Seek to the starting offset for this thread.
            reader.seek_relative(start as i64).unwrap();

            let mut word_counts = HashMap::new();
            let mut buffer = String::new();

            loop {
                let bytes_read = reader.read_line(&mut buffer).expect("Failed to read line");
                if bytes_read == 0 {
                    break;
                }
                let current_position = reader.stream_position().unwrap();
                if current_position >= end {
                    let last_line = buffer.clone();
                    // Split the last line at the end boundary.
                    let mut split_point = 0;
                    if current_position > end {
                        split_point = (current_position - end) as usize;
                        if split_point < buffer.len() {
                            let (kept_part, _) = buffer.split_at(split_point);
                            buffer = kept_part.to_string();
                        }
                    }
                    for word in buffer.split_whitespace() {
                        *word_counts.entry(word.to_string()).or_insert(0) += 1;
                    }
                    break;
                }
                for word in buffer.split_whitespace() {
                    *word_counts.entry(word.to_string()).or_insert(0) += 1;
                }
                buffer.clear();
            }

            print!("{{");
            let mut first = true;
            for (word, count) in word_counts {
                if !first {
                    print!(",");
                }
                print!("\"{}\":{}", word, count);
                first = false;
            }
            println!("}}");
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}
warning: unused import: `std::env`
--> main.rs:3:5
|
3 | use std::env;
| ^^^^^^^^
|
= note: `#[warn(unused_imports)]` on by default
error[E0599]: no method named `stream_position` found for struct `BufReader` in the current scope
--> main.rs:38:47
|
38 | let current_position = reader.stream_position().unwrap();
| ^^^^^^^^^^^^^^^ method not found in `BufReader<File>`
--> /rustc/f6e511eec7342f59a25f7c0534f1dbea00d01b14/library/std/src/io/mod.rs:2022:8
|
= note: the method is available for `BufReader<File>` here
|
= help: items from traits can only be used if the trait is in scope
help: trait `Seek` which provides `stream_position` is implemented but not in scope; perhaps you want to import it
|
2 + use std::io::Seek;
|
error: aborting due to 1 previous error; 1 warning emitted
For more information about this error, try `rustc --explain E0599`.
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False
I have set up the Docker container to run the model evaluation.
Write a self-contained Rust program that prints JSON output showing how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout: {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1}. Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 different JSON dicts, one per line. I'll do the merging. Do not import serde_json; write the JSON output from scratch.
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().unwrap().len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size / num_threads as u64) * i as u64;
        let end = if i == num_threads - 1 {
            file_size
        } else {
            (file_size / num_threads as u64) * (i + 1) as u64
        };
        let filename = filename.to_string();

        let handle = thread::spawn(move || {
            let file = File::open(filename).expect("Failed to open file in thread");
            let mut reader = BufReader::new(file);

            // Seek to the starting position
            reader.seek_relative(start as i64).unwrap();

            if i != 0 {
                // Skip the first word since it might be incomplete
                reader.read_line(&mut String::new()).unwrap();
            }

            let mut word_counts = HashMap::new();
            let mut line = String::new();

            loop {
                let bytes_read = reader.read_line(&mut line).unwrap();
                if bytes_read == 0 || reader.stream_position().unwrap() >= end {
                    break;
                }
                for word in line.split_whitespace() {
                    *word_counts.entry(word.to_string()).or_insert(0) += 1;
                }
                line.clear();
            }

            print_json(&word_counts);
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}

fn print_json(word_counts: &HashMap<String, i32>) {
    print!("{{");
    let mut first = true;
    for (word, count) in word_counts {
        if !first {
            print!(",");
        }
        print!("\"{}\":{}", word, count);
        first = false;
    }
    println!("}}");
}
I extracted the following code from that output:
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().unwrap().len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size / num_threads as u64) * i as u64;
        let end = if i == num_threads - 1 {
            file_size
        } else {
            (file_size / num_threads as u64) * (i + 1) as u64
        };
        let filename = filename.to_string();

        let handle = thread::spawn(move || {
            let file = File::open(filename).expect("Failed to open file in thread");
            let mut reader = BufReader::new(file);

            // Seek to the starting position
            reader.seek_relative(start as i64).unwrap();

            if i != 0 {
                // Skip the first word since it might be incomplete
                reader.read_line(&mut String::new()).unwrap();
            }

            let mut word_counts = HashMap::new();
            let mut line = String::new();

            loop {
                let bytes_read = reader.read_line(&mut line).unwrap();
                if bytes_read == 0 || reader.stream_position().unwrap() >= end {
                    break;
                }
                for word in line.split_whitespace() {
                    *word_counts.entry(word.to_string()).or_insert(0) += 1;
                }
                line.clear();
            }

            print_json(&word_counts);
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}

fn print_json(word_counts: &HashMap<String, i32>) {
    print!("{{");
    let mut first = true;
    for (word, count) in word_counts {
        if !first {
            print!(",");
        }
        print!("\"{}\":{}", word, count);
        first = false;
    }
    println!("}}");
}
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct; I have test cases for that. Logic errors are okay. So are syntax errors. Just tell me if they tried to parallelize it.

use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().unwrap().len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size / num_threads as u64) * i as u64;
        let end = if i == num_threads - 1 {
            file_size
        } else {
            (file_size / num_threads as u64) * (i + 1) as u64
        };
        let filename = filename.to_string();

        let handle = thread::spawn(move || {
            let file = File::open(filename).expect("Failed to open file in thread");
            let mut reader = BufReader::new(file);

            // Seek to the starting position
            reader.seek_relative(start as i64).unwrap();

            if i != 0 {
                // Skip the first word since it might be incomplete
                reader.read_line(&mut String::new()).unwrap();
            }

            let mut word_counts = HashMap::new();
            let mut line = String::new();

            loop {
                let bytes_read = reader.read_line(&mut line).unwrap();
                if bytes_read == 0 || reader.stream_position().unwrap() >= end {
                    break;
                }
                for word in line.split_whitespace() {
                    *word_counts.entry(word.to_string()).or_insert(0) += 1;
                }
                line.clear();
            }

            print_json(&word_counts);
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}

fn print_json(word_counts: &HashMap<String, i32>) {
    print!("{{");
    let mut first = true;
    for (word, count) in word_counts {
        if !first {
            print!(",");
        }
        print!("\"{}\":{}", word, count);
        first = false;
    }
    println!("}}");
}

Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().unwrap().len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size / num_threads as u64) * i as u64;
        let end = if i == num_threads - 1 {
            file_size
        } else {
            (file_size / num_threads as u64) * (i + 1) as u64
        };
        let filename = filename.to_string();

        let handle = thread::spawn(move || {
            let file = File::open(filename).expect("Failed to open file in thread");
            let mut reader = BufReader::new(file);

            // Seek to the starting position
            reader.seek_relative(start as i64).unwrap();

            if i != 0 {
                // Skip the first word since it might be incomplete
                reader.read_line(&mut String::new()).unwrap();
            }

            let mut word_counts = HashMap::new();
            let mut line = String::new();

            loop {
                let bytes_read = reader.read_line(&mut line).unwrap();
                if bytes_read == 0 || reader.stream_position().unwrap() >= end {
                    break;
                }
                for word in line.split_whitespace() {
                    *word_counts.entry(word.to_string()).or_insert(0) += 1;
                }
                line.clear();
            }

            print_json(&word_counts);
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}

fn print_json(word_counts: &HashMap<String, i32>) {
    print!("{{");
    let mut first = true;
    for (word, count) in word_counts {
        if !first {
            print!(",");
        }
        print!("\"{}\":{}", word, count);
        first = false;
    }
    println!("}}");
}
warning: unused import: `std::env`
--> main.rs:3:5
|
3 | use std::env;
| ^^^^^^^^
|
= note: `#[warn(unused_imports)]` on by default
error[E0599]: no method named `stream_position` found for struct `BufReader` in the current scope
--> main.rs:42:46
|
42 | if bytes_read == 0 || reader.stream_position().unwrap() >= end {
| ^^^^^^^^^^^^^^^ method not found in `BufReader<File>`
--> /rustc/f6e511eec7342f59a25f7c0534f1dbea00d01b14/library/std/src/io/mod.rs:2022:8
|
= note: the method is available for `BufReader<File>` here
|
= help: items from traits can only be used if the trait is in scope
help: trait `Seek` which provides `stream_position` is implemented but not in scope; perhaps you want to import it
|
2 + use std::io::Seek;
|
error: aborting due to 1 previous error; 1 warning emitted
For more information about this error, try `rustc --explain E0599`.
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False
I have set up the Docker container to run the model evaluation.
Write a self-contained Rust program that prints JSON output showing how often each word appears in a text file located in /tmp/file.txt. So if the file contains "This is is is, an example" then the program should print to stdout: {"This": 1, "is": 2, "is,": 1, "an": 1, "example": 1}. Don't import any crates that would require a Cargo.toml file. I'll just compile it with `rustc yourfile.rs` and it should work. The program should be run by calling /path/to/rust_word_count with no arguments. Make it run in parallel on 4 cores at the same time, each job taking ~1/4 of the file. Print out 4 different JSON dicts, one per line. I'll do the merging. Do not import serde_json; write the JSON output from scratch.
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().unwrap().len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size / num_threads as u64) * i as u64;
        let end = if i == num_threads - 1 {
            file_size
        } else {
            (file_size / num_threads as u64) * (i + 1) as u64
        };
        let filename_clone = filename.to_string();

        let handle = thread::spawn(move || {
            let file = File::open(filename_clone).expect("Failed to open file in thread");
            let mut reader = BufReader::new(file);

            // Seek to the starting position
            reader.seek_relative(start as i64).unwrap();

            let mut word_counts = HashMap::new();
            let mut current_word = String::new();

            // Process until the end position or EOF
            let mut pos = start;
            while pos < end {
                let buf = reader.fill_buf().unwrap();
                if buf.is_empty() {
                    break; // EOF
                }
                for &byte in buf {
                    pos += 1;
                    if byte.is_ascii_whitespace() {
                        if !current_word.is_empty() {
                            *word_counts.entry(current_word.clone()).or_insert(0) += 1;
                            current_word.clear();
                        }
                    } else {
                        current_word.push(byte as char);
                    }
                }
                reader.consume(buf.len());
            }

            // Process any remaining word
            if !current_word.is_empty() {
                *word_counts.entry(current_word).or_insert(0) += 1;
            }

            // Manually create JSON output
            print!("{{");
            let mut first = true;
            for (word, count) in word_counts {
                if !first {
                    print!(",");
                }
                print!("\"{}\":{}", word, count);
                first = false;
            }
            println!("}}");
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}
I extracted the following code from that output:
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().unwrap().len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size / num_threads as u64) * i as u64;
        let end = if i == num_threads - 1 {
            file_size
        } else {
            (file_size / num_threads as u64) * (i + 1) as u64
        };
        let filename_clone = filename.to_string();

        let handle = thread::spawn(move || {
            let file = File::open(filename_clone).expect("Failed to open file in thread");
            let mut reader = BufReader::new(file);

            // Seek to the starting position
            reader.seek_relative(start as i64).unwrap();

            let mut word_counts = HashMap::new();
            let mut current_word = String::new();

            // Process until the end position or EOF
            let mut pos = start;
            while pos < end {
                let buf = reader.fill_buf().unwrap();
                if buf.is_empty() {
                    break; // EOF
                }
                for &byte in buf {
                    pos += 1;
                    if byte.is_ascii_whitespace() {
                        if !current_word.is_empty() {
                            *word_counts.entry(current_word.clone()).or_insert(0) += 1;
                            current_word.clear();
                        }
                    } else {
                        current_word.push(byte as char);
                    }
                }
                reader.consume(buf.len());
            }

            // Process any remaining word
            if !current_word.is_empty() {
                *word_counts.entry(current_word).or_insert(0) += 1;
            }

            // Manually create JSON output
            print!("{{");
            let mut first = true;
            for (word, count) in word_counts {
                if !first {
                    print!(",");
                }
                print!("\"{}\":{}", word, count);
                first = false;
            }
            println!("}}");
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}
Below is some student's code. I asked them to parallelize some code. Do not judge if it is correct; I have test cases for that. Logic errors are okay. So are syntax errors. Just tell me if they tried to parallelize it.

use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().unwrap().len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size / num_threads as u64) * i as u64;
        let end = if i == num_threads - 1 {
            file_size
        } else {
            (file_size / num_threads as u64) * (i + 1) as u64
        };
        let filename_clone = filename.to_string();

        let handle = thread::spawn(move || {
            let file = File::open(filename_clone).expect("Failed to open file in thread");
            let mut reader = BufReader::new(file);

            // Seek to the starting position
            reader.seek_relative(start as i64).unwrap();

            let mut word_counts = HashMap::new();
            let mut current_word = String::new();

            // Process until the end position or EOF
            let mut pos = start;
            while pos < end {
                let buf = reader.fill_buf().unwrap();
                if buf.is_empty() {
                    break; // EOF
                }
                for &byte in buf {
                    pos += 1;
                    if byte.is_ascii_whitespace() {
                        if !current_word.is_empty() {
                            *word_counts.entry(current_word.clone()).or_insert(0) += 1;
                            current_word.clear();
                        }
                    } else {
                        current_word.push(byte as char);
                    }
                }
                reader.consume(buf.len());
            }

            // Process any remaining word
            if !current_word.is_empty() {
                *word_counts.entry(current_word).or_insert(0) += 1;
            }

            // Manually create JSON output
            print!("{{");
            let mut first = true;
            for (word, count) in word_counts {
                if !first {
                    print!(",");
                }
                print!("\"{}\":{}", word, count);
                first = false;
            }
            println!("}}");
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}

Answer either 'The student passes' or 'The student fails'.
The student passes.
Testing if the previous output contains the string student passes
: True
Running the following program:
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::thread;

fn main() {
    let filename = "/tmp/file.txt";
    let file = File::open(filename).expect("Failed to open file");
    let file_size = file.metadata().unwrap().len();
    let num_threads = 4;
    let mut handles = vec![];

    for i in 0..num_threads {
        let start = (file_size / num_threads as u64) * i as u64;
        let end = if i == num_threads - 1 {
            file_size
        } else {
            (file_size / num_threads as u64) * (i + 1) as u64
        };
        let filename_clone = filename.to_string();

        let handle = thread::spawn(move || {
            let file = File::open(filename_clone).expect("Failed to open file in thread");
            let mut reader = BufReader::new(file);

            // Seek to the starting position
            reader.seek_relative(start as i64).unwrap();

            let mut word_counts = HashMap::new();
            let mut current_word = String::new();

            // Process until the end position or EOF
            let mut pos = start;
            while pos < end {
                let buf = reader.fill_buf().unwrap();
                if buf.is_empty() {
                    break; // EOF
                }
                for &byte in buf {
                    pos += 1;
                    if byte.is_ascii_whitespace() {
                        if !current_word.is_empty() {
                            *word_counts.entry(current_word.clone()).or_insert(0) += 1;
                            current_word.clear();
                        }
                    } else {
                        current_word.push(byte as char);
                    }
                }
                reader.consume(buf.len());
            }

            // Process any remaining word
            if !current_word.is_empty() {
                *word_counts.entry(current_word).or_insert(0) += 1;
            }

            // Manually create JSON output
            print!("{{");
            let mut first = true;
            for (word, count) in word_counts {
                if !first {
                    print!(",");
                }
                print!("\"{}\":{}", word, count);
                first = false;
            }
            println!("}}");
        });

        handles.push(handle);
    }

    for handle in handles {
        handle.join().unwrap();
    }
}
warning: unused import: `std::env`
--> main.rs:3:5
|
3 | use std::env;
| ^^^^^^^^
|
= note: `#[warn(unused_imports)]` on by default
error[E0499]: cannot borrow `reader` as mutable more than once at a time
--> main.rs:56:17
|
39 | let buf = reader.fill_buf().unwrap();
| ------ first mutable borrow occurs here
...
56 | reader.consume(buf.len());
| ^^^^^^ --- first borrow later used here
| |
| second mutable borrow occurs here
error: aborting due to 1 previous error; 1 warning emitted
For more information about this error, try `rustc --explain E0499`.
main.sh: line 2: ./a.out: No such file or directory
Resulting in output: []
Testing if the previous output equals the string [('Darkness,', 1), ('Heaven,', 1), ('Light,', 1), ('age', 2), ('all', 2), ('before', 2), ('belief,', 1), ('best', 1), ('despair,', 1), ('direct', 2), ('epoch', 2), ('everything', 1), ('foolishness,', 1), ('going', 2), ('had', 2), ('hope,', 1), ('incredulity,', 1), ('it', 10), ('nothing', 1), ('of', 10), ('other', 1), ('season', 2), ('spring', 1), ('the', 11), ('times,', 2), ('to', 1), ('us,', 2), ('was', 10), ('way', 1), ('we', 4), ('were', 2), ('winter', 1), ('wisdom,', 1), ('worst', 1)]
: False
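The E0499 in this final run is the usual `fill_buf`/`consume` clash: the slice returned by `fill_buf` keeps `reader` mutably borrowed, so `reader.consume(buf.len())` takes a second mutable borrow while the first is still live. A minimal sketch of the standard workaround, which saves the length and lets the borrow end before `consume` is called (the byte-counting body is illustrative, not the word-count logic above):

use std::fs::File;
use std::io::{BufRead, BufReader};

fn main() -> std::io::Result<()> {
    let mut reader = BufReader::new(File::open("/tmp/file.txt")?);
    let mut total = 0u64;
    loop {
        // End the fill_buf borrow inside this block, keeping only the length.
        let n = {
            let buf = reader.fill_buf()?;
            if buf.is_empty() {
                break; // EOF
            }
            total += buf.len() as u64;
            buf.len()
        };
        reader.consume(n); // no E0499: the earlier borrow has ended
    }
    println!("{} bytes read", total);
    Ok(())
}

The same pattern would apply inside the word-count thread above: record `buf.len()` before the slice goes out of scope, then pass that saved length to `consume`.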