Process large datasets in Rust by streaming files with BufReader, handling one line at a time so the whole file is never loaded into memory.
use std::fs::File;
use std::io::{self, BufRead, BufReader, Write};
/// Streams `large_dataset.csv` line by line, printing each line.
///
/// Designed for large inputs: the file is never loaded whole. One `String`
/// buffer is reused across iterations (unlike `BufRead::lines()`, which
/// allocates a fresh `String` per line), and stdout is locked once instead
/// of per `println!` call.
///
/// # Errors
/// Returns any I/O error from opening or reading the file, or from writing
/// to stdout.
fn main() -> io::Result<()> {
    let file = File::open("large_dataset.csv")?;
    let mut reader = BufReader::new(file);

    // Lock stdout once; `println!` would re-acquire the lock on every line.
    let stdout = io::stdout();
    let mut out = stdout.lock();

    // Single reusable line buffer; `read_line` appends, so clear each pass.
    let mut buf = String::new();
    while reader.read_line(&mut buf)? != 0 {
        // Strip the trailing "\n" / "\r\n" exactly as `lines()` would,
        // without allocating.
        let line = buf.strip_suffix('\n').unwrap_or(&buf);
        let line = line.strip_suffix('\r').unwrap_or(line);
        // Process line here
        writeln!(out, "Processing: {}", line)?;
        buf.clear();
    }
    Ok(())
}