fn book_training_1()

in candle-book/src/lib.rs [111:198]


    fn book_training_1() -> Result<()> {
// ANCHOR: book_training_1
// `candle` is the workspace alias for the candle-core crate.
use candle::{DType, Device, Tensor};
use hf_hub::{api::sync::Api, Repo, RepoType};
use parquet::file::reader::SerializedFileReader;

let dataset_id = "mnist".to_string();

let api = Api::new()?;
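// Target the dataset's `refs/convert/parquet` branch, which holds the
// Hub's automatic Parquet conversion of the raw files.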
let repo = Repo::with_revision(
    dataset_id,
    RepoType::Dataset,
    "refs/convert/parquet".to_string(),
);
let repo = api.repo(repo);
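// Download one Parquet shard per split (cached locally by hf-hub).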
let test_parquet_filename = repo.get("mnist/test/0000.parquet")?;
let train_parquet_filename = repo.get("mnist/train/0000.parquet")?;
let test_parquet = SerializedFileReader::new(std::fs::File::open(test_parquet_filename)?)?;
let train_parquet = SerializedFileReader::new(std::fs::File::open(train_parquet_filename)?)?;
// ANCHOR_END: book_training_1
// Silence the unused-variable warning; both readers are rebuilt below.
let _train = train_parquet;
// ANCHOR: book_training_2
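// Each row holds two columns: a nested image struct and an integer label.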
for row in test_parquet {
    for (idx, (name, field)) in row?.get_column_iter().enumerate() {
        println!("Column id {idx}, name {name}, value {field}");
    }
}
// ANCHOR_END: book_training_2
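// Iterating consumed the readers, so open fresh ones for the extraction pass.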
let test_parquet_filename = repo.get("mnist/test/0000.parquet")?;
let train_parquet_filename = repo.get("mnist/train/0000.parquet")?;
let test_parquet = SerializedFileReader::new(std::fs::File::open(test_parquet_filename)?)?;
let train_parquet = SerializedFileReader::new(std::fs::File::open(train_parquet_filename)?)?;
// ANCHOR: book_training_3

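// MNIST test split: 10,000 samples, each a 28x28 (= 784 pixel) grayscale image.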
let test_samples = 10_000;
let mut test_buffer_images: Vec<u8> = Vec::with_capacity(test_samples * 784);
let mut test_buffer_labels: Vec<u8> = Vec::with_capacity(test_samples);
for row in test_parquet {
    for (_name, field) in row?.get_column_iter() {
        if let parquet::record::Field::Group(subrow) = field {
            // The image column is a nested group whose bytes field holds an encoded image.
            for (_name, field) in subrow.get_column_iter() {
                if let parquet::record::Field::Bytes(value) = field {
                    let image = image::load_from_memory(value.data()).unwrap();
                    test_buffer_images.extend(image.to_luma8().as_raw());
                }
            }
        } else if let parquet::record::Field::Long(label) = field {
            test_buffer_labels.push(*label as u8);
        }
    }
}
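// Pack the pixels into an (N, 784) f32 tensor scaled to [0, 1]; labels stay as u8.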
let test_images = (Tensor::from_vec(test_buffer_images, (test_samples, 784), &Device::Cpu)?
    .to_dtype(DType::F32)?
    / 255.)?;
let test_labels = Tensor::from_vec(test_buffer_labels, (test_samples,), &Device::Cpu)?;

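// Repeat the extraction for the 60,000-sample training split.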
let train_samples = 60_000;
let mut train_buffer_images: Vec<u8> = Vec::with_capacity(train_samples * 784);
let mut train_buffer_labels: Vec<u8> = Vec::with_capacity(train_samples);
for row in train_parquet {
    for (_name, field) in row?.get_column_iter() {
        if let parquet::record::Field::Group(subrow) = field {
            for (_name, field) in subrow.get_column_iter() {
                if let parquet::record::Field::Bytes(value) = field {
                    let image = image::load_from_memory(value.data()).unwrap();
                    train_buffer_images.extend(image.to_luma8().as_raw());
                }
            }
        } else if let parquet::record::Field::Long(label) = field {
            train_buffer_labels.push(*label as u8);
        }
    }
}
let train_images = (Tensor::from_vec(train_buffer_images, (train_samples, 784), &Device::Cpu)?
    .to_dtype(DType::F32)?
    / 255.)?;
let train_labels = Tensor::from_vec(train_buffer_labels, (train_samples,), &Device::Cpu)?;

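// Bundle the four tensors into candle's vision Dataset; `labels` is the
// number of classes (digits 0-9).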
let mnist = candle_datasets::vision::Dataset {
    train_images,
    train_labels,
    test_images,
    test_labels,
    labels: 10,
};

// ANCHOR_END: book_training_3
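// Sanity-check the tensor shapes against the expected MNIST split sizes.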
assert_eq!(mnist.test_images.dims(), &[10_000, 784]);
assert_eq!(mnist.test_labels.dims(), &[10_000]);
assert_eq!(mnist.train_images.dims(), &[60_000, 784]);
assert_eq!(mnist.train_labels.dims(), &[60_000]);
Ok(())
    }