# example_usage.py — DLSCA test dataset usage examples (repo snapshot, commit a17cfac)
"""
Example usage of the DLSCA Test Dataset with streaming zarr support.
This example demonstrates how to use the custom dataset for both local
development and production with streaming capabilities.
Note: You may see "Repo card metadata block was not found" warnings -
these are harmless and expected for local datasets without published cards.
"""
import os
from test import TestDataset, TestDownloadManager
import numpy as np
def example_local_usage():
    """Build the dataset locally and print a few rows from the train split.

    Returns:
        The materialized train split.
    """
    print("=== Local Development Usage ===")
    # Prepare the dataset on disk, then materialize the train split.
    ds = TestDataset()
    ds.download_and_prepare()
    train_split = ds.as_dataset(split="train")
    print(f"Dataset size: {len(train_split)}")
    print(f"Features: {list(train_split.features.keys())}")
    # Peek at the first three rows to show per-example structure.
    for idx in range(3):
        row = train_split[idx]
        print(f"Example {idx}: labels={row['labels'][:2]}..., traces_len={len(row['traces'])}")
    return train_split
def example_streaming_usage():
    """Convert traces to zarr, cache them, and read the array chunk by chunk.

    Returns:
        The zarr array backing the cached traces.
    """
    print("\n=== Streaming Usage ===")
    # Cache the raw traces as a chunked zarr zip via the custom manager.
    manager = TestDownloadManager(dataset_name="dlsca_test")
    traces_path = os.path.join(os.path.dirname(__file__), "data", "traces.npy")
    zarr_zip_path = manager.download_zarr_chunks(traces_path, chunk_size=100)
    print(f"Zarr chunks cached at: {zarr_zip_path}")
    # Open the cached archive as a zarr array for streaming access.
    zarr_array = TestDataset()._load_zarr_from_zip(zarr_zip_path)
    print(f"Zarr array shape: {zarr_array.shape}")
    print(f"Zarr array chunks: {zarr_array.chunks}")
    # Walk the array chunk by chunk, as a streaming consumer would.
    chunk_size = 100
    total_rows = zarr_array.shape[0]
    num_chunks = -(-total_rows // chunk_size)  # ceiling division
    print(f"Total chunks: {num_chunks}")
    for chunk_idx in range(min(3, num_chunks)):  # preview only the first 3 chunks
        lo = chunk_idx * chunk_size
        hi = min(lo + chunk_size, total_rows)
        chunk_data = zarr_array[lo:hi]
        print(f"Chunk {chunk_idx}: shape={chunk_data.shape}, range=[{lo}:{hi}]")
    return zarr_array
def example_chunk_selection():
    """Slice a contiguous range of samples (traces + labels) for training.

    Returns:
        Tuple of (selected traces, selected labels) for samples 200-299.
    """
    print("\n=== Chunk Selection Example ===")
    data_dir = os.path.join(os.path.dirname(__file__), "data")
    # Cache the traces as zarr chunks and open them for slicing.
    manager = TestDownloadManager()
    zip_path = manager.download_zarr_chunks(
        os.path.join(data_dir, "traces.npy"), chunk_size=100
    )
    traces = TestDataset()._load_zarr_from_zip(zip_path)
    labels = np.load(os.path.join(data_dir, "labels.npy"))
    # Take samples 200-299 as an example training subset.
    window = slice(200, 300)
    subset_traces = traces[window]
    subset_labels = labels[window]
    print(f"Selected traces shape: {subset_traces.shape}")
    print(f"Selected labels shape: {subset_labels.shape}")
    print(f"Sample labels: {subset_labels[:3]}")
    return subset_traces, subset_labels
def benchmark_access_patterns():
    """Compare NumPy vs. zarr timings for sequential and random slice access.

    Fix: timings now use time.perf_counter() instead of time.time().
    perf_counter() is monotonic and has the highest available resolution,
    so the very short intervals measured here are reliable; time.time()
    is wall-clock, coarse on some platforms, and can jump backwards.
    Also hoists the duplicated traces-path expression into one variable.
    """
    print("\n=== Access Pattern Benchmark ===")
    import time
    # Load the same traces both as a plain NumPy array and as a zarr array.
    traces_path = os.path.join(os.path.dirname(__file__), "data", "traces.npy")
    traces_np = np.load(traces_path)
    dl_manager = TestDownloadManager()
    zarr_zip_path = dl_manager.download_zarr_chunks(traces_path, chunk_size=100)
    dataset = TestDataset()
    traces_zarr = dataset._load_zarr_from_zip(zarr_zip_path)

    # --- Sequential access over a 300-sample prefix ---
    print("Sequential access (first 300 samples):")
    start = time.perf_counter()
    np_data = traces_np[:300]
    np_time = time.perf_counter() - start
    print(f" NumPy: {np_time:.4f}s")
    start = time.perf_counter()
    zarr_data = traces_zarr[:300]
    zarr_time = time.perf_counter() - start
    print(f" Zarr: {zarr_time:.4f}s")
    # Sanity check: both storage formats must yield identical data.
    print(f" Data identical: {np.array_equal(np_data, zarr_data)}")

    # --- Random access: 50-sample windows at scattered offsets ---
    print("\nRandom chunk access (3 chunks):")
    indices = [50, 250, 450]
    start = time.perf_counter()
    for idx in indices:
        _ = traces_np[idx:idx + 50]
    np_random_time = time.perf_counter() - start
    print(f" NumPy: {np_random_time:.4f}s")
    start = time.perf_counter()
    for idx in indices:
        _ = traces_zarr[idx:idx + 50]
    zarr_random_time = time.perf_counter() - start
    print(f" Zarr: {zarr_random_time:.4f}s")
if __name__ == "__main__":
    # Exercise every example end to end.
    train_split = example_local_usage()
    streamed_array = example_streaming_usage()
    chunk_subset = example_chunk_selection()
    benchmark_access_patterns()
    # Report what was exercised and what to do next.
    for line in (
        "\n=== Summary ===",
        "✅ Local dataset loading works",
        "✅ Zarr conversion and streaming works",
        "✅ Chunk selection works",
        "✅ Access pattern benchmarking works",
        "\nThe dataset is ready for use with Hugging Face Hub!",
        "Next steps:",
        "1. Push this dataset to Hugging Face Hub",
        "2. Use datasets.load_dataset('DLSCA/test') to access it",
        "3. The streaming will automatically use zarr chunks for large traces",
    ):
        print(line)