dataset1
Dataset Description
This is a synthetic side-channel analysis (SCA) dataset containing power consumption traces captured during AES-128 encryption operations. The dataset is structured similarly to the ASCAD format and is designed for deep-learning-based side-channel attacks.
Dataset Details
- Number of Traces: 10,000
- Trace Length: 5,000 samples per trace
- Encryption Algorithm: AES-128
- Block Size: 16 bytes
- Data Type: Synthetic/Simulated Data
- Storage Format: Parquet (columnar, compressed with Snappy)
- Trace Chunk Size: 1000 traces per file
- Number of Trace Chunks: 10
- Total Dataset Size: ~191.0 MB (uncompressed)
What This Dataset Contains
This dataset captures the power consumption during 10,000 AES-128 encryption operations:
Power Traces (Large, Chunked Files):
- Files: `data-traces-00000-of-00010.parquet` through `data-traces-00009-of-00010.parquet`
- Each trace represents the power consumption measured during one AES encryption
- 5,000 samples per trace (time-series data)
- Stored as int32 arrays (will be converted to float during analysis)
- Chunked for resilience: if a download/upload fails, only the failed chunks need to be retried
Plaintext Metadata (Single Small File):
- File: `data-plaintexts.parquet` (~156.2 KB)
- Contains the 16-byte input data for each AES encryption
- Each plaintext is an array of 16 integers (0-255)
- Linked to traces via the `trace_idx` column
Ciphertext Metadata (Single Small File):
- File: `data-ciphertexts.parquet` (~156.2 KB)
- Contains the 16-byte encrypted output for each AES encryption
- Each ciphertext is an array of 16 integers (0-255)
- Linked to traces via the `trace_idx` column
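To confirm this file layout before downloading anything large, you can list the repository contents with `huggingface_hub`. This is a minimal sketch; it assumes the repository id `DLSCA/dataset1` used in the usage examples below:

```python
from huggingface_hub import HfApi

# List every file in the dataset repository without downloading it
api = HfApi()
files = api.list_repo_files("DLSCA/dataset1", repo_type="dataset")
for name in sorted(f for f in files if f.endswith(".parquet")):
    print(name)
# Expected: 10 trace chunks plus data-plaintexts.parquet and data-ciphertexts.parquet
```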
Data Schema
Trace Files (data-traces-*-of-*.parquet):
| Column | Type | Description |
|---|---|---|
| trace_idx | int | Unique index (0 to 9999) linking to metadata |
| trace | list[int32] | Power consumption samples (5,000 values) |
Plaintexts File (data-plaintexts.parquet):
| Column | Type | Description |
|---|---|---|
| trace_idx | int | Index linking to corresponding trace |
| plaintext | list[uint8] | 16-byte AES plaintext input |
Ciphertexts File (data-ciphertexts.parquet):
| Column | Type | Description |
|---|---|---|
| trace_idx | int | Index linking to corresponding trace |
| ciphertext | list[uint8] | 16-byte AES ciphertext output |
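To check these schemas without loading the full tables, you can fetch individual files and read only their Parquet metadata. A minimal sketch, assuming the file names listed above:

```python
import pyarrow.parquet as pq
from huggingface_hub import hf_hub_download

# Download one trace chunk and the ciphertext file, then print their schemas
chunk_path = hf_hub_download("DLSCA/dataset1", "data-traces-00000-of-00010.parquet", repo_type="dataset")
ct_path = hf_hub_download("DLSCA/dataset1", "data-ciphertexts.parquet", repo_type="dataset")

print(pq.read_schema(chunk_path))  # expect trace_idx plus trace: list<int32>
print(pq.read_schema(ct_path))     # expect trace_idx plus ciphertext: list<uint8>
```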
Usage Examples
Basic Loading from the Hugging Face Hub

```python
from pathlib import Path

import pyarrow as pa
import pyarrow.parquet as pq
from huggingface_hub import snapshot_download

# Download the full dataset repository with huggingface_hub
# (fetches all chunks; interrupted downloads resume automatically)
local_dir = Path(snapshot_download("DLSCA/dataset1", repo_type="dataset"))

# Load and combine all trace chunks plus the two metadata files
trace_tables = [pq.read_table(f) for f in sorted(local_dir.glob("data-traces-*.parquet"))]
traces_table = pa.concat_tables(trace_tables)
plaintexts_table = pq.read_table(local_dir / "data-plaintexts.parquet")
ciphertexts_table = pq.read_table(local_dir / "data-ciphertexts.parquet")

# Merge on trace_idx to get the complete dataset using pyarrow
full_table = traces_table.join(plaintexts_table, "trace_idx").join(ciphertexts_table, "trace_idx")
print(f"Loaded {len(full_table):,} complete traces")
print(full_table.slice(0, 5))  # Show the first 5 rows
```
Efficient Batch Processing
```python
from pathlib import Path

import numpy as np
import pyarrow.parquet as pq
from huggingface_hub import snapshot_download

# Download the dataset into a local directory first
local_dir = Path(snapshot_download("DLSCA/dataset1", repo_type="dataset", local_dir="./cache"))

# Process traces chunk by chunk (memory-efficient)
trace_files = sorted(local_dir.glob("data-traces-*.parquet"))
plaintexts_table = pq.read_table(local_dir / "data-plaintexts.parquet")
ciphertexts_table = pq.read_table(local_dir / "data-ciphertexts.parquet")

for trace_file in trace_files:
    # Load one chunk at a time
    chunk_table = pq.read_table(trace_file)
    # Join with metadata
    chunk_full = chunk_table.join(plaintexts_table, "trace_idx").join(ciphertexts_table, "trace_idx")
    # Process this batch
    for i in range(len(chunk_full)):
        trace = np.array(chunk_full["trace"][i].as_py(), dtype=np.float32)
        plaintext = np.array(chunk_full["plaintext"][i].as_py(), dtype=np.uint8)
        ciphertext = np.array(chunk_full["ciphertext"][i].as_py(), dtype=np.uint8)
        # Your side-channel analysis here
        # Example: extract an intermediate value for byte 0
        # sbox_output = AES_SBOX[plaintext[0] ^ key_guess]
```
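The commented line above assumes an `AES_SBOX` lookup table and a `key_guess` byte, neither of which ships with this dataset. Below is a minimal, self-contained sketch of that labeling step: it builds the standard AES S-box and computes Hamming-weight labels for a hypothetical key-byte guess. Because the plaintexts and ciphertexts here are random rather than real AES outputs (see Dataset Creation below), such labels are illustrative only and will not correspond to genuine leakage in the traces.

```python
import numpy as np

def build_aes_sbox() -> np.ndarray:
    """Construct the 256-entry AES S-box (inverse in GF(2^8) followed by the affine map)."""
    def rotl8(x: int, n: int) -> int:
        return ((x << n) | (x >> (8 - n))) & 0xFF

    sbox = [0] * 256
    p = q = 1
    while True:
        # Multiply p by 3 in GF(2^8); divide q by 3, so q stays the inverse of p
        p = (p ^ ((p << 1) & 0xFF) ^ (0x1B if p & 0x80 else 0)) & 0xFF
        q ^= (q << 1) & 0xFF
        q ^= (q << 2) & 0xFF
        q ^= (q << 4) & 0xFF
        if q & 0x80:
            q ^= 0x09
        # Affine transformation of the multiplicative inverse
        sbox[p] = q ^ rotl8(q, 1) ^ rotl8(q, 2) ^ rotl8(q, 3) ^ rotl8(q, 4) ^ 0x63
        if p == 1:
            break
    sbox[0] = 0x63  # 0 has no inverse and is defined separately
    return np.array(sbox, dtype=np.uint8)

AES_SBOX = build_aes_sbox()

def hamming_weight_labels(plaintext_bytes: np.ndarray, key_guess: int) -> np.ndarray:
    """Hamming weight of Sbox(plaintext_byte XOR key_guess), one label per trace."""
    sbox_out = AES_SBOX[np.bitwise_xor(plaintext_bytes, key_guess)]
    return np.unpackbits(sbox_out[:, None], axis=1).sum(axis=1)

# Example (hypothetical key_guess value): label byte 0 of every plaintext in a chunk
# plaintext_bytes = np.array(chunk_full["plaintext"].to_pylist(), dtype=np.uint8)[:, 0]
# labels = hamming_weight_labels(plaintext_bytes, key_guess=0x42)
```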
PyTorch DataLoader Integration
```python
from pathlib import Path

import pyarrow as pa
import pyarrow.parquet as pq
import torch
from huggingface_hub import snapshot_download
from torch.utils.data import DataLoader, Dataset

class SCADataset(Dataset):
    def __init__(self, traces_table, plaintexts_table, ciphertexts_table):
        # Join all tables on trace_idx
        self.data = traces_table.join(plaintexts_table, "trace_idx").join(ciphertexts_table, "trace_idx")

    def __len__(self):
        return len(self.data)

    def __getitem__(self, idx):
        trace = torch.tensor(self.data["trace"][idx].as_py(), dtype=torch.float32)
        plaintext = torch.tensor(self.data["plaintext"][idx].as_py(), dtype=torch.uint8)
        ciphertext = torch.tensor(self.data["ciphertext"][idx].as_py(), dtype=torch.uint8)
        return trace, plaintext, ciphertext

# Load the parquet files (same layout as in the basic loading example)
local_dir = Path(snapshot_download("DLSCA/dataset1", repo_type="dataset"))
trace_tables = [pq.read_table(f) for f in sorted(local_dir.glob("data-traces-*.parquet"))]
traces_table = pa.concat_tables(trace_tables)
plaintexts_table = pq.read_table(local_dir / "data-plaintexts.parquet")
ciphertexts_table = pq.read_table(local_dir / "data-ciphertexts.parquet")

# Create PyTorch dataset and loader
sca_dataset = SCADataset(traces_table, plaintexts_table, ciphertexts_table)
dataloader = DataLoader(sca_dataset, batch_size=32, shuffle=True)

# Training loop
for traces, plaintexts, ciphertexts in dataloader:
    # Your neural network training here
    pass
```
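Per-item `.as_py()` conversion is convenient but slow over many epochs. A minimal alternative sketch (assuming the tables from the example above are already loaded) materializes everything as dense tensors once and wraps them in a `TensorDataset`:

```python
import numpy as np
import torch
from torch.utils.data import DataLoader, TensorDataset

full_table = traces_table.join(plaintexts_table, "trace_idx").join(ciphertexts_table, "trace_idx")

# One-off conversion: 10,000 x 5,000 float32 traces is roughly 200 MB in memory
traces_t = torch.from_numpy(np.array(full_table["trace"].to_pylist(), dtype=np.float32))
plaintexts_t = torch.from_numpy(np.array(full_table["plaintext"].to_pylist(), dtype=np.uint8))
ciphertexts_t = torch.from_numpy(np.array(full_table["ciphertext"].to_pylist(), dtype=np.uint8))

loader = DataLoader(TensorDataset(traces_t, plaintexts_t, ciphertexts_t), batch_size=32, shuffle=True)
```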
Why Chunked Storage?
Problem: Power traces are large (e.g., 5,000 samples × 10,000 traces ≈ 191 MB uncompressed). If upload/download fails at 90%, you'd have to restart from 0%.
Solution:
- Traces are chunked into 10 files of ~1000 traces each
- Metadata stays together in single files (only ~312 KB combined)
- Resume capability: Failed uploads/downloads can continue from the last successful chunk
- Parallel processing: Can process chunks independently for distributed computing
All files are linked via trace_idx, so merging is straightforward with pyarrow.
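Because chunks are independent, a worker can also fetch and process a single chunk on its own, which is useful for distributed jobs or quick experiments. A minimal sketch using `hf_hub_download`:

```python
import pyarrow.parquet as pq
from huggingface_hub import hf_hub_download

# Fetch one 1,000-trace chunk plus the small plaintext metadata file
chunk_path = hf_hub_download("DLSCA/dataset1", "data-traces-00003-of-00010.parquet", repo_type="dataset")
pt_path = hf_hub_download("DLSCA/dataset1", "data-plaintexts.parquet", repo_type="dataset")

chunk = pq.read_table(chunk_path).join(pq.read_table(pt_path), "trace_idx")
print(len(chunk), chunk.column_names)
```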
Use Cases
This dataset is suitable for:
- Deep Learning-based Side-Channel Analysis (DLSCA)
- Profiling attacks on AES implementations
- Machine learning model training for power analysis
- Research on neural network architectures for SCA
- Educational purposes in cryptographic engineering
Dataset Creation
This dataset was automatically generated using the DLSCA platform for side-channel analysis research and development.
Generation Parameters:
- Chunk Size: 1000 traces per file
- Synthetic traces with simulated noise and periodic patterns
- Random plaintexts and ciphertexts (not real AES encryptions)
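The exact generator belongs to the DLSCA platform and is not reproduced here; the sketch below is only an illustration of the stated ingredients (simulated noise plus periodic patterns), and every parameter in it is made up:

```python
import numpy as np

rng = np.random.default_rng(0)

def illustrative_trace(n_samples: int = 5000) -> np.ndarray:
    """Hypothetical stand-in for the platform's generator: periodic carrier plus Gaussian noise."""
    t = np.arange(n_samples)
    periodic = 100.0 * np.sin(2 * np.pi * t / 64)        # made-up clock-like component
    noise = rng.normal(0.0, 25.0, n_samples)             # made-up measurement noise
    return np.round(periodic + noise).astype(np.int32)   # stored as int32, like the real traces
```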
Citation
If you use this dataset in your research, please cite:
```bibtex
@dataset{dataset1,
  title={dataset1: Synthetic Side-Channel Analysis Dataset},
  author={DLSCA Platform},
  year={2026},
  publisher={HuggingFace},
  url={https://huggingface.co/datasets/DLSCA/dataset1}
}
```