/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

use libc::EIO;
use std::io;
use thiserror::Error;

use crate::auth::Authenticator;
use crate::common::divide_roundup;
use crate::crypto::{CryptoError, Sha256Hasher};
use crate::reader::ReadOnlyDataByChunk;

const ZEROS: [u8; 4096] = [0u8; 4096];

// The size of `struct fsverity_formatted_digest` in Linux with SHA-256.
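// The 12-byte header consists of the 8-byte "FSVerity" magic followed by two little-endian u16
// fields (digest algorithm and digest size), as assembled in `build_fsverity_formatted_digest`
// below.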
const SIZE_OF_FSVERITY_FORMATTED_DIGEST_SHA256: usize = 12 + Sha256Hasher::HASH_SIZE;

#[derive(Error, Debug)]
pub enum FsverityError {
    #[error("Cannot verify a signature")]
    BadSignature,
    #[error("Insufficient data, only got {0}")]
    InsufficientData(usize),
    #[error("Cannot verify a block")]
    CannotVerify,
    #[error("I/O error")]
    Io(#[from] io::Error),
    #[error("Crypto")]
    UnexpectedCryptoError(#[from] CryptoError),
}

type HashBuffer = [u8; Sha256Hasher::HASH_SIZE];

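// Hashes a chunk as if it were zero-padded to `pad_to` bytes. fs-verity hashes both data chunks
// and Merkle tree nodes at the full block size, so a partial last chunk must be padded first.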
fn hash_with_padding(chunk: &[u8], pad_to: usize) -> Result<HashBuffer, CryptoError> {
    let padding_size = pad_to - chunk.len();
    Sha256Hasher::new()?.update(&chunk)?.update(&ZEROS[..padding_size])?.finalize()
}

fn verity_check<T: ReadOnlyDataByChunk>(
    chunk: &[u8],
    chunk_index: u64,
    file_size: u64,
    merkle_tree: &T,
) -> Result<HashBuffer, FsverityError> {
    // The caller should not be able to produce a chunk in the first place if `file_size` is 0.
    // The current implementation expects to crash when a `ReadOnlyDataByChunk` implementation
    // reads beyond the file size, including for an empty file.
    assert_ne!(file_size, 0);

    let chunk_hash = hash_with_padding(&chunk, T::CHUNK_SIZE as usize)?;

    fsverity_walk(chunk_index, file_size, merkle_tree)?.try_fold(
        chunk_hash,
        |actual_hash, result| {
            let (merkle_chunk, hash_offset_in_chunk) = result?;
            let expected_hash =
                &merkle_chunk[hash_offset_in_chunk..hash_offset_in_chunk + Sha256Hasher::HASH_SIZE];
            if actual_hash != expected_hash {
                return Err(FsverityError::CannotVerify);
            }
            Ok(hash_with_padding(&merkle_chunk, T::CHUNK_SIZE as usize)?)
        },
    )
}

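// Returns ceil(log128(num)) for a non-zero `num`, i.e. how many times `num` has to be divided by
// 128 (rounding up) before reaching 1. For example, log128_ceil(1) == Some(0),
// log128_ceil(128) == Some(1) and log128_ceil(129) == Some(2).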
fn log128_ceil(num: u64) -> Option<u64> {
    match num {
        0 => None,
        n => Some(divide_roundup(64 - (n - 1).leading_zeros() as u64, 7)),
    }
}

/// Given a chunk index and the size of the file, returns an iterator that walks the Merkle tree
/// from the leaf to the root. Each item carries the tree node as a 4K chunk together with the
/// offset of the child node's hash within that chunk. It is up to the caller to use the node and
/// the hash, e.g. for the actual verification.
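///
/// The tree is assumed to be laid out level by level starting from the root, where every node is
/// a `CHUNK_SIZE` block holding 128 SHA-256 hashes of the nodes (or data chunks) one level below;
/// the offsets computed here follow that layout.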
#[allow(clippy::needless_collect)]
fn fsverity_walk<T: ReadOnlyDataByChunk>(
    chunk_index: u64,
    file_size: u64,
    merkle_tree: &T,
) -> Result<impl Iterator<Item = Result<([u8; 4096], usize), FsverityError>> + '_, FsverityError> {
    let hashes_per_node = T::CHUNK_SIZE / Sha256Hasher::HASH_SIZE as u64;
    let hash_pages = divide_roundup(file_size, hashes_per_node * T::CHUNK_SIZE);
    debug_assert_eq!(hashes_per_node, 128u64);
    let max_level = log128_ceil(hash_pages).expect("file should not be empty") as u32;
    let root_to_leaf_steps = (0..=max_level)
        .rev()
        .map(|x| {
            let leaves_per_hash = hashes_per_node.pow(x);
            let leaves_size_per_hash = T::CHUNK_SIZE * leaves_per_hash;
            let leaves_size_per_node = leaves_size_per_hash * hashes_per_node;
            let nodes_at_level = divide_roundup(file_size, leaves_size_per_node);
            let level_size = nodes_at_level * T::CHUNK_SIZE;
            let offset_in_level = (chunk_index / leaves_per_hash) * Sha256Hasher::HASH_SIZE as u64;
            (level_size, offset_in_level)
        })
        .scan(0, |level_offset, (level_size, offset_in_level)| {
            let this_level_offset = *level_offset;
            *level_offset += level_size;
            let global_hash_offset = this_level_offset + offset_in_level;
            Some(global_hash_offset)
        })
        .map(|global_hash_offset| {
            let chunk_index = global_hash_offset / T::CHUNK_SIZE;
            let hash_offset_in_chunk = (global_hash_offset % T::CHUNK_SIZE) as usize;
            (chunk_index, hash_offset_in_chunk)
        })
        .collect::<Vec<_>>();

    Ok(root_to_leaf_steps.into_iter().rev().map(move |(chunk_index, hash_offset_in_chunk)| {
        let mut merkle_chunk = [0u8; 4096];
        let _ = merkle_tree.read_chunk(chunk_index, &mut merkle_chunk)?;
        Ok((merkle_chunk, hash_offset_in_chunk))
    }))
}

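// Builds the `fsverity_formatted_digest` that the signature is verified against: the "FSVerity"
// magic plus digest algorithm and size, followed by the SHA-256 of the descriptor fields hashed
// below (version 1, SHA-256, 4K blocks, no salt, no inline signature, `file_size` and `root_hash`).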
fn build_fsverity_formatted_digest(
    root_hash: &HashBuffer,
    file_size: u64,
) -> Result<[u8; SIZE_OF_FSVERITY_FORMATTED_DIGEST_SHA256], CryptoError> {
    let desc_hash = Sha256Hasher::new()?
        .update(&1u8.to_le_bytes())? // version
        .update(&1u8.to_le_bytes())? // hash_algorithm
        .update(&12u8.to_le_bytes())? // log_blocksize
        .update(&0u8.to_le_bytes())? // salt_size
        .update(&0u32.to_le_bytes())? // sig_size
        .update(&file_size.to_le_bytes())? // data_size
        .update(root_hash)? // root_hash, first 32 bytes
        .update(&[0u8; 32])? // root_hash, last 32 bytes
        .update(&[0u8; 32])? // salt
        .update(&[0u8; 32])? // reserved
        .update(&[0u8; 32])? // reserved
        .update(&[0u8; 32])? // reserved
        .update(&[0u8; 32])? // reserved
        .update(&[0u8; 16])? // reserved
        .finalize()?;

    let mut fsverity_digest = [0u8; SIZE_OF_FSVERITY_FORMATTED_DIGEST_SHA256];
    fsverity_digest[0..8].copy_from_slice(b"FSVerity");
    fsverity_digest[8..10].copy_from_slice(&1u16.to_le_bytes());
    fsverity_digest[10..12].copy_from_slice(&32u16.to_le_bytes());
    fsverity_digest[12..].copy_from_slice(&desc_hash);
    Ok(fsverity_digest)
}

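/// A chunk-based file reader that verifies every chunk it reads against an fs-verity Merkle tree.
/// The root hash is authenticated once at construction time by checking a signature over the
/// fs-verity formatted digest; afterwards, any chunk that does not hash up to the trusted root
/// fails the read with `EIO`.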
pub struct FsverityChunkedFileReader<F: ReadOnlyDataByChunk, M: ReadOnlyDataByChunk> {
    chunked_file: F,
    file_size: u64,
    merkle_tree: M,
    root_hash: HashBuffer,
}

impl<F: ReadOnlyDataByChunk, M: ReadOnlyDataByChunk> FsverityChunkedFileReader<F, M> {
    pub fn new<A: Authenticator>(
        authenticator: &A,
        chunked_file: F,
        file_size: u64,
        sig: Vec<u8>,
        merkle_tree: M,
    ) -> Result<FsverityChunkedFileReader<F, M>, FsverityError> {
        // TODO(victorhsieh): Use the generic constant directly once supported; the assert below
        // will then be unnecessary.
        let mut buf = [0u8; 4096];
        assert_eq!(buf.len() as u64, M::CHUNK_SIZE);
        let size = merkle_tree.read_chunk(0, &mut buf)?;
        if buf.len() != size {
            return Err(FsverityError::InsufficientData(size));
        }
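        // The first chunk of the Merkle tree dump is taken as the root node; its hash is the
        // fs-verity root hash that the signature covers.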
        let root_hash = Sha256Hasher::new()?.update(&buf[..])?.finalize()?;
        let fsverity_digest = build_fsverity_formatted_digest(&root_hash, file_size)?;
        let valid = authenticator.verify(&sig, &fsverity_digest)?;
        if valid {
            Ok(FsverityChunkedFileReader { chunked_file, file_size, merkle_tree, root_hash })
        } else {
            Err(FsverityError::BadSignature)
        }
    }
}

impl<F: ReadOnlyDataByChunk, M: ReadOnlyDataByChunk> ReadOnlyDataByChunk
    for FsverityChunkedFileReader<F, M>
{
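    // Reads the chunk at `chunk_index` from the underlying file, then verifies it by walking the
    // Merkle tree up to the root and comparing the computed root hash with the trusted one;
    // verification failures surface as `EIO`.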
    fn read_chunk(&self, chunk_index: u64, buf: &mut [u8]) -> io::Result<usize> {
        debug_assert!(buf.len() as u64 >= Self::CHUNK_SIZE);
        let size = self.chunked_file.read_chunk(chunk_index, buf)?;
        let root_hash = verity_check(&buf[..size], chunk_index, self.file_size, &self.merkle_tree)
            .map_err(|_| io::Error::from_raw_os_error(EIO))?;
        if root_hash != self.root_hash {
            Err(io::Error::from_raw_os_error(EIO))
        } else {
            Ok(size)
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::auth::FakeAuthenticator;
    use crate::reader::ReadOnlyDataByChunk;
    use anyhow::Result;

    fn total_chunk_number(file_size: u64) -> u64 {
        (file_size + 4095) / 4096
    }

    #[test]
    fn fsverity_verify_full_read_4k() -> Result<()> {
        let file = &include_bytes!("../testdata/input.4k")[..];
        let merkle_tree = &include_bytes!("../testdata/input.4k.merkle_dump")[..];
        let sig = include_bytes!("../testdata/input.4k.fsv_sig").to_vec();
        let authenticator = FakeAuthenticator::always_succeed();
        let verified_file = FsverityChunkedFileReader::new(
            &authenticator,
            file,
            file.len() as u64,
            sig,
            merkle_tree,
        )?;

        for i in 0..total_chunk_number(file.len() as u64) {
            let mut buf = [0u8; 4096];
            assert!(verified_file.read_chunk(i, &mut buf[..]).is_ok());
        }
        Ok(())
    }

    #[test]
    fn fsverity_verify_full_read_4k1() -> Result<()> {
        let file = &include_bytes!("../testdata/input.4k1")[..];
        let merkle_tree = &include_bytes!("../testdata/input.4k1.merkle_dump")[..];
        let sig = include_bytes!("../testdata/input.4k1.fsv_sig").to_vec();
        let authenticator = FakeAuthenticator::always_succeed();
        let verified_file = FsverityChunkedFileReader::new(
            &authenticator,
            file,
            file.len() as u64,
            sig,
            merkle_tree,
        )?;

        for i in 0..total_chunk_number(file.len() as u64) {
            let mut buf = [0u8; 4096];
            assert!(verified_file.read_chunk(i, &mut buf[..]).is_ok());
        }
        Ok(())
    }

    #[test]
    fn fsverity_verify_full_read_4m() -> Result<()> {
        let file = &include_bytes!("../testdata/input.4m")[..];
        let merkle_tree = &include_bytes!("../testdata/input.4m.merkle_dump")[..];
        let sig = include_bytes!("../testdata/input.4m.fsv_sig").to_vec();
        let authenticator = FakeAuthenticator::always_succeed();
        let verified_file = FsverityChunkedFileReader::new(
            &authenticator,
            file,
            file.len() as u64,
            sig,
            merkle_tree,
        )?;

        for i in 0..total_chunk_number(file.len() as u64) {
            let mut buf = [0u8; 4096];
            assert!(verified_file.read_chunk(i, &mut buf[..]).is_ok());
        }
        Ok(())
    }

    #[test]
    fn fsverity_verify_bad_merkle_tree() -> Result<()> {
        let file = &include_bytes!("../testdata/input.4m")[..];
        // First leaf node is corrupted.
        let merkle_tree = &include_bytes!("../testdata/input.4m.merkle_dump.bad")[..];
        let sig = include_bytes!("../testdata/input.4m.fsv_sig").to_vec();
        let authenticator = FakeAuthenticator::always_succeed();
        let verified_file = FsverityChunkedFileReader::new(
            &authenticator,
            file,
            file.len() as u64,
            sig,
            merkle_tree,
        )?;

        // A broken node at the lowest level (a 4K chunk that contains 128 SHA-256 hashes) causes
        // read failures for all the data chunks it covers, but not for chunks before or after it.
        let mut buf = [0u8; 4096];
        let num_hashes = 4096 / 32;
        let last_index = num_hashes;
        for i in 0..last_index {
            assert!(verified_file.read_chunk(i, &mut buf[..]).is_err());
        }
        assert!(verified_file.read_chunk(last_index, &mut buf[..]).is_ok());
        Ok(())
    }

    #[test]
    fn invalid_signature() -> Result<()> {
        let authenticator = FakeAuthenticator::always_fail();
        let file = &include_bytes!("../testdata/input.4m")[..];
        let merkle_tree = &include_bytes!("../testdata/input.4m.merkle_dump")[..];
        let sig = include_bytes!("../testdata/input.4m.fsv_sig").to_vec();
        assert!(FsverityChunkedFileReader::new(
            &authenticator,
            file,
            file.len() as u64,
            sig,
            merkle_tree
        )
        .is_err());
        Ok(())
    }
}