blob: c9070ba4a54df428f5fbd429887726bd2bca1404 [file] [log] [blame]
Victor Hsiehec184562020-11-06 13:50:31 -08001/*
2 * Copyright (C) 2020 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Victor Hsiehf77c5722020-11-13 14:30:05 -080017use libc::EIO;
Victor Hsiehec184562020-11-06 13:50:31 -080018use std::io;
19use thiserror::Error;
20
Victor Hsiehf77c5722020-11-13 14:30:05 -080021use crate::auth::Authenticator;
Victor Hsiehec184562020-11-06 13:50:31 -080022use crate::crypto::{CryptoError, Sha256Hasher};
23use crate::reader::ReadOnlyDataByChunk;
24
// A chunk-sized block of zeros, used to pad a partial chunk up to the full chunk size before
// hashing (fs-verity hashes the last partial block as if zero-filled).
const ZEROS: [u8; 4096] = [0u8; 4096];

// The size of `struct fsverity_formatted_digest` in Linux with SHA-256.
const SIZE_OF_FSVERITY_FORMATTED_DIGEST_SHA256: usize = 12 + Sha256Hasher::HASH_SIZE;
29
/// Errors that can arise while verifying a file against its fs-verity Merkle tree and signature.
#[derive(Error, Debug)]
pub enum FsverityError {
    /// The signature over the fs-verity formatted digest failed to verify.
    #[error("Cannot verify a signature")]
    BadSignature,
    /// A read returned fewer bytes than required; carries the number of bytes actually read.
    #[error("Insufficient data, only got {0}")]
    InsufficientData(usize),
    /// A chunk's computed hash did not match the hash stored in the Merkle tree.
    #[error("Cannot verify a block")]
    CannotVerify,
    /// An I/O error from the underlying reader.
    #[error("I/O error")]
    Io(#[from] io::Error),
    /// An unexpected failure in the crypto primitives.
    #[error("Crypto")]
    UnexpectedCryptoError(#[from] CryptoError),
}
43
// A buffer holding exactly one SHA-256 digest.
type HashBuffer = [u8; Sha256Hasher::HASH_SIZE];
45
/// Returns `dividend / divisor`, rounded up to the nearest integer.
///
/// Written as a division plus a remainder check rather than the usual
/// `(dividend + divisor - 1) / divisor`, which can overflow when `dividend`
/// is close to `u64::MAX`.
///
/// # Panics
///
/// Panics if `divisor` is 0 (integer division by zero).
fn divide_roundup(dividend: u64, divisor: u64) -> u64 {
    dividend / divisor + u64::from(dividend % divisor != 0)
}
49
50fn hash_with_padding(chunk: &[u8], pad_to: usize) -> Result<HashBuffer, CryptoError> {
51 let padding_size = pad_to - chunk.len();
52 Sha256Hasher::new()?.update(&chunk)?.update(&ZEROS[..padding_size])?.finalize()
53}
54
/// Verifies one chunk of file content against the Merkle tree and, on success, returns the
/// computed root hash.
///
/// The chunk is hashed (zero-padded to the chunk size), then the Merkle tree is walked from the
/// leaf level up to the root: at each level the hash computed so far must equal the stored hash
/// at the expected offset in the parent node, and the parent node is then hashed to continue
/// the fold. The final accumulator is the root hash, which the caller compares against the
/// trusted root.
fn verity_check<T: ReadOnlyDataByChunk>(
    chunk: &[u8],
    chunk_index: u64,
    file_size: u64,
    merkle_tree: &T,
) -> Result<HashBuffer, FsverityError> {
    // The caller should not be able to produce a chunk at the first place if `file_size` is 0. The
    // current implementation expects to crash when a `ReadOnlyDataByChunk` implementation reads
    // beyond the file size, including empty file.
    assert_ne!(file_size, 0);

    let chunk_hash = hash_with_padding(&chunk, T::CHUNK_SIZE as usize)?;

    fsverity_walk(chunk_index, file_size, merkle_tree)?.try_fold(
        chunk_hash,
        |actual_hash, result| {
            let (merkle_chunk, hash_offset_in_chunk) = result?;
            // The hash stored for the current node/chunk inside its parent Merkle node.
            let expected_hash =
                &merkle_chunk[hash_offset_in_chunk..hash_offset_in_chunk + Sha256Hasher::HASH_SIZE];
            if actual_hash != expected_hash {
                return Err(FsverityError::CannotVerify);
            }
            // Move one level up: the parent node becomes the data to verify next.
            Ok(hash_with_padding(&merkle_chunk, T::CHUNK_SIZE as usize)?)
        },
    )
}
81
/// Returns `ceil(log128(num))` — the number of 128-ary tree levels needed to cover `num`
/// leaves — or `None` when `num` is 0 (the logarithm is undefined).
fn log128_ceil(num: u64) -> Option<u64> {
    if num == 0 {
        return None;
    }
    // ceil(log2(num)): the bit width needed to represent `num - 1`.
    let log2_ceil = 64 - u64::from((num - 1).leading_zeros());
    // Each level covers 7 bits (128 == 2^7); round up to whole levels.
    Some((log2_ceil + 6) / 7)
}
88
/// Given a chunk index and the size of the file, returns an iterator that walks the Merkle tree
/// from the leaf to the root. The iterator carries the slice of the chunk/node as well as the
/// offset of the child node's hash. It is up to the iterator user to use the node and hash,
/// e.g. for the actual verification.
#[allow(clippy::needless_collect)]
fn fsverity_walk<'a, T: ReadOnlyDataByChunk>(
    chunk_index: u64,
    file_size: u64,
    merkle_tree: &'a T,
) -> Result<impl Iterator<Item = Result<([u8; 4096], usize), FsverityError>> + 'a, FsverityError> {
    // Number of hashes that fit in one Merkle node: 4096 / 32 = 128.
    let hashes_per_node = T::CHUNK_SIZE / Sha256Hasher::HASH_SIZE as u64;
    // Number of leaf-level Merkle nodes needed to cover the whole file.
    let hash_pages = divide_roundup(file_size, hashes_per_node * T::CHUNK_SIZE);
    debug_assert_eq!(hashes_per_node, 128u64);
    // Depth of the tree above the leaf level (0 means the root is the only level).
    let max_level = log128_ceil(hash_pages).expect("file should not be empty") as u32;
    let root_to_leaf_steps = (0..=max_level)
        .rev()
        // For each level (root first), compute the level's total size in the tree dump and the
        // byte offset, within that level, of the hash covering `chunk_index`.
        .map(|x| {
            let leaves_per_hash = hashes_per_node.pow(x);
            let leaves_size_per_hash = T::CHUNK_SIZE * leaves_per_hash;
            let leaves_size_per_node = leaves_size_per_hash * hashes_per_node;
            let nodes_at_level = divide_roundup(file_size, leaves_size_per_node);
            let level_size = nodes_at_level * T::CHUNK_SIZE;
            let offset_in_level = (chunk_index / leaves_per_hash) * Sha256Hasher::HASH_SIZE as u64;
            (level_size, offset_in_level)
        })
        // Accumulate level sizes to turn per-level offsets into absolute offsets in the dump
        // (the dump stores the root level first, then each deeper level).
        .scan(0, |level_offset, (level_size, offset_in_level)| {
            let this_level_offset = *level_offset;
            *level_offset += level_size;
            let global_hash_offset = this_level_offset + offset_in_level;
            Some(global_hash_offset)
        })
        // Split each absolute offset into (chunk index in the dump, offset inside that chunk).
        .map(|global_hash_offset| {
            let chunk_index = global_hash_offset / T::CHUNK_SIZE;
            let hash_offset_in_chunk = (global_hash_offset % T::CHUNK_SIZE) as usize;
            (chunk_index, hash_offset_in_chunk)
        })
        .collect::<Vec<_>>();

    // Reverse to walk leaf -> root; reads of the Merkle chunks are deferred to iteration time.
    Ok(root_to_leaf_steps.into_iter().rev().map(move |(chunk_index, hash_offset_in_chunk)| {
        let mut merkle_chunk = [0u8; 4096];
        // A short read is tolerated here; the buffer is pre-zeroed, matching the zero padding
        // used when the tree was built.
        let _ = merkle_tree.read_chunk(chunk_index, &mut merkle_chunk)?;
        Ok((merkle_chunk, hash_offset_in_chunk))
    }))
}
133
/// Builds the fs-verity "formatted digest" (`struct fsverity_formatted_digest` in Linux, with
/// SHA-256) for a file with the given Merkle tree `root_hash` and `file_size`.
///
/// The formatted digest wraps the SHA-256 hash of the fs-verity descriptor
/// (`struct fsverity_descriptor`), which is reconstructed field by field below.
fn build_fsverity_formatted_digest(
    root_hash: &HashBuffer,
    file_size: u64,
) -> Result<[u8; SIZE_OF_FSVERITY_FORMATTED_DIGEST_SHA256], CryptoError> {
    // Hash of the serialized `struct fsverity_descriptor`, fields in declaration order.
    let desc_hash = Sha256Hasher::new()?
        .update(&1u8.to_le_bytes())? // version
        .update(&1u8.to_le_bytes())? // hash_algorithm
        .update(&12u8.to_le_bytes())? // log_blocksize
        .update(&0u8.to_le_bytes())? // salt_size
        .update(&0u32.to_le_bytes())? // sig_size
        .update(&file_size.to_le_bytes())? // data_size
        .update(root_hash)? // root_hash, first 32 bytes
        .update(&[0u8; 32])? // root_hash, last 32 bytes (unused with SHA-256)
        .update(&[0u8; 32])? // salt
        .update(&[0u8; 32])? // reserved
        .update(&[0u8; 32])? // reserved
        .update(&[0u8; 32])? // reserved
        .update(&[0u8; 32])? // reserved
        .update(&[0u8; 16])? // reserved
        .finalize()?;

    // Assemble `struct fsverity_formatted_digest`: magic, digest algorithm, digest size, digest.
    let mut fsverity_digest = [0u8; SIZE_OF_FSVERITY_FORMATTED_DIGEST_SHA256];
    fsverity_digest[0..8].copy_from_slice(b"FSVerity"); // magic
    fsverity_digest[8..10].copy_from_slice(&1u16.to_le_bytes()); // digest_algorithm (SHA-256)
    fsverity_digest[10..12].copy_from_slice(&32u16.to_le_bytes()); // digest_size
    fsverity_digest[12..].copy_from_slice(&desc_hash);
    Ok(fsverity_digest)
}
162
/// A chunk-based file reader that verifies every chunk it reads against an fs-verity Merkle
/// tree before returning the data to the caller.
pub struct FsverityChunkedFileReader<F: ReadOnlyDataByChunk, M: ReadOnlyDataByChunk> {
    // Source of the (untrusted) file content.
    chunked_file: F,
    // Total file size in bytes; determines the Merkle tree geometry.
    file_size: u64,
    // Source of the Merkle tree dump.
    merkle_tree: M,
    // Trusted root hash, established by signature verification in `new`.
    root_hash: HashBuffer,
}
169
impl<F: ReadOnlyDataByChunk, M: ReadOnlyDataByChunk> FsverityChunkedFileReader<F, M> {
    /// Creates an instance that reads from `chunked_file` and verifies each chunk against
    /// `merkle_tree`. Construction fails with `BadSignature` unless `sig` verifies (per
    /// `authenticator`) against the fs-verity formatted digest built from the tree's root hash
    /// and `file_size`.
    #[allow(dead_code)]
    pub fn new<A: Authenticator>(
        authenticator: &A,
        chunked_file: F,
        file_size: u64,
        sig: Vec<u8>,
        merkle_tree: M,
    ) -> Result<FsverityChunkedFileReader<F, M>, FsverityError> {
        // TODO(victorhsieh): Use generic constant directly once supported. No need to assert
        // afterward.
        let mut buf = [0u8; 4096];
        assert_eq!(buf.len() as u64, M::CHUNK_SIZE);
        // Chunk 0 of the tree dump is hashed to produce the root hash; a short read means the
        // dump is truncated.
        let size = merkle_tree.read_chunk(0, &mut buf)?;
        if buf.len() != size {
            return Err(FsverityError::InsufficientData(size));
        }
        let root_hash = Sha256Hasher::new()?.update(&buf[..])?.finalize()?;
        // Trust the root hash only after the signature over the formatted digest checks out.
        let fsverity_digest = build_fsverity_formatted_digest(&root_hash, file_size)?;
        let valid = authenticator.verify(&sig, &fsverity_digest)?;
        if valid {
            Ok(FsverityChunkedFileReader { chunked_file, file_size, merkle_tree, root_hash })
        } else {
            Err(FsverityError::BadSignature)
        }
    }
}
197
impl<F: ReadOnlyDataByChunk, M: ReadOnlyDataByChunk> ReadOnlyDataByChunk
    for FsverityChunkedFileReader<F, M>
{
    /// Reads the chunk at `chunk_index` from the underlying file, then verifies it up the
    /// Merkle tree. Any verification failure is surfaced to the caller as an `EIO` I/O error.
    fn read_chunk(&self, chunk_index: u64, buf: &mut [u8]) -> io::Result<usize> {
        debug_assert!(buf.len() as u64 >= Self::CHUNK_SIZE);
        let size = self.chunked_file.read_chunk(chunk_index, buf)?;
        // Recompute the root hash from this chunk; a mismatch anywhere in the walk maps to EIO.
        let root_hash = verity_check(&buf[..size], chunk_index, self.file_size, &self.merkle_tree)
            .map_err(|_| io::Error::from_raw_os_error(EIO))?;
        // The recomputed root must match the signature-verified root captured at construction.
        if root_hash != self.root_hash {
            Err(io::Error::from_raw_os_error(EIO))
        } else {
            Ok(size)
        }
    }
}
213
#[cfg(test)]
mod tests {
    use super::*;
    use crate::auth::FakeAuthenticator;
    use crate::reader::ReadOnlyDataByChunk;
    use anyhow::Result;

    // The reader type used by every test: both file and tree are in-memory byte slices.
    type InMemoryReader<'a> = FsverityChunkedFileReader<&'a [u8], &'a [u8]>;

    // Number of 4K chunks needed to cover `file_size` bytes.
    fn total_chunk_number(file_size: u64) -> u64 {
        (file_size + 4095) / 4096
    }

    // Builds a verified reader over `file` with an authenticator that accepts any signature,
    // factoring out the construction boilerplate shared by the tests below.
    fn new_reader<'a>(
        file: &'a [u8],
        merkle_tree: &'a [u8],
        sig: Vec<u8>,
    ) -> Result<InMemoryReader<'a>, FsverityError> {
        let authenticator = FakeAuthenticator::always_succeed();
        FsverityChunkedFileReader::new(&authenticator, file, file.len() as u64, sig, merkle_tree)
    }

    // Reads the whole file back chunk by chunk, asserting every chunk verifies.
    fn assert_full_read(verified_file: &InMemoryReader<'_>, file_size: u64) {
        for i in 0..total_chunk_number(file_size) {
            let mut buf = [0u8; 4096];
            assert!(verified_file.read_chunk(i, &mut buf[..]).is_ok());
        }
    }

    #[test]
    fn fsverity_verify_full_read_4k() -> Result<()> {
        let file = &include_bytes!("../testdata/input.4k")[..];
        let merkle_tree = &include_bytes!("../testdata/input.4k.merkle_dump")[..];
        let sig = include_bytes!("../testdata/input.4k.fsv_sig").to_vec();
        let verified_file = new_reader(file, merkle_tree, sig)?;
        assert_full_read(&verified_file, file.len() as u64);
        Ok(())
    }

    #[test]
    fn fsverity_verify_full_read_4k1() -> Result<()> {
        let file = &include_bytes!("../testdata/input.4k1")[..];
        let merkle_tree = &include_bytes!("../testdata/input.4k1.merkle_dump")[..];
        let sig = include_bytes!("../testdata/input.4k1.fsv_sig").to_vec();
        let verified_file = new_reader(file, merkle_tree, sig)?;
        assert_full_read(&verified_file, file.len() as u64);
        Ok(())
    }

    #[test]
    fn fsverity_verify_full_read_4m() -> Result<()> {
        let file = &include_bytes!("../testdata/input.4m")[..];
        let merkle_tree = &include_bytes!("../testdata/input.4m.merkle_dump")[..];
        let sig = include_bytes!("../testdata/input.4m.fsv_sig").to_vec();
        let verified_file = new_reader(file, merkle_tree, sig)?;
        assert_full_read(&verified_file, file.len() as u64);
        Ok(())
    }

    #[test]
    fn fsverity_verify_bad_merkle_tree() -> Result<()> {
        let file = &include_bytes!("../testdata/input.4m")[..];
        // First leaf node is corrupted.
        let merkle_tree = &include_bytes!("../testdata/input.4m.merkle_dump.bad")[..];
        let sig = include_bytes!("../testdata/input.4m.fsv_sig").to_vec();
        let verified_file = new_reader(file, merkle_tree, sig)?;

        // A corrupted lowest-level node (a 4K chunk holding 128 SHA-256 hashes) fails the read
        // of every data chunk it covers, but not the chunks before or after its range.
        let mut buf = [0u8; 4096];
        let num_hashes = 4096 / 32;
        let last_index = num_hashes;
        for i in 0..last_index {
            assert!(verified_file.read_chunk(i, &mut buf[..]).is_err());
        }
        assert!(verified_file.read_chunk(last_index, &mut buf[..]).is_ok());
        Ok(())
    }

    #[test]
    fn invalid_signature() -> Result<()> {
        // With an authenticator that rejects everything, construction itself must fail.
        let authenticator = FakeAuthenticator::always_fail();
        let file = &include_bytes!("../testdata/input.4m")[..];
        let merkle_tree = &include_bytes!("../testdata/input.4m.merkle_dump")[..];
        let sig = include_bytes!("../testdata/input.4m.fsv_sig").to_vec();
        assert!(FsverityChunkedFileReader::new(
            &authenticator,
            file,
            file.len() as u64,
            sig,
            merkle_tree
        )
        .is_err());
        Ok(())
    }
}
331}