blob: 4af360ffa35feb05d41b5ba9fa5aaa9c38fdd5d0 [file] [log] [blame]
/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17use libc::EIO;
18use std::io;
19
20use super::common::{build_fsverity_digest, merkle_tree_height, FsverityError};
21use super::sys::{FS_VERITY_HASH_ALG_SHA256, FS_VERITY_MAGIC};
22use crate::auth::Authenticator;
23use crate::common::{divide_roundup, CHUNK_SIZE};
24use crate::crypto::{CryptoError, Sha256Hasher};
Victor Hsieh09e26262021-03-03 16:00:55 -080025use crate::file::ReadOnlyDataByChunk;
Victor Hsiehdde17902021-02-26 12:35:31 -080026
/// A full chunk of zero bytes, used to zero-pad incomplete chunks up to `CHUNK_SIZE`
/// before hashing (fs-verity hashes every chunk at its full, padded size).
const ZEROS: [u8; CHUNK_SIZE as usize] = [0u8; CHUNK_SIZE as usize];

// The size of `struct fsverity_formatted_digest` in Linux with SHA-256.
// 12 bytes of header (8-byte magic + 2-byte algorithm id + 2-byte digest size) + the digest.
const SIZE_OF_FSVERITY_FORMATTED_DIGEST_SHA256: usize = 12 + Sha256Hasher::HASH_SIZE;

/// A raw SHA-256 digest.
type HashBuffer = [u8; Sha256Hasher::HASH_SIZE];
33
34fn hash_with_padding(chunk: &[u8], pad_to: usize) -> Result<HashBuffer, CryptoError> {
35 let padding_size = pad_to - chunk.len();
36 Sha256Hasher::new()?.update(&chunk)?.update(&ZEROS[..padding_size])?.finalize()
37}
38
39fn verity_check<T: ReadOnlyDataByChunk>(
40 chunk: &[u8],
41 chunk_index: u64,
42 file_size: u64,
43 merkle_tree: &T,
44) -> Result<HashBuffer, FsverityError> {
45 // The caller should not be able to produce a chunk at the first place if `file_size` is 0. The
46 // current implementation expects to crash when a `ReadOnlyDataByChunk` implementation reads
47 // beyond the file size, including empty file.
48 assert_ne!(file_size, 0);
49
50 let chunk_hash = hash_with_padding(&chunk, CHUNK_SIZE as usize)?;
51
52 fsverity_walk(chunk_index, file_size, merkle_tree)?.try_fold(
53 chunk_hash,
54 |actual_hash, result| {
55 let (merkle_chunk, hash_offset_in_chunk) = result?;
56 let expected_hash =
57 &merkle_chunk[hash_offset_in_chunk..hash_offset_in_chunk + Sha256Hasher::HASH_SIZE];
58 if actual_hash != expected_hash {
59 return Err(FsverityError::CannotVerify);
60 }
61 Ok(hash_with_padding(&merkle_chunk, CHUNK_SIZE as usize)?)
62 },
63 )
64}
65
/// Given a chunk index and the size of the file, returns an iterator that walks the Merkle tree
/// from the leaf to the root. The iterator carries the slice of the chunk/node as well as the
/// offset of the child node's hash. It is up to the iterator user to use the node and hash,
/// e.g. for the actual verification.
#[allow(clippy::needless_collect)]
fn fsverity_walk<T: ReadOnlyDataByChunk>(
    chunk_index: u64,
    file_size: u64,
    merkle_tree: &T,
) -> Result<impl Iterator<Item = Result<([u8; 4096], usize), FsverityError>> + '_, FsverityError> {
    // With SHA-256, each 4K tree node holds 4096 / 32 = 128 child hashes.
    let hashes_per_node = CHUNK_SIZE / Sha256Hasher::HASH_SIZE as u64;
    debug_assert_eq!(hashes_per_node, 128u64);
    let max_level = merkle_tree_height(file_size).expect("file should not be empty") as u32;
    // Iterate levels root-first (x == max_level is the root level, x == 0 the leaf level).
    let root_to_leaf_steps = (0..=max_level)
        .rev()
        .map(|x| {
            // Number of data chunks covered by a single hash at this level.
            let leaves_per_hash = hashes_per_node.pow(x);
            let leaves_size_per_hash = CHUNK_SIZE * leaves_per_hash;
            // Bytes of file data covered by one whole node at this level.
            let leaves_size_per_node = leaves_size_per_hash * hashes_per_node;
            let nodes_at_level = divide_roundup(file_size, leaves_size_per_node);
            let level_size = nodes_at_level * CHUNK_SIZE;
            // Byte offset, within this level, of the hash covering `chunk_index`.
            let offset_in_level = (chunk_index / leaves_per_hash) * Sha256Hasher::HASH_SIZE as u64;
            (level_size, offset_in_level)
        })
        // Levels are laid out consecutively in the merkle tree (root level first, per the
        // iteration order above); accumulate level sizes to turn the per-level offset into a
        // global byte offset into the tree.
        .scan(0, |level_offset, (level_size, offset_in_level)| {
            let this_level_offset = *level_offset;
            *level_offset += level_size;
            let global_hash_offset = this_level_offset + offset_in_level;
            Some(global_hash_offset)
        })
        // Split the global byte offset into (tree chunk to read, hash offset inside that chunk).
        .map(|global_hash_offset| {
            let chunk_index = global_hash_offset / CHUNK_SIZE;
            let hash_offset_in_chunk = (global_hash_offset % CHUNK_SIZE) as usize;
            (chunk_index, hash_offset_in_chunk)
        })
        .collect::<Vec<_>>(); // Needs to collect first to be able to reverse below.

    // Reverse to yield leaf-to-root order, reading each tree node lazily as the iterator advances.
    Ok(root_to_leaf_steps.into_iter().rev().map(move |(chunk_index, hash_offset_in_chunk)| {
        let mut merkle_chunk = [0u8; 4096];
        // read_chunk is supposed to return a full chunk, or an incomplete one at the end of the
        // file. In the incomplete case, the hash is calculated with 0-padding to the chunk size.
        // Therefore, we don't need to check the returned size here.
        let _ = merkle_tree.read_chunk(chunk_index, &mut merkle_chunk)?;
        Ok((merkle_chunk, hash_offset_in_chunk))
    }))
}
112
113fn build_fsverity_formatted_digest(
114 root_hash: &HashBuffer,
115 file_size: u64,
116) -> Result<[u8; SIZE_OF_FSVERITY_FORMATTED_DIGEST_SHA256], CryptoError> {
117 let digest = build_fsverity_digest(root_hash, file_size)?;
118 // Little-endian byte representation of fsverity_formatted_digest from linux/fsverity.h
119 // Not FFI-ed as it seems easier to deal with the raw bytes manually.
120 let mut formatted_digest = [0u8; SIZE_OF_FSVERITY_FORMATTED_DIGEST_SHA256];
121 formatted_digest[0..8].copy_from_slice(FS_VERITY_MAGIC);
122 formatted_digest[8..10].copy_from_slice(&(FS_VERITY_HASH_ALG_SHA256 as u16).to_le_bytes());
123 formatted_digest[10..12].copy_from_slice(&(Sha256Hasher::HASH_SIZE as u16).to_le_bytes());
124 formatted_digest[12..].copy_from_slice(&digest);
125 Ok(formatted_digest)
126}
127
/// A chunk-based file reader that verifies every chunk read against the file's fs-verity
/// Merkle tree. Construction succeeds only if the signed fs-verity digest verifies
/// (see [`VerifiedFileReader::new`]).
pub struct VerifiedFileReader<F: ReadOnlyDataByChunk, M: ReadOnlyDataByChunk> {
    /// Reader of the actual file content to be verified.
    chunked_file: F,
    /// Total file size in bytes; 0 is not supported by verification (see `verity_check`).
    file_size: u64,
    /// Reader of the file's Merkle tree (root node in the first chunk).
    merkle_tree: M,
    /// SHA-256 of the Merkle tree's (padded) root node, pinned at construction time.
    root_hash: HashBuffer,
}
134
Victor Hsieh09e26262021-03-03 16:00:55 -0800135impl<F: ReadOnlyDataByChunk, M: ReadOnlyDataByChunk> VerifiedFileReader<F, M> {
Victor Hsiehdde17902021-02-26 12:35:31 -0800136 pub fn new<A: Authenticator>(
137 authenticator: &A,
138 chunked_file: F,
139 file_size: u64,
140 sig: Vec<u8>,
141 merkle_tree: M,
Victor Hsieh09e26262021-03-03 16:00:55 -0800142 ) -> Result<VerifiedFileReader<F, M>, FsverityError> {
Victor Hsiehdde17902021-02-26 12:35:31 -0800143 let mut buf = [0u8; CHUNK_SIZE as usize];
144 let size = merkle_tree.read_chunk(0, &mut buf)?;
145 if buf.len() != size {
146 return Err(FsverityError::InsufficientData(size));
147 }
148 let root_hash = Sha256Hasher::new()?.update(&buf[..])?.finalize()?;
149 let formatted_digest = build_fsverity_formatted_digest(&root_hash, file_size)?;
150 let valid = authenticator.verify(&sig, &formatted_digest)?;
151 if valid {
Victor Hsieh09e26262021-03-03 16:00:55 -0800152 Ok(VerifiedFileReader { chunked_file, file_size, merkle_tree, root_hash })
Victor Hsiehdde17902021-02-26 12:35:31 -0800153 } else {
154 Err(FsverityError::BadSignature)
155 }
156 }
157}
158
159impl<F: ReadOnlyDataByChunk, M: ReadOnlyDataByChunk> ReadOnlyDataByChunk
Victor Hsieh09e26262021-03-03 16:00:55 -0800160 for VerifiedFileReader<F, M>
Victor Hsiehdde17902021-02-26 12:35:31 -0800161{
162 fn read_chunk(&self, chunk_index: u64, buf: &mut [u8]) -> io::Result<usize> {
163 debug_assert!(buf.len() as u64 >= CHUNK_SIZE);
164 let size = self.chunked_file.read_chunk(chunk_index, buf)?;
165 let root_hash = verity_check(&buf[..size], chunk_index, self.file_size, &self.merkle_tree)
166 .map_err(|_| io::Error::from_raw_os_error(EIO))?;
167 if root_hash != self.root_hash {
168 Err(io::Error::from_raw_os_error(EIO))
169 } else {
170 Ok(size)
171 }
172 }
173}
174
#[cfg(test)]
mod tests {
    use super::*;
    use crate::auth::FakeAuthenticator;
    use crate::file::{LocalFileReader, ReadOnlyDataByChunk};
    use anyhow::Result;
    use std::fs::{self, File};
    use std::io::Read;

    type LocalVerifiedFileReader = VerifiedFileReader<LocalFileReader, LocalFileReader>;

    // Number of 4K chunks needed to cover `file_size` bytes (last chunk may be partial).
    fn total_chunk_number(file_size: u64) -> u64 {
        (file_size + 4095) / 4096
    }

    // Returns a reader with fs-verity verification and the file size.
    // Uses an always-succeeding fake authenticator: these tests exercise the Merkle tree
    // verification itself, not the signature check.
    fn new_reader_with_fsverity(
        content_path: &str,
        merkle_tree_path: &str,
        signature_path: &str,
    ) -> Result<(LocalVerifiedFileReader, u64)> {
        let file_reader = LocalFileReader::new(File::open(content_path)?)?;
        let file_size = file_reader.len();
        let merkle_tree = LocalFileReader::new(File::open(merkle_tree_path)?)?;
        let mut sig = Vec::new();
        let _ = File::open(signature_path)?.read_to_end(&mut sig)?;
        let authenticator = FakeAuthenticator::always_succeed();
        Ok((
            VerifiedFileReader::new(&authenticator, file_reader, file_size, sig, merkle_tree)?,
            file_size,
        ))
    }

    // Every chunk of a 4K (exact multiple of chunk size) file should verify.
    #[test]
    fn fsverity_verify_full_read_4k() -> Result<()> {
        let (file_reader, file_size) = new_reader_with_fsverity(
            "testdata/input.4k",
            "testdata/input.4k.merkle_dump",
            "testdata/input.4k.fsv_sig",
        )?;

        for i in 0..total_chunk_number(file_size) {
            let mut buf = [0u8; 4096];
            assert!(file_reader.read_chunk(i, &mut buf[..]).is_ok());
        }
        Ok(())
    }

    // Every chunk of a 4K+1 file (last chunk is a single byte) should verify.
    #[test]
    fn fsverity_verify_full_read_4k1() -> Result<()> {
        let (file_reader, file_size) = new_reader_with_fsverity(
            "testdata/input.4k1",
            "testdata/input.4k1.merkle_dump",
            "testdata/input.4k1.fsv_sig",
        )?;

        for i in 0..total_chunk_number(file_size) {
            let mut buf = [0u8; 4096];
            assert!(file_reader.read_chunk(i, &mut buf[..]).is_ok());
        }
        Ok(())
    }

    // Every chunk of a 4M file (multi-level Merkle tree) should verify.
    #[test]
    fn fsverity_verify_full_read_4m() -> Result<()> {
        let (file_reader, file_size) = new_reader_with_fsverity(
            "testdata/input.4m",
            "testdata/input.4m.merkle_dump",
            "testdata/input.4m.fsv_sig",
        )?;

        for i in 0..total_chunk_number(file_size) {
            let mut buf = [0u8; 4096];
            assert!(file_reader.read_chunk(i, &mut buf[..]).is_ok());
        }
        Ok(())
    }

    #[test]
    fn fsverity_verify_bad_merkle_tree() -> Result<()> {
        let (file_reader, _) = new_reader_with_fsverity(
            "testdata/input.4m",
            "testdata/input.4m.merkle_dump.bad", // First leaf node is corrupted.
            "testdata/input.4m.fsv_sig",
        )?;

        // A corrupted leaf-level node (a 4K chunk that contains 128 sha256 hashes) should cause
        // read failures for exactly the chunks it covers — the first 128 here — but not for
        // chunks before or after it.
        let mut buf = [0u8; 4096];
        let num_hashes = 4096 / 32;
        let last_index = num_hashes;
        for i in 0..last_index {
            assert!(file_reader.read_chunk(i, &mut buf[..]).is_err());
        }
        assert!(file_reader.read_chunk(last_index, &mut buf[..]).is_ok());
        Ok(())
    }

    // A failing authenticator must make construction fail with BadSignature.
    #[test]
    fn invalid_signature() -> Result<()> {
        let authenticator = FakeAuthenticator::always_fail();
        let file_reader = LocalFileReader::new(File::open("testdata/input.4m")?)?;
        let file_size = file_reader.len();
        let merkle_tree = LocalFileReader::new(File::open("testdata/input.4m.merkle_dump")?)?;
        let sig = fs::read("testdata/input.4m.fsv_sig")?;
        assert!(VerifiedFileReader::new(&authenticator, file_reader, file_size, sig, merkle_tree)
            .is_err());
        Ok(())
    }
}