//! Pure Rust implementation of the [Advanced Encryption Standard][AES]
//! (AES, a.k.a. Rijndael).
//!
//! # ⚠️ Security Warning: Hazmat!
//!
//! This crate implements only the low-level block cipher function, and is intended
//! for use in implementing higher-level constructions *only*. It is NOT
//! intended for direct use in applications.
//!
//! USE AT YOUR OWN RISK!
//!
//! # Supported backends
//! This crate provides multiple backends including a portable pure Rust
//! backend as well as ones based on CPU intrinsics.
//!
//! By default, it performs runtime detection of CPU intrinsics and uses them
//! if they are available.
//!
//! ## "soft" portable backend
//! As a baseline implementation, this crate provides a constant-time pure Rust
//! implementation based on [fixslicing], a more advanced form of bitslicing
//! implemented entirely in terms of bitwise arithmetic with no use of any
//! lookup tables or data-dependent branches.
//!
//! Enabling the `aes_compact` configuration flag will reduce the code size of this
//! backend at the cost of decreased performance (using a modified form of
//! the fixslicing technique called "semi-fixslicing").
//!
//! ## ARMv8 intrinsics (Rust 1.61+)
//! On `aarch64` targets, including `aarch64-apple-darwin` (Apple M1) and Linux
//! targets such as `aarch64-unknown-linux-gnu` and `aarch64-unknown-linux-musl`,
//! support for using AES intrinsics provided by the ARMv8 Cryptography Extensions
//! is available.
//!
//! On Linux and macOS, support for ARMv8 AES intrinsics is autodetected at
//! runtime. On other platforms the `aes` target feature must be enabled via
//! RUSTFLAGS.
//!
//! ## `x86`/`x86_64` intrinsics (AES-NI)
//! By default this crate uses runtime detection on `i686`/`x86_64` targets
//! in order to determine if AES-NI is available, and if it is not, it will
//! fall back to using a constant-time software implementation.
//!
//! Passing `RUSTFLAGS=-C target-feature=+aes,+ssse3` explicitly at compile-time
//! will override runtime detection and ensure that AES-NI is always used.
//! Programs built in this manner will crash with an illegal instruction on
//! CPUs which do not have AES-NI enabled.
//!
//! Note: runtime detection is not possible on SGX targets. Please use the
//! aforementioned `RUSTFLAGS` to leverage AES-NI on these targets.
//!
//! # Examples
//! ```
//! use aes::Aes128;
//! use aes::cipher::{Array, BlockCipherEncrypt, BlockCipherDecrypt, KeyInit};
//!
//! let key = Array::from([0u8; 16]);
//! let mut block = Array::from([42u8; 16]);
//!
//! // Initialize cipher
//! let cipher = Aes128::new(&key);
//!
//! let block_copy = block.clone();
//!
//! // Encrypt block in-place
//! cipher.encrypt_block(&mut block);
//!
//! // And decrypt it back
//! cipher.decrypt_block(&mut block);
//! assert_eq!(block, block_copy);
//!
//! // The implementation supports parallel block processing. The number of blocks
//! // processed in parallel generally depends on hardware capabilities.
//! // This is achieved via instruction-level parallelism (ILP) on a single
//! // CPU core, which is different from multi-threaded parallelism.
//! let mut blocks = [block; 100];
//! cipher.encrypt_blocks(&mut blocks);
//!
//! for block in blocks.iter_mut() {
//!     cipher.decrypt_block(block);
//!     assert_eq!(block, &block_copy);
//! }
//!
//! // `decrypt_blocks` also supports parallel block processing.
//! cipher.decrypt_blocks(&mut blocks);
//!
//! for block in blocks.iter_mut() {
//!     cipher.encrypt_block(block);
//!     assert_eq!(block, &block_copy);
//! }
//! ```
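//!
//! A minimal additional sketch (assuming the encryption-only and decryption-only
//! key schedule types exported by this crate): when only one direction is needed,
//! `Aes128Enc` or `Aes128Dec` can be used instead of the combined `Aes128` type.
//! ```
//! use aes::{Aes128Enc, Aes128Dec};
//! use aes::cipher::{Array, BlockCipherEncrypt, BlockCipherDecrypt, KeyInit};
//!
//! let key = Array::from([0u8; 16]);
//! let mut block = Array::from([42u8; 16]);
//!
//! // Encryption-only key schedule
//! let enc = Aes128Enc::new(&key);
//! enc.encrypt_block(&mut block);
//!
//! // Decryption-only key schedule
//! let dec = Aes128Dec::new(&key);
//! dec.decrypt_block(&mut block);
//! assert_eq!(block, Array::from([42u8; 16]));
//! ```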
//!
//! For implementations of block cipher modes of operation, see the
//! [`block-modes`] repository.
//!
//! # Configuration Flags
//!
//! You can modify the crate using the following configuration flags:
//!
//! - `aes_force_soft`: force the software implementation.
//! - `aes_compact`: reduce code size at the cost of slower performance
//!   (affects only the software backend).
//!
//! These flags can be enabled using the `RUSTFLAGS` environment variable
//! (e.g. `RUSTFLAGS="--cfg aes_compact"`) or by modifying `.cargo/config`.
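//!
//! For illustration, a sketch of the `.cargo/config.toml` entry that would set the
//! `aes_compact` flag for a project (standard Cargo configuration syntax, shown
//! here as an example rather than taken from this crate):
//! ```text
//! [build]
//! rustflags = ["--cfg", "aes_compact"]
//! ```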
//!
//! [AES]: https://en.wikipedia.org/wiki/Advanced_Encryption_Standard
//! [fixslicing]: https://eprint.iacr.org/2020/1123.pdf
//! [AES-NI]: https://en.wikipedia.org/wiki/AES_instruction_set
//! [`block-modes`]: https://github.com/RustCrypto/block-modes/

#![no_std]
#![doc(
    html_logo_url = "https://raw.githubusercontent.com/RustCrypto/media/26acc39f/logo.svg",
    html_favicon_url = "https://raw.githubusercontent.com/RustCrypto/media/26acc39f/logo.svg"
)]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![warn(missing_docs, rust_2018_idioms)]

#[cfg(feature = "hazmat")]
pub mod hazmat;

#[macro_use]
mod macros;
mod soft;

use cfg_if::cfg_if;

// Select the block cipher backend: ARMv8 or x86/x86_64 AES intrinsics with runtime
// autodetection where available, otherwise the portable "soft" implementation.
cfg_if! {
    if #[cfg(all(target_arch = "aarch64", not(aes_force_soft)))] {
        mod armv8;
        mod autodetect;
        pub use autodetect::*;
    } else if #[cfg(all(
        any(target_arch = "x86", target_arch = "x86_64"),
        not(aes_force_soft)
    ))] {
        mod autodetect;
        mod ni;
        pub use autodetect::*;
    } else {
        pub use soft::*;
    }
}

pub use cipher;
use cipher::{array::Array, consts::U16, crypto_common::WeakKeyError};

/// 128-bit AES block
pub type Block = Array<u8, U16>;

/// Check if any bit of the upper half of the key is set.
///
/// This follows the interpretation laid out in section `11.4.10.4 Reject of weak keys`
/// from the [TPM specification][0]:
/// ```text
/// In the case of AES, at least one bit in the upper half of the key must be set
/// ```
///
/// [0]: https://trustedcomputinggroup.org/wp-content/uploads/TPM-2.0-1.83-Part-1-Architecture.pdf#page=82
pub(crate) fn weak_key_test<const N: usize>(key: &[u8; N]) -> Result<(), WeakKeyError> {
    // OR together the bytes of the upper half of the key. Native endianness is fine
    // here because the result is only compared against zero.
    let t = match N {
        16 => u64::from_ne_bytes(key[..8].try_into().unwrap()),
        24 => {
            let t1 = u64::from_ne_bytes(key[..8].try_into().unwrap());
            let t2 = u32::from_ne_bytes(key[8..12].try_into().unwrap());
            t1 | u64::from(t2)
        }
        32 => {
            let t1 = u64::from_ne_bytes(key[..8].try_into().unwrap());
            let t2 = u64::from_ne_bytes(key[8..16].try_into().unwrap());
            t1 | t2
        }
        _ => unreachable!(),
    };
    // The key is considered weak iff no bit in the upper half is set.
    match t {
        0 => Err(WeakKeyError),
        _ => Ok(()),
    }
}
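
// Illustrative sketch (not part of the original source): a small test showing how
// `weak_key_test` behaves for an AES-128 key. The module and test names below are
// chosen for this example only.
#[cfg(test)]
mod weak_key_examples {
    use super::weak_key_test;

    #[test]
    fn rejects_keys_with_zero_upper_half() {
        // The bytes checked above (`key[..8]` for a 16-byte key) are all zero,
        // so the key is reported as weak.
        assert!(weak_key_test(&[0u8; 16]).is_err());

        // Setting any bit in that half makes the key acceptable.
        let mut key = [0u8; 16];
        key[0] = 1;
        assert!(weak_key_test(&key).is_ok());
    }
}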

#[cfg(test)]
mod tests {
    #[cfg(feature = "zeroize")]
    #[test]
    fn zeroize_works() {
        use super::soft;

        fn test_for<T: zeroize::ZeroizeOnDrop>(val: T) {
            use core::mem::{ManuallyDrop, size_of};

            let mut val = ManuallyDrop::new(val);
            let ptr = &val as *const _ as *const u8;
            let len = size_of::<ManuallyDrop<T>>();

            unsafe { ManuallyDrop::drop(&mut val) };

            let slice = unsafe { core::slice::from_raw_parts(ptr, len) };

            assert!(slice.iter().all(|&byte| byte == 0));
        }

        let key_128 = [42; 16].into();
        let key_192 = [42; 24].into();
        let key_256 = [42; 32].into();

        use cipher::KeyInit as _;
        test_for(soft::Aes128::new(&key_128));
        test_for(soft::Aes128Enc::new(&key_128));
        test_for(soft::Aes128Dec::new(&key_128));
        test_for(soft::Aes192::new(&key_192));
        test_for(soft::Aes192Enc::new(&key_192));
        test_for(soft::Aes192Dec::new(&key_192));
        test_for(soft::Aes256::new(&key_256));
        test_for(soft::Aes256Enc::new(&key_256));
        test_for(soft::Aes256Dec::new(&key_256));

        #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), not(aes_force_soft)))]
        {
            use super::ni;

            cpufeatures::new!(aes_intrinsics, "aes");
            if aes_intrinsics::get() {
                test_for(ni::Aes128::new(&key_128));
                test_for(ni::Aes128Enc::new(&key_128));
                test_for(ni::Aes128Dec::new(&key_128));
                test_for(ni::Aes192::new(&key_192));
                test_for(ni::Aes192Enc::new(&key_192));
                test_for(ni::Aes192Dec::new(&key_192));
                test_for(ni::Aes256::new(&key_256));
                test_for(ni::Aes256Enc::new(&key_256));
                test_for(ni::Aes256Dec::new(&key_256));
            }
        }

        #[cfg(all(target_arch = "aarch64", not(aes_force_soft)))]
        {
            use super::armv8;

            cpufeatures::new!(aes_intrinsics, "aes");
            if aes_intrinsics::get() {
                test_for(armv8::Aes128::new(&key_128));
                test_for(armv8::Aes128Enc::new(&key_128));
                test_for(armv8::Aes128Dec::new(&key_128));
                test_for(armv8::Aes192::new(&key_192));
                test_for(armv8::Aes192Enc::new(&key_192));
                test_for(armv8::Aes192Dec::new(&key_192));
                test_for(armv8::Aes256::new(&key_256));
                test_for(armv8::Aes256Enc::new(&key_256));
                test_for(armv8::Aes256Dec::new(&key_256));
            }
        }
    }
}