55#include <linux/crypto.h>
66#include <crypto/internal/aead.h>
77#include <crypto/internal/cipher.h>
8- #include <crypto/internal/hash.h>
98#include <crypto/internal/skcipher.h>
109#include <crypto/aes.h>
1110#include <crypto/sha1.h>
1211#include <crypto/sha2.h>
13- #include <crypto/hmac.h>
1412#include <crypto/algapi.h>
1513#include <crypto/authenc.h>
1614#include <crypto/scatterwalk.h>
@@ -68,16 +66,10 @@ struct qat_alg_aead_ctx {
6866 dma_addr_t dec_cd_paddr ;
6967 struct icp_qat_fw_la_bulk_req enc_fw_req ;
7068 struct icp_qat_fw_la_bulk_req dec_fw_req ;
71- struct crypto_shash * hash_tfm ;
7269 enum icp_qat_hw_auth_algo qat_hash_alg ;
70+ unsigned int hash_digestsize ;
71+ unsigned int hash_blocksize ;
7372 struct qat_crypto_instance * inst ;
74- union {
75- struct sha1_state sha1 ;
76- struct sha256_state sha256 ;
77- struct sha512_state sha512 ;
78- };
79- char ipad [SHA512_BLOCK_SIZE ]; /* sufficient for SHA-1/SHA-256 as well */
80- char opad [SHA512_BLOCK_SIZE ];
8173};
8274
8375struct qat_alg_skcipher_ctx {
@@ -94,125 +86,57 @@ struct qat_alg_skcipher_ctx {
9486 int mode ;
9587};
9688
97- static int qat_get_inter_state_size (enum icp_qat_hw_auth_algo qat_hash_alg )
98- {
99- switch (qat_hash_alg ) {
100- case ICP_QAT_HW_AUTH_ALGO_SHA1 :
101- return ICP_QAT_HW_SHA1_STATE1_SZ ;
102- case ICP_QAT_HW_AUTH_ALGO_SHA256 :
103- return ICP_QAT_HW_SHA256_STATE1_SZ ;
104- case ICP_QAT_HW_AUTH_ALGO_SHA512 :
105- return ICP_QAT_HW_SHA512_STATE1_SZ ;
106- default :
107- return - EFAULT ;
108- }
109- }
110-
/*
 * qat_alg_do_precomputes() - derive the HMAC inner/outer hash states for the
 * QAT hardware setup block.
 *
 * For the context's configured hash algorithm, expand @auth_key into the
 * precomputed HMAC ipad/opad intermediate compression states and store them
 * big-endian in hash->sha.state1: the inner (ipad) state first, immediately
 * followed by the outer (opad) state at the next 8-byte-aligned offset.
 *
 * @hash:        hardware auth setup block receiving the precomputed states
 * @ctx:         AEAD context; ctx->qat_hash_alg selects the algorithm
 * @auth_key:    raw HMAC authentication key
 * @auth_keylen: length of @auth_key in bytes
 *
 * Return: 0 on success, -EFAULT for an unsupported hash algorithm.
 */
static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
				  struct qat_alg_aead_ctx *ctx,
				  const u8 *auth_key,
				  unsigned int auth_keylen)
{
	switch (ctx->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1: {
		struct hmac_sha1_key key;
		__be32 *istate = (__be32 *)hash->sha.state1;
		/*
		 * SHA-1 state is 20 bytes; the opad state starts at the next
		 * 8-byte boundary (offset 24), as the hardware expects.
		 */
		__be32 *ostate = (__be32 *)(hash->sha.state1 +
					    round_up(sizeof(key.istate.h), 8));

		hmac_sha1_preparekey(&key, auth_key, auth_keylen);
		/* Store the host-order state words big-endian for the HW. */
		for (int i = 0; i < ARRAY_SIZE(key.istate.h); i++) {
			istate[i] = cpu_to_be32(key.istate.h[i]);
			ostate[i] = cpu_to_be32(key.ostate.h[i]);
		}
		/* Key-derived material: wipe it so it can't linger on stack. */
		memzero_explicit(&key, sizeof(key));
		return 0;
	}
	case ICP_QAT_HW_AUTH_ALGO_SHA256: {
		struct hmac_sha256_key key;
		__be32 *istate = (__be32 *)hash->sha.state1;
		/* 32-byte state is already 8-byte aligned; no rounding needed. */
		__be32 *ostate = (__be32 *)(hash->sha.state1 +
					    sizeof(key.key.istate.h));

		hmac_sha256_preparekey(&key, auth_key, auth_keylen);
		for (int i = 0; i < ARRAY_SIZE(key.key.istate.h); i++) {
			istate[i] = cpu_to_be32(key.key.istate.h[i]);
			ostate[i] = cpu_to_be32(key.key.ostate.h[i]);
		}
		memzero_explicit(&key, sizeof(key));
		return 0;
	}
	case ICP_QAT_HW_AUTH_ALGO_SHA512: {
		struct hmac_sha512_key key;
		__be64 *istate = (__be64 *)hash->sha.state1;
		/* 64-byte state is already 8-byte aligned; no rounding needed. */
		__be64 *ostate = (__be64 *)(hash->sha.state1 +
					    sizeof(key.key.istate.h));

		hmac_sha512_preparekey(&key, auth_key, auth_keylen);
		/* SHA-512 state words are 64-bit, hence the __be64 stores. */
		for (int i = 0; i < ARRAY_SIZE(key.key.istate.h); i++) {
			istate[i] = cpu_to_be64(key.key.istate.h[i]);
			ostate[i] = cpu_to_be64(key.key.ostate.h[i]);
		}
		memzero_explicit(&key, sizeof(key));
		return 0;
	}
	default:
		return -EFAULT;
	}
}
217141
218142static void qat_alg_init_common_hdr (struct icp_qat_fw_comn_req_hdr * header )
@@ -259,7 +183,7 @@ static int qat_alg_aead_init_enc_session(struct crypto_aead *aead_tfm,
259183 ICP_QAT_HW_AUTH_CONFIG_BUILD (ICP_QAT_HW_AUTH_MODE1 ,
260184 ctx -> qat_hash_alg , digestsize );
261185 hash -> sha .inner_setup .auth_counter .counter =
262- cpu_to_be32 (crypto_shash_blocksize ( ctx -> hash_tfm ) );
186+ cpu_to_be32 (ctx -> hash_blocksize );
263187
264188 if (qat_alg_do_precomputes (hash , ctx , keys -> authkey , keys -> authkeylen ))
265189 return - EFAULT ;
@@ -326,7 +250,7 @@ static int qat_alg_aead_init_dec_session(struct crypto_aead *aead_tfm,
326250 struct icp_qat_hw_cipher_algo_blk * cipher =
327251 (struct icp_qat_hw_cipher_algo_blk * )((char * )dec_ctx +
328252 sizeof (struct icp_qat_hw_auth_setup ) +
329- roundup (crypto_shash_digestsize ( ctx -> hash_tfm ) , 8 ) * 2 );
253+ roundup (ctx -> hash_digestsize , 8 ) * 2 );
330254 struct icp_qat_fw_la_bulk_req * req_tmpl = & ctx -> dec_fw_req ;
331255 struct icp_qat_fw_comn_req_hdr_cd_pars * cd_pars = & req_tmpl -> cd_pars ;
332256 struct icp_qat_fw_comn_req_hdr * header = & req_tmpl -> comn_hdr ;
@@ -346,7 +270,7 @@ static int qat_alg_aead_init_dec_session(struct crypto_aead *aead_tfm,
346270 ctx -> qat_hash_alg ,
347271 digestsize );
348272 hash -> sha .inner_setup .auth_counter .counter =
349- cpu_to_be32 (crypto_shash_blocksize ( ctx -> hash_tfm ) );
273+ cpu_to_be32 (ctx -> hash_blocksize );
350274
351275 if (qat_alg_do_precomputes (hash , ctx , keys -> authkey , keys -> authkeylen ))
352276 return - EFAULT ;
@@ -368,7 +292,7 @@ static int qat_alg_aead_init_dec_session(struct crypto_aead *aead_tfm,
368292 cipher_cd_ctrl -> cipher_state_sz = AES_BLOCK_SIZE >> 3 ;
369293 cipher_cd_ctrl -> cipher_cfg_offset =
370294 (sizeof (struct icp_qat_hw_auth_setup ) +
371- roundup (crypto_shash_digestsize ( ctx -> hash_tfm ) , 8 ) * 2 ) >> 3 ;
295+ roundup (ctx -> hash_digestsize , 8 ) * 2 ) >> 3 ;
372296 ICP_QAT_FW_COMN_CURR_ID_SET (cipher_cd_ctrl , ICP_QAT_FW_SLICE_CIPHER );
373297 ICP_QAT_FW_COMN_NEXT_ID_SET (cipher_cd_ctrl , ICP_QAT_FW_SLICE_DRAM_WR );
374298
@@ -1150,32 +1074,35 @@ static int qat_alg_skcipher_xts_decrypt(struct skcipher_request *req)
11501074}
11511075
11521076static int qat_alg_aead_init (struct crypto_aead * tfm ,
1153- enum icp_qat_hw_auth_algo hash ,
1154- const char * hash_name )
1077+ enum icp_qat_hw_auth_algo hash_alg ,
1078+ unsigned int hash_digestsize ,
1079+ unsigned int hash_blocksize )
11551080{
11561081 struct qat_alg_aead_ctx * ctx = crypto_aead_ctx (tfm );
11571082
1158- ctx -> hash_tfm = crypto_alloc_shash (hash_name , 0 , 0 );
1159- if (IS_ERR (ctx -> hash_tfm ))
1160- return PTR_ERR (ctx -> hash_tfm );
1161- ctx -> qat_hash_alg = hash ;
1083+ ctx -> qat_hash_alg = hash_alg ;
1084+ ctx -> hash_digestsize = hash_digestsize ;
1085+ ctx -> hash_blocksize = hash_blocksize ;
11621086 crypto_aead_set_reqsize (tfm , sizeof (struct qat_crypto_request ));
11631087 return 0 ;
11641088}
11651089
11661090static int qat_alg_aead_sha1_init (struct crypto_aead * tfm )
11671091{
1168- return qat_alg_aead_init (tfm , ICP_QAT_HW_AUTH_ALGO_SHA1 , "sha1" );
1092+ return qat_alg_aead_init (tfm , ICP_QAT_HW_AUTH_ALGO_SHA1 ,
1093+ SHA1_DIGEST_SIZE , SHA1_BLOCK_SIZE );
11691094}
11701095
11711096static int qat_alg_aead_sha256_init (struct crypto_aead * tfm )
11721097{
1173- return qat_alg_aead_init (tfm , ICP_QAT_HW_AUTH_ALGO_SHA256 , "sha256" );
1098+ return qat_alg_aead_init (tfm , ICP_QAT_HW_AUTH_ALGO_SHA256 ,
1099+ SHA256_DIGEST_SIZE , SHA256_BLOCK_SIZE );
11741100}
11751101
11761102static int qat_alg_aead_sha512_init (struct crypto_aead * tfm )
11771103{
1178- return qat_alg_aead_init (tfm , ICP_QAT_HW_AUTH_ALGO_SHA512 , "sha512" );
1104+ return qat_alg_aead_init (tfm , ICP_QAT_HW_AUTH_ALGO_SHA512 ,
1105+ SHA512_DIGEST_SIZE , SHA512_BLOCK_SIZE );
11791106}
11801107
11811108static void qat_alg_aead_exit (struct crypto_aead * tfm )
@@ -1184,8 +1111,6 @@ static void qat_alg_aead_exit(struct crypto_aead *tfm)
11841111 struct qat_crypto_instance * inst = ctx -> inst ;
11851112 struct device * dev ;
11861113
1187- crypto_free_shash (ctx -> hash_tfm );
1188-
11891114 if (!inst )
11901115 return ;
11911116
0 commit comments