/*
 * sha2-ce-glue.c - SHA-224/SHA-256 using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2014 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/unaligned.h>
#include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

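/*
 * Core transform, implemented in assembly (sha2-ce-core.S). 'head' may
 * point to a buffered partial block that is hashed before 'src'; the
 * glue code below relies on a nonzero 'bytes' count making the assembly
 * append the final padding itself (finup fast path).
 */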
asmlinkage int sha2_ce_transform(int blocks, u8 const *src, u32 *state,
                                 u8 *head, long bytes);

static int sha224_init(struct shash_desc *desc)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        *sctx = (struct sha256_state){
                .state = {
                        SHA224_H0, SHA224_H1, SHA224_H2, SHA224_H3,
                        SHA224_H4, SHA224_H5, SHA224_H6, SHA224_H7,
                }
        };
        return 0;
}

static int sha256_init(struct shash_desc *desc)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        *sctx = (struct sha256_state){
                .state = {
                        SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
                        SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7,
                }
        };
        return 0;
}

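/*
 * Buffer incoming data in sctx->buf until a full 64-byte block is
 * available, then hash as many whole blocks as possible in a single call
 * to the Crypto Extensions transform. The NEON unit is claimed with
 * kernel_neon_begin_partial(28), i.e. at most 28 SIMD registers, and
 * released again immediately after the transform.
 */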
static int sha2_update(struct shash_desc *desc, const u8 *data,
                       unsigned int len)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;

        sctx->count += len;

        if ((partial + len) >= SHA256_BLOCK_SIZE) {
                int blocks;

                if (partial) {
                        int p = SHA256_BLOCK_SIZE - partial;

                        memcpy(sctx->buf + partial, data, p);
                        data += p;
                        len -= p;
                }

                blocks = len / SHA256_BLOCK_SIZE;
                len %= SHA256_BLOCK_SIZE;

                kernel_neon_begin_partial(28);
                sha2_ce_transform(blocks, data, sctx->state,
                                  partial ? sctx->buf : NULL, 0);
                kernel_neon_end();

                data += blocks * SHA256_BLOCK_SIZE;
                partial = 0;
        }
        if (len)
                memcpy(sctx->buf + partial, data, len);
        return 0;
}

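/*
 * Append the standard SHA-2 padding: a 0x80 byte, zero bytes up to an
 * offset of 56 mod 64, and the message length in bits as a 64-bit
 * big-endian value. Routing the padding through sha2_update() reuses the
 * block handling above.
 */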
static void sha2_final(struct shash_desc *desc)
{
        static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, };

        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be64 bits = cpu_to_be64(sctx->count << 3);
        u32 padlen = SHA256_BLOCK_SIZE
                     - ((sctx->count + sizeof(bits)) % SHA256_BLOCK_SIZE);

        sha2_update(desc, padding, padlen);
        sha2_update(desc, (const u8 *)&bits, sizeof(bits));
}

static int sha224_final(struct shash_desc *desc, u8 *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be32 *dst = (__be32 *)out;
        int i;

        sha2_final(desc);

        for (i = 0; i < SHA224_DIGEST_SIZE / sizeof(__be32); i++)
                put_unaligned_be32(sctx->state[i], dst++);

        *sctx = (struct sha256_state){};
        return 0;
}

static int sha256_final(struct shash_desc *desc, u8 *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be32 *dst = (__be32 *)out;
        int i;

        sha2_final(desc);

        for (i = 0; i < SHA256_DIGEST_SIZE / sizeof(__be32); i++)
                put_unaligned_be32(sctx->state[i], dst++);

        *sctx = (struct sha256_state){};
        return 0;
}

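/*
 * finup: if no data has been buffered yet and the input is a non-zero
 * whole number of blocks, hand the entire input to the assembly in one
 * call and let the nonzero 'bytes' argument make it append the final
 * padding as well. Otherwise fall back to the ordinary update + final
 * sequence.
 */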
static void sha2_finup(struct shash_desc *desc, const u8 *data,
                       unsigned int len)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        int blocks;

        if (sctx->count || !len || (len % SHA256_BLOCK_SIZE)) {
                sha2_update(desc, data, len);
                sha2_final(desc);
                return;
        }

        /*
         * Use a fast path if the input is a multiple of 64 bytes. In
         * this case, there is no need to copy data around, and we can
         * perform the entire digest calculation in a single invocation
         * of sha2_ce_transform()
         */
        blocks = len / SHA256_BLOCK_SIZE;

        kernel_neon_begin_partial(28);
        sha2_ce_transform(blocks, data, sctx->state, NULL, len);
        kernel_neon_end();
        data += blocks * SHA256_BLOCK_SIZE;
}

static int sha224_finup(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be32 *dst = (__be32 *)out;
        int i;

        sha2_finup(desc, data, len);

        for (i = 0; i < SHA224_DIGEST_SIZE / sizeof(__be32); i++)
                put_unaligned_be32(sctx->state[i], dst++);

        *sctx = (struct sha256_state){};
        return 0;
}

static int sha256_finup(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be32 *dst = (__be32 *)out;
        int i;

        sha2_finup(desc, data, len);

        for (i = 0; i < SHA256_DIGEST_SIZE / sizeof(__be32); i++)
                put_unaligned_be32(sctx->state[i], dst++);

        *sctx = (struct sha256_state){};
        return 0;
}

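/*
 * Export/import copy the raw sha256_state, so a hash in progress can be
 * serialised and resumed later; the layout is the same struct
 * sha256_state used by the generic SHA-256 implementation.
 */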
static int sha2_export(struct shash_desc *desc, void *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        struct sha256_state *dst = out;

        *dst = *sctx;
        return 0;
}

static int sha2_import(struct shash_desc *desc, const void *in)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        struct sha256_state const *src = in;

        *sctx = *src;
        return 0;
}

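/*
 * SHA-224 and SHA-256 share struct sha256_state and all of the helpers
 * above; they differ only in their initial values and digest size.
 * Priority 200 makes the crypto API prefer these drivers over the
 * generic C implementations.
 */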
static struct shash_alg algs[] = { {
        .init                   = sha224_init,
        .update                 = sha2_update,
        .final                  = sha224_final,
        .finup                  = sha224_finup,
        .export                 = sha2_export,
        .import                 = sha2_import,
        .descsize               = sizeof(struct sha256_state),
        .digestsize             = SHA224_DIGEST_SIZE,
        .statesize              = sizeof(struct sha256_state),
        .base                   = {
                .cra_name               = "sha224",
                .cra_driver_name        = "sha224-ce",
                .cra_priority           = 200,
                .cra_flags              = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize          = SHA256_BLOCK_SIZE,
                .cra_module             = THIS_MODULE,
        }
}, {
        .init                   = sha256_init,
        .update                 = sha2_update,
        .final                  = sha256_final,
        .finup                  = sha256_finup,
        .export                 = sha2_export,
        .import                 = sha2_import,
        .descsize               = sizeof(struct sha256_state),
        .digestsize             = SHA256_DIGEST_SIZE,
        .statesize              = sizeof(struct sha256_state),
        .base                   = {
                .cra_name               = "sha256",
                .cra_driver_name        = "sha256-ce",
                .cra_priority           = 200,
                .cra_flags              = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize          = SHA256_BLOCK_SIZE,
                .cra_module             = THIS_MODULE,
        }
} };

static int __init sha2_ce_mod_init(void)
{
        return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}

static void __exit sha2_ce_mod_fini(void)
{
        crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

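/*
 * Register only when the CPU advertises the SHA-2 Crypto Extensions; the
 * cpu_feature match entry also allows the module to be autoloaded on
 * such CPUs.
 */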
module_cpu_feature_match(SHA2, sha2_ce_mod_init);
module_exit(sha2_ce_mod_fini);