linux_sd_driver/drivers/crypto/padlock-sha.c (Subversion trunk, rev 62, marcus.erl)

/*
 * Cryptographic API.
 *
 * Support for VIA PadLock hardware crypto engine.
 *
 * Copyright (c) 2006  Michal Ludvig <michal@logix.cz>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

#include <crypto/algapi.h>
#include <crypto/sha.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/cryptohash.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include "padlock.h"

#define SHA1_DEFAULT_FALLBACK   "sha1-generic"
#define SHA256_DEFAULT_FALLBACK "sha256-generic"

struct padlock_sha_ctx {
        char            *data;
        size_t          used;
        int             bypass;
        void (*f_sha_padlock)(const char *in, char *out, int count);
        struct hash_desc fallback;
};

static inline struct padlock_sha_ctx *ctx(struct crypto_tfm *tfm)
{
        return crypto_tfm_ctx(tfm);
}

/* We'll need aligned address on the stack */
#define NEAREST_ALIGNED(ptr) \
        ((void *)ALIGN((size_t)(ptr), PADLOCK_ALIGNMENT))

static struct crypto_alg sha1_alg, sha256_alg;

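/*
 * All input is normally accumulated in the one-page buffer allocated
 * in padlock_cra_init() and hashed in one shot in padlock_sha_final().
 * When the input outgrows PAGE_SIZE we "bypass" to the software
 * fallback: replay whatever was buffered so far into the fallback
 * hash and route every later update (and the final) to it.
 */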
static void padlock_sha_bypass(struct crypto_tfm *tfm)
{
        if (ctx(tfm)->bypass)
                return;

        crypto_hash_init(&ctx(tfm)->fallback);
        if (ctx(tfm)->data && ctx(tfm)->used) {
                struct scatterlist sg;

                sg_init_one(&sg, ctx(tfm)->data, ctx(tfm)->used);
                crypto_hash_update(&ctx(tfm)->fallback, &sg, sg.length);
        }

        ctx(tfm)->used = 0;
        ctx(tfm)->bypass = 1;
}

static void padlock_sha_init(struct crypto_tfm *tfm)
{
        ctx(tfm)->used = 0;
        ctx(tfm)->bypass = 0;
}

static void padlock_sha_update(struct crypto_tfm *tfm,
                        const uint8_t *data, unsigned int length)
{
        /* Our buffer is always one page. */
        if (unlikely(!ctx(tfm)->bypass &&
                     (ctx(tfm)->used + length > PAGE_SIZE)))
                padlock_sha_bypass(tfm);

        if (unlikely(ctx(tfm)->bypass)) {
                struct scatterlist sg;
                sg_init_one(&sg, (uint8_t *)data, length);
                crypto_hash_update(&ctx(tfm)->fallback, &sg, length);
                return;
        }

        memcpy(ctx(tfm)->data + ctx(tfm)->used, data, length);
        ctx(tfm)->used += length;
}

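/*
 * PadLock leaves the hash state as 32-bit words in little-endian CPU
 * order, while SHA-1/SHA-256 digests are defined big-endian, so each
 * word is byte-swapped on its way to the caller's digest buffer.
 */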
static inline void padlock_output_block(uint32_t *src,
                        uint32_t *dst, size_t count)
{
        while (count--)
                *dst++ = swab32(*src++);
}

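/*
 * The asm below spells the PadLock "rep xsha1" / "rep xsha256"
 * instructions as raw opcode bytes so the file assembles even with
 * toolchains that do not know the PadLock extensions. ESI points at
 * the input, EDI at the aligned state/scratch buffer (which doubles
 * as output), ECX holds the byte count, and EAX = 0 tells the
 * microcode to hash the complete message, padding included.
 */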
static void padlock_do_sha1(const char *in, char *out, int count)
{
        /* We can't store directly to *out as it may be unaligned. */
        /* BTW Don't reduce the buffer size below 128 Bytes!
         *     PadLock microcode needs it that big. */
        char buf[128+16];
        char *result = NEAREST_ALIGNED(buf);

        ((uint32_t *)result)[0] = SHA1_H0;
        ((uint32_t *)result)[1] = SHA1_H1;
        ((uint32_t *)result)[2] = SHA1_H2;
        ((uint32_t *)result)[3] = SHA1_H3;
        ((uint32_t *)result)[4] = SHA1_H4;

        asm volatile (".byte 0xf3,0x0f,0xa6,0xc8" /* rep xsha1 */
                      : "+S"(in), "+D"(result)
                      : "c"(count), "a"(0));

        padlock_output_block((uint32_t *)result, (uint32_t *)out, 5);
}

static void padlock_do_sha256(const char *in, char *out, int count)
{
        /* We can't store directly to *out as it may be unaligned. */
        /* BTW Don't reduce the buffer size below 128 Bytes!
         *     PadLock microcode needs it that big. */
        char buf[128+16];
        char *result = NEAREST_ALIGNED(buf);

        ((uint32_t *)result)[0] = SHA256_H0;
        ((uint32_t *)result)[1] = SHA256_H1;
        ((uint32_t *)result)[2] = SHA256_H2;
        ((uint32_t *)result)[3] = SHA256_H3;
        ((uint32_t *)result)[4] = SHA256_H4;
        ((uint32_t *)result)[5] = SHA256_H5;
        ((uint32_t *)result)[6] = SHA256_H6;
        ((uint32_t *)result)[7] = SHA256_H7;

        asm volatile (".byte 0xf3,0x0f,0xa6,0xd0" /* rep xsha256 */
                      : "+S"(in), "+D"(result)
                      : "c"(count), "a"(0));

        padlock_output_block((uint32_t *)result, (uint32_t *)out, 8);
}

static void padlock_sha_final(struct crypto_tfm *tfm, uint8_t *out)
{
        if (unlikely(ctx(tfm)->bypass)) {
                crypto_hash_final(&ctx(tfm)->fallback, out);
                ctx(tfm)->bypass = 0;
                return;
        }

        /* Pass the input buffer to PadLock microcode... */
        ctx(tfm)->f_sha_padlock(ctx(tfm)->data, out, ctx(tfm)->used);

        ctx(tfm)->used = 0;
}

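/*
 * The fallback is requested by cra_name ("sha1"/"sha256"), not by a
 * specific driver name. Putting CRYPTO_ALG_NEED_FALLBACK in the mask
 * while leaving the corresponding type bit clear makes the crypto API
 * pick an implementation that does not itself need a fallback, so
 * this driver can never be selected as its own fallback.
 */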
static int padlock_cra_init(struct crypto_tfm *tfm)
{
        const char *fallback_driver_name = tfm->__crt_alg->cra_name;
        struct crypto_hash *fallback_tfm;

        /* For now we'll allocate one page. This
         * could eventually be configurable one day. */
        ctx(tfm)->data = (char *)__get_free_page(GFP_KERNEL);
        if (!ctx(tfm)->data)
                return -ENOMEM;

        /* Allocate a fallback and abort if it failed. */
        fallback_tfm = crypto_alloc_hash(fallback_driver_name, 0,
                                         CRYPTO_ALG_ASYNC |
                                         CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(fallback_tfm)) {
                printk(KERN_WARNING PFX "Fallback driver '%s' could not be loaded!\n",
                       fallback_driver_name);
                free_page((unsigned long)(ctx(tfm)->data));
                return PTR_ERR(fallback_tfm);
        }

        ctx(tfm)->fallback.tfm = fallback_tfm;
        return 0;
}

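/*
 * The two wrappers below differ only in which low-level routine they
 * hook into f_sha_padlock; buffering, bypass and finalization are
 * shared between SHA-1 and SHA-256.
 */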
static int padlock_sha1_cra_init(struct crypto_tfm *tfm)
{
        ctx(tfm)->f_sha_padlock = padlock_do_sha1;

        return padlock_cra_init(tfm);
}

static int padlock_sha256_cra_init(struct crypto_tfm *tfm)
{
        ctx(tfm)->f_sha_padlock = padlock_do_sha256;

        return padlock_cra_init(tfm);
}

static void padlock_cra_exit(struct crypto_tfm *tfm)
{
        if (ctx(tfm)->data) {
                free_page((unsigned long)(ctx(tfm)->data));
                ctx(tfm)->data = NULL;
        }

        crypto_free_hash(ctx(tfm)->fallback.tfm);
        ctx(tfm)->fallback.tfm = NULL;
}

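/*
 * Both algorithms register under the generic names with a priority
 * (PADLOCK_CRA_PRIORITY) above the generic C implementations, so a
 * plain request for "sha1" or "sha256" resolves to the hardware
 * versions; CRYPTO_ALG_NEED_FALLBACK advertises that they depend on
 * a software fallback being available.
 */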
static struct crypto_alg sha1_alg = {
        .cra_name               =       "sha1",
        .cra_driver_name        =       "sha1-padlock",
        .cra_priority           =       PADLOCK_CRA_PRIORITY,
        .cra_flags              =       CRYPTO_ALG_TYPE_DIGEST |
                                        CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          =       SHA1_BLOCK_SIZE,
        .cra_ctxsize            =       sizeof(struct padlock_sha_ctx),
        .cra_module             =       THIS_MODULE,
        .cra_list               =       LIST_HEAD_INIT(sha1_alg.cra_list),
        .cra_init               =       padlock_sha1_cra_init,
        .cra_exit               =       padlock_cra_exit,
        .cra_u                  =       {
                .digest = {
                        .dia_digestsize =       SHA1_DIGEST_SIZE,
                        .dia_init       =       padlock_sha_init,
                        .dia_update     =       padlock_sha_update,
                        .dia_final      =       padlock_sha_final,
                }
        }
};

static struct crypto_alg sha256_alg = {
        .cra_name               =       "sha256",
        .cra_driver_name        =       "sha256-padlock",
        .cra_priority           =       PADLOCK_CRA_PRIORITY,
        .cra_flags              =       CRYPTO_ALG_TYPE_DIGEST |
                                        CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          =       SHA256_BLOCK_SIZE,
        .cra_ctxsize            =       sizeof(struct padlock_sha_ctx),
        .cra_module             =       THIS_MODULE,
        .cra_list               =       LIST_HEAD_INIT(sha256_alg.cra_list),
        .cra_init               =       padlock_sha256_cra_init,
        .cra_exit               =       padlock_cra_exit,
        .cra_u                  =       {
                .digest = {
                        .dia_digestsize =       SHA256_DIGEST_SIZE,
                        .dia_init       =       padlock_sha_init,
                        .dia_update     =       padlock_sha_update,
                        .dia_final      =       padlock_sha_final,
                }
        }
};

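/*
 * cpu_has_phe / cpu_has_phe_enabled test the PadLock Hash Engine
 * feature bits reported by VIA's extended CPUID leaf: one bit for
 * presence, one for whether the engine has actually been enabled.
 * On anything else the module refuses to load with -ENODEV.
 */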
static int __init padlock_init(void)
{
        int rc = -ENODEV;

        if (!cpu_has_phe) {
                printk(KERN_ERR PFX "VIA PadLock Hash Engine not detected.\n");
                return -ENODEV;
        }

        if (!cpu_has_phe_enabled) {
                printk(KERN_ERR PFX "VIA PadLock detected, but not enabled. Hmm, strange...\n");
                return -ENODEV;
        }

        rc = crypto_register_alg(&sha1_alg);
        if (rc)
                goto out;

        rc = crypto_register_alg(&sha256_alg);
        if (rc)
                goto out_unreg1;

        printk(KERN_NOTICE PFX "Using VIA PadLock ACE for SHA1/SHA256 algorithms.\n");

        return 0;

out_unreg1:
        crypto_unregister_alg(&sha1_alg);
out:
        printk(KERN_ERR PFX "VIA PadLock SHA1/SHA256 initialization failed.\n");
        return rc;
}

static void __exit padlock_fini(void)
{
        crypto_unregister_alg(&sha1_alg);
        crypto_unregister_alg(&sha256_alg);
}

module_init(padlock_init);
module_exit(padlock_fini);

MODULE_DESCRIPTION("VIA PadLock SHA1/SHA256 algorithms support.");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Michal Ludvig");

MODULE_ALIAS("sha1");
MODULE_ALIAS("sha256");
MODULE_ALIAS("sha1-padlock");
MODULE_ALIAS("sha256-padlock");
