/*
 *  linux/net/sunrpc/gss_krb5_crypto.c
 *
 *  Copyright (c) 2000 The Regents of the University of Michigan.
 *  All rights reserved.
 *
 *  Andy Adamson   <andros@umich.edu>
 *  Bruce Fields   <bfields@umich.edu>
 */

/*
 * Copyright (C) 1998 by the FundsXpress, INC.
 *
 * All rights reserved.
 *
 * Export of this software from the United States of America may require
 * a specific license from the United States Government.  It is the
 * responsibility of any person or organization contemplating export to
 * obtain such a license before exporting.
 *
 * WITHIN THAT CONSTRAINT, permission to use, copy, modify, and
 * distribute this software and its documentation for any purpose and
 * without fee is hereby granted, provided that the above copyright
 * notice appear in all copies and that both that copyright notice and
 * this permission notice appear in supporting documentation, and that
 * the name of FundsXpress. not be used in advertising or publicity pertaining
 * to distribution of the software without specific, written prior
 * permission.  FundsXpress makes no representations about the suitability of
 * this software for any purpose.  It is provided "as is" without express
 * or implied warranty.
 *
 * THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
 * WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 */

#include <linux/types.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <linux/highmem.h>
#include <linux/pagemap.h>
#include <linux/sunrpc/gss_krb5.h>

#ifdef RPC_DEBUG
# define RPCDBG_FACILITY        RPCDBG_AUTH
#endif

u32
krb5_encrypt(
        struct crypto_tfm *tfm,
        void *iv,
        void *in,
        void *out,
        int length)
{
        u32 ret = -EINVAL;
        struct scatterlist sg[1];
        u8 local_iv[16] = {0};

        dprintk("RPC:      krb5_encrypt: input data:\n");
        print_hexl((u32 *)in, length, 0);

        if (length % crypto_tfm_alg_blocksize(tfm) != 0)
                goto out;

        if (crypto_tfm_alg_ivsize(tfm) > 16) {
                dprintk("RPC:      krb5_encrypt: tfm iv size too large %d\n",
                         crypto_tfm_alg_ivsize(tfm));
                goto out;
        }

        if (iv)
                memcpy(local_iv, iv, crypto_tfm_alg_ivsize(tfm));

        memcpy(out, in, length);
        sg_set_buf(sg, out, length);

        ret = crypto_cipher_encrypt_iv(tfm, sg, sg, length, local_iv);

        dprintk("RPC:      krb5_encrypt: output data:\n");
        print_hexl((u32 *)out, length, 0);
out:
        dprintk("RPC:      krb5_encrypt returns %d\n", ret);
        return ret;
}

EXPORT_SYMBOL(krb5_encrypt);
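
/*
 * Example (illustrative sketch, not part of the original file): encrypting
 * one 8-byte block with a DES-CBC tfm, as callers of krb5_encrypt are
 * expected to do.  The algorithm name, tfm flags and the all-zero key
 * below are assumptions made only for this example.
 *
 *      struct crypto_tfm *tfm;
 *      u8 key[8] = { 0 };              (hypothetical session key)
 *      u8 in[8] = { 0 };               (one 8-byte block of plaintext)
 *      u8 out[8];
 *
 *      tfm = crypto_alloc_tfm("des", CRYPTO_TFM_MODE_CBC);
 *      if (tfm == NULL)
 *              return GSS_S_FAILURE;
 *      crypto_cipher_setkey(tfm, key, sizeof(key));
 *      if (krb5_encrypt(tfm, NULL, in, out, sizeof(in)))  (NULL iv => zero IV)
 *              printk("krb5_encrypt failed\n");
 *      crypto_free_tfm(tfm);
 */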

u32
krb5_decrypt(
        struct crypto_tfm *tfm,
        void *iv,
        void *in,
        void *out,
        int length)
{
        u32 ret = -EINVAL;
        struct scatterlist sg[1];
        u8 local_iv[16] = {0};

        dprintk("RPC:      krb5_decrypt: input data:\n");
        print_hexl((u32 *)in, length, 0);

        if (length % crypto_tfm_alg_blocksize(tfm) != 0)
                goto out;

        if (crypto_tfm_alg_ivsize(tfm) > 16) {
                dprintk("RPC:      krb5_decrypt: tfm iv size too large %d\n",
                        crypto_tfm_alg_ivsize(tfm));
                goto out;
        }
        if (iv)
                memcpy(local_iv, iv, crypto_tfm_alg_ivsize(tfm));

        memcpy(out, in, length);
        sg_set_buf(sg, out, length);

        ret = crypto_cipher_decrypt_iv(tfm, sg, sg, length, local_iv);

        dprintk("RPC:      krb5_decrypt: output data:\n");
        print_hexl((u32 *)out, length, 0);
out:
        dprintk("RPC:      krb5_decrypt returns %d\n", ret);
        return ret;
}

EXPORT_SYMBOL(krb5_decrypt);

/*
 * Walk the head, page and tail sections of an xdr_buf, calling "actor"
 * on a scatterlist fragment for each contiguous piece of the requested
 * [offset, offset + len) range.
 */
static int
process_xdr_buf(struct xdr_buf *buf, int offset, int len,
                int (*actor)(struct scatterlist *, void *), void *data)
{
        int i, page_len, thislen, page_offset, ret = 0;
        struct scatterlist      sg[1];

        if (offset >= buf->head[0].iov_len) {
                offset -= buf->head[0].iov_len;
        } else {
                thislen = buf->head[0].iov_len - offset;
                if (thislen > len)
                        thislen = len;
                sg_set_buf(sg, buf->head[0].iov_base + offset, thislen);
                ret = actor(sg, data);
                if (ret)
                        goto out;
                offset = 0;
                len -= thislen;
        }
        if (len == 0)
                goto out;

        if (offset >= buf->page_len) {
                offset -= buf->page_len;
        } else {
                page_len = buf->page_len - offset;
                if (page_len > len)
                        page_len = len;
                len -= page_len;
                page_offset = (offset + buf->page_base) & (PAGE_CACHE_SIZE - 1);
                i = (offset + buf->page_base) >> PAGE_CACHE_SHIFT;
                thislen = PAGE_CACHE_SIZE - page_offset;
                do {
                        if (thislen > page_len)
                                thislen = page_len;
                        sg->page = buf->pages[i];
                        sg->offset = page_offset;
                        sg->length = thislen;
                        ret = actor(sg, data);
                        if (ret)
                                goto out;
                        page_len -= thislen;
                        i++;
                        page_offset = 0;
                        thislen = PAGE_CACHE_SIZE;
                } while (page_len != 0);
                offset = 0;
        }
        if (len == 0)
                goto out;

        if (offset < buf->tail[0].iov_len) {
                thislen = buf->tail[0].iov_len - offset;
                if (thislen > len)
                        thislen = len;
                sg_set_buf(sg, buf->tail[0].iov_base + offset, thislen);
                ret = actor(sg, data);
                len -= thislen;
        }
        if (len != 0)
                ret = -EINVAL;
out:
        return ret;
}
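
/*
 * Example actor (illustrative sketch, not part of the original file): a
 * hypothetical callback that just adds up the length of every fragment
 * process_xdr_buf hands it, showing the calling convention used by the
 * checksummer, encryptor and decryptor callbacks below.
 *
 *      static int count_bytes(struct scatterlist *sg, void *data)
 *      {
 *              *(int *)data += sg->length;
 *              return 0;
 *      }
 *
 *      int total = 0;
 *      process_xdr_buf(buf, 0, buf->len, count_bytes, &total);
 */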

/* process_xdr_buf actor: feed one fragment into the digest */
static int
checksummer(struct scatterlist *sg, void *data)
{
        struct crypto_tfm *tfm = (struct crypto_tfm *)data;

        crypto_digest_update(tfm, sg, 1);

        return 0;
}

/* checksum the plaintext data and hdrlen bytes of the token header */
s32
make_checksum(s32 cksumtype, char *header, int hdrlen, struct xdr_buf *body,
                   int body_offset, struct xdr_netobj *cksum)
{
        char                            *cksumname;
        struct crypto_tfm               *tfm = NULL; /* XXX add to ctx? */
        struct scatterlist              sg[1];

        switch (cksumtype) {
                case CKSUMTYPE_RSA_MD5:
                        cksumname = "md5";
                        break;
                default:
                        dprintk("RPC:      krb5_make_checksum:"
                                " unsupported checksum %d\n", cksumtype);
                        return GSS_S_FAILURE;
        }
        if (!(tfm = crypto_alloc_tfm(cksumname, CRYPTO_TFM_REQ_MAY_SLEEP)))
                return GSS_S_FAILURE;
        cksum->len = crypto_tfm_alg_digestsize(tfm);

        crypto_digest_init(tfm);
        sg_set_buf(sg, header, hdrlen);
        crypto_digest_update(tfm, sg, 1);
        process_xdr_buf(body, body_offset, body->len - body_offset,
                        checksummer, tfm);
        crypto_digest_final(tfm, cksum->data);
        crypto_free_tfm(tfm);
        return 0;
}

EXPORT_SYMBOL(make_checksum);
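
/*
 * Example (illustrative sketch, not part of the original file): computing
 * the MD5 checksum over an 8-byte token header followed by the whole
 * xdr_buf.  The names cksumdata, header and buf are hypothetical stand-ins
 * for what the get_mic/verify_mic callers would supply.
 *
 *      char cksumdata[16];
 *      struct xdr_netobj md5cksum = { .len = 0, .data = cksumdata };
 *
 *      if (make_checksum(CKSUMTYPE_RSA_MD5, header, 8, buf, 0, &md5cksum))
 *              return GSS_S_FAILURE;
 */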

struct encryptor_desc {
        u8 iv[8]; /* XXX hard-coded blocksize */
        struct crypto_tfm *tfm;
        int pos;
        struct xdr_buf *outbuf;
        struct page **pages;
        struct scatterlist infrags[4];
        struct scatterlist outfrags[4];
        int fragno;
        int fraglen;
};

static int
encryptor(struct scatterlist *sg, void *data)
{
        struct encryptor_desc *desc = data;
        struct xdr_buf *outbuf = desc->outbuf;
        struct page *in_page;
        int thislen = desc->fraglen + sg->length;
        int fraglen, ret;
        int page_pos;

        /* Worst case is 4 fragments: head, end of page 1, start
         * of page 2, tail.  Anything more is a bug. */
        BUG_ON(desc->fragno > 3);
        desc->infrags[desc->fragno] = *sg;
        desc->outfrags[desc->fragno] = *sg;

        page_pos = desc->pos - outbuf->head[0].iov_len;
        if (page_pos >= 0 && page_pos < outbuf->page_len) {
                /* pages are not in place: */
                int i = (page_pos + outbuf->page_base) >> PAGE_CACHE_SHIFT;
                in_page = desc->pages[i];
        } else {
                in_page = sg->page;
        }
        desc->infrags[desc->fragno].page = in_page;
        desc->fragno++;
        desc->fraglen += sg->length;
        desc->pos += sg->length;

        fraglen = thislen & 7; /* XXX hardcoded blocksize */
        thislen -= fraglen;

        if (thislen == 0)
                return 0;

        ret = crypto_cipher_encrypt_iv(desc->tfm, desc->outfrags, desc->infrags,
                                        thislen, desc->iv);
        if (ret)
                return ret;
        if (fraglen) {
                desc->outfrags[0].page = sg->page;
                desc->outfrags[0].offset = sg->offset + sg->length - fraglen;
                desc->outfrags[0].length = fraglen;
                desc->infrags[0] = desc->outfrags[0];
                desc->infrags[0].page = in_page;
                desc->fragno = 1;
                desc->fraglen = fraglen;
        } else {
                desc->fragno = 0;
                desc->fraglen = 0;
        }
        return 0;
}
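
/*
 * Worked example of the carry-over logic above (illustration only): with
 * the hard-coded 8-byte blocksize, if the fragments gathered so far add
 * up to thislen = 20 bytes, then fraglen = 20 & 7 = 4.  The leading 16
 * bytes are encrypted in place now, and the trailing 4 bytes become
 * fragment 0 of the next call, encrypted only once enough further data
 * arrives to complete a block.
 */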

int
gss_encrypt_xdr_buf(struct crypto_tfm *tfm, struct xdr_buf *buf, int offset,
                struct page **pages)
{
        int ret;
        struct encryptor_desc desc;

        BUG_ON((buf->len - offset) % crypto_tfm_alg_blocksize(tfm) != 0);

        memset(desc.iv, 0, sizeof(desc.iv));
        desc.tfm = tfm;
        desc.pos = offset;
        desc.outbuf = buf;
        desc.pages = pages;
        desc.fragno = 0;
        desc.fraglen = 0;

        ret = process_xdr_buf(buf, offset, buf->len - offset, encryptor, &desc);
        return ret;
}

EXPORT_SYMBOL(gss_encrypt_xdr_buf);
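
/*
 * Example (illustrative sketch, not part of the original file): encrypting
 * the payload of a wrap token in place.  kctx->enc is assumed here to be
 * the Kerberos context's encryption tfm, and offset/pages are hypothetical
 * values supplied by the wrap path.
 *
 *      if (gss_encrypt_xdr_buf(kctx->enc, buf, offset, pages))
 *              return GSS_S_FAILURE;
 */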

struct decryptor_desc {
        u8 iv[8]; /* XXX hard-coded blocksize */
        struct crypto_tfm *tfm;
        struct scatterlist frags[4];
        int fragno;
        int fraglen;
};

static int
decryptor(struct scatterlist *sg, void *data)
{
        struct decryptor_desc *desc = data;
        int thislen = desc->fraglen + sg->length;
        int fraglen, ret;

        /* Worst case is 4 fragments: head, end of page 1, start
         * of page 2, tail.  Anything more is a bug. */
        BUG_ON(desc->fragno > 3);
        desc->frags[desc->fragno] = *sg;
        desc->fragno++;
        desc->fraglen += sg->length;

        fraglen = thislen & 7; /* XXX hardcoded blocksize */
        thislen -= fraglen;

        if (thislen == 0)
                return 0;

        ret = crypto_cipher_decrypt_iv(desc->tfm, desc->frags, desc->frags,
                                        thislen, desc->iv);
        if (ret)
                return ret;
        if (fraglen) {
                desc->frags[0].page = sg->page;
                desc->frags[0].offset = sg->offset + sg->length - fraglen;
                desc->frags[0].length = fraglen;
                desc->fragno = 1;
                desc->fraglen = fraglen;
        } else {
                desc->fragno = 0;
                desc->fraglen = 0;
        }
        return 0;
}

int
gss_decrypt_xdr_buf(struct crypto_tfm *tfm, struct xdr_buf *buf, int offset)
{
        struct decryptor_desc desc;

        /* XXXJBF: */
        BUG_ON((buf->len - offset) % crypto_tfm_alg_blocksize(tfm) != 0);

        memset(desc.iv, 0, sizeof(desc.iv));
        desc.tfm = tfm;
        desc.fragno = 0;
        desc.fraglen = 0;
        return process_xdr_buf(buf, offset, buf->len - offset, decryptor, &desc);
}

EXPORT_SYMBOL(gss_decrypt_xdr_buf);