Diffstat (limited to 'linden/indra/libgcrypt/libgcrypt-1.2.2/cipher/cipher.c')

 linden/indra/libgcrypt/libgcrypt-1.2.2/cipher/cipher.c (mode -rwxr-xr-x => -rw-r--r--) | 2794 +-
 1 file changed, 1397 insertions(+), 1397 deletions(-)

diff --git a/linden/indra/libgcrypt/libgcrypt-1.2.2/cipher/cipher.c b/linden/indra/libgcrypt/libgcrypt-1.2.2/cipher/cipher.c
old mode 100755
new mode 100644
index 2b8491c..8698c50
--- a/linden/indra/libgcrypt/libgcrypt-1.2.2/cipher/cipher.c
+++ b/linden/indra/libgcrypt/libgcrypt-1.2.2/cipher/cipher.c
@@ -1,1397 +1,1397 @@
/* cipher.c - cipher dispatcher
 * Copyright (C) 1998,1999,2000,2001,2002,2003 Free Software Foundation, Inc.
 *
 * This file is part of Libgcrypt.
 *
 * Libgcrypt is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser general Public License as
 * published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * Libgcrypt is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA
 */

#include <config.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <assert.h>

#include "g10lib.h"
#include "cipher.h"
#include "ath.h"

#define MAX_BLOCKSIZE 16
#define TABLE_SIZE 14
#define CTX_MAGIC_NORMAL 0x24091964
#define CTX_MAGIC_SECURE 0x46919042

/* This is the list of the default ciphers, which are included in
   libgcrypt.  */
static struct cipher_table_entry
{
  gcry_cipher_spec_t *cipher;
  unsigned int algorithm;
} cipher_table[] =
  {
#if USE_BLOWFISH
    { &_gcry_cipher_spec_blowfish,   GCRY_CIPHER_BLOWFISH },
#endif
#if USE_DES
    { &_gcry_cipher_spec_des,        GCRY_CIPHER_DES },
    { &_gcry_cipher_spec_tripledes,  GCRY_CIPHER_3DES },
#endif
#if USE_ARCFOUR
    { &_gcry_cipher_spec_arcfour,    GCRY_CIPHER_ARCFOUR },
#endif
#if USE_CAST5
    { &_gcry_cipher_spec_cast5,      GCRY_CIPHER_CAST5 },
#endif
#if USE_AES
    { &_gcry_cipher_spec_aes,        GCRY_CIPHER_AES },
    { &_gcry_cipher_spec_aes192,     GCRY_CIPHER_AES192 },
    { &_gcry_cipher_spec_aes256,     GCRY_CIPHER_AES256 },
#endif
#if USE_TWOFISH
    { &_gcry_cipher_spec_twofish,    GCRY_CIPHER_TWOFISH },
    { &_gcry_cipher_spec_twofish128, GCRY_CIPHER_TWOFISH128 },
#endif
#if USE_SERPENT
    { &_gcry_cipher_spec_serpent128, GCRY_CIPHER_SERPENT128 },
    { &_gcry_cipher_spec_serpent192, GCRY_CIPHER_SERPENT192 },
    { &_gcry_cipher_spec_serpent256, GCRY_CIPHER_SERPENT256 },
#endif
#ifdef USE_RFC2268
    { &_gcry_cipher_spec_rfc2268_40, GCRY_CIPHER_RFC2268_40 },
#endif
    { NULL },
  };

/* List of registered ciphers.  */
static gcry_module_t ciphers_registered;

/* This is the lock protecting CIPHERS_REGISTERED.  */
static ath_mutex_t ciphers_registered_lock = ATH_MUTEX_INITIALIZER;

/* Flag to check whether the default ciphers have already been
   registered.  */
static int default_ciphers_registered;

/* Convenient macro for registering the default ciphers.  */
#define REGISTER_DEFAULT_CIPHERS                   \
  do                                               \
    {                                              \
      ath_mutex_lock (&ciphers_registered_lock);   \
      if (! default_ciphers_registered)            \
        {                                          \
          gcry_cipher_register_default ();         \
          default_ciphers_registered = 1;          \
        }                                          \
      ath_mutex_unlock (&ciphers_registered_lock); \
    }                                              \
  while (0)

/* The handle structure.  */
struct gcry_cipher_handle
{
  int magic;
  size_t actual_handle_size;     /* Allocated size of this handle.  */
  gcry_cipher_spec_t *cipher;
  gcry_module_t module;
  int mode;
  unsigned int flags;
  unsigned char iv[MAX_BLOCKSIZE];      /* (this should be ulong aligned) */
  unsigned char lastiv[MAX_BLOCKSIZE];
  int unused;  /* in IV */
  unsigned char ctr[MAX_BLOCKSIZE];     /* For Counter (CTR) mode.  */
  PROPERLY_ALIGNED_TYPE context;
};


/* These dummy functions are used in case a cipher implementation
   refuses to provide its own functions.  */

static gcry_err_code_t
dummy_setkey (void *c, const unsigned char *key, unsigned keylen)
{
  return GPG_ERR_NO_ERROR;
}

static void
dummy_encrypt_block (void *c,
                     unsigned char *outbuf, const unsigned char *inbuf)
{
  BUG();
}

static void
dummy_decrypt_block (void *c,
                     unsigned char *outbuf, const unsigned char *inbuf)
{
  BUG();
}

static void
dummy_encrypt_stream (void *c,
                      unsigned char *outbuf, const unsigned char *inbuf,
                      unsigned int n)
{
  BUG();
}

static void
dummy_decrypt_stream (void *c,
                      unsigned char *outbuf, const unsigned char *inbuf,
                      unsigned int n)
{
  BUG();
}


/* Internal function.  Register all the ciphers included in
   CIPHER_TABLE.  Note that this function is only used by the macro
   REGISTER_DEFAULT_CIPHERS, which protects it with a mutex.  */
static void
gcry_cipher_register_default (void)
{
  gcry_err_code_t err = GPG_ERR_NO_ERROR;
  int i;

  for (i = 0; !err && cipher_table[i].cipher; i++)
    {
      if (! cipher_table[i].cipher->setkey)
        cipher_table[i].cipher->setkey = dummy_setkey;
      if (! cipher_table[i].cipher->encrypt)
        cipher_table[i].cipher->encrypt = dummy_encrypt_block;
      if (! cipher_table[i].cipher->decrypt)
        cipher_table[i].cipher->decrypt = dummy_decrypt_block;
      if (! cipher_table[i].cipher->stencrypt)
        cipher_table[i].cipher->stencrypt = dummy_encrypt_stream;
      if (! cipher_table[i].cipher->stdecrypt)
        cipher_table[i].cipher->stdecrypt = dummy_decrypt_stream;

      err = _gcry_module_add (&ciphers_registered,
                              cipher_table[i].algorithm,
                              (void *) cipher_table[i].cipher,
                              NULL);
    }

  if (err)
    BUG ();
}

/* Internal callback function.  Used via _gcry_module_lookup.  */
static int
gcry_cipher_lookup_func_name (void *spec, void *data)
{
  gcry_cipher_spec_t *cipher = (gcry_cipher_spec_t *) spec;
  char *name = (char *) data;
  const char **aliases = cipher->aliases;
  int i, ret = ! stricmp (name, cipher->name);

  if (aliases)
    for (i = 0; aliases[i] && (! ret); i++)
      ret = ! stricmp (name, aliases[i]);

  return ret;
}

/* Internal callback function.  Used via _gcry_module_lookup.  */
static int
gcry_cipher_lookup_func_oid (void *spec, void *data)
{
  gcry_cipher_spec_t *cipher = (gcry_cipher_spec_t *) spec;
  char *oid = (char *) data;
  gcry_cipher_oid_spec_t *oid_specs = cipher->oids;
  int ret = 0, i;

  if (oid_specs)
    for (i = 0; oid_specs[i].oid && (! ret); i++)
      if (! stricmp (oid, oid_specs[i].oid))
        ret = 1;

  return ret;
}

/* Internal function.  Lookup a cipher entry by its name.  */
static gcry_module_t
gcry_cipher_lookup_name (const char *name)
{
  gcry_module_t cipher;

  cipher = _gcry_module_lookup (ciphers_registered, (void *) name,
                                gcry_cipher_lookup_func_name);

  return cipher;
}

/* Internal function.  Lookup a cipher entry by its OID.  */
static gcry_module_t
gcry_cipher_lookup_oid (const char *oid)
{
  gcry_module_t cipher;

  cipher = _gcry_module_lookup (ciphers_registered, (void *) oid,
                                gcry_cipher_lookup_func_oid);

  return cipher;
}

/* Register a new cipher module whose specification can be found in
   CIPHER.  On success, a new algorithm ID is stored in ALGORITHM_ID
   and a pointer representing this module is stored in MODULE.  */
gcry_error_t
gcry_cipher_register (gcry_cipher_spec_t *cipher,
                      int *algorithm_id,
                      gcry_module_t *module)
{
  gcry_err_code_t err = 0;
  gcry_module_t mod;

  ath_mutex_lock (&ciphers_registered_lock);
  err = _gcry_module_add (&ciphers_registered, 0,
                          (void *) cipher, &mod);
  ath_mutex_unlock (&ciphers_registered_lock);

  if (! err)
    {
      *module = mod;
      *algorithm_id = mod->mod_id;
    }

  return gcry_error (err);
}
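
/* Editor's sketch (not part of the original file): a minimal example of
   how a caller might register an out-of-tree cipher through
   gcry_cipher_register() and drop it again.  Only spec fields that this
   dispatcher itself accesses (name, blocksize, keylen, contextsize,
   setkey, encrypt, decrypt) are filled in; the "TOY" name, the 64-bit
   block/128-bit key sizes and the XOR "cipher" are illustrative
   assumptions, not a real algorithm.  */
typedef struct { unsigned char k[16]; } toy_ctx_t;

static gcry_err_code_t
toy_setkey (void *ctx, const unsigned char *key, unsigned keylen)
{
  memcpy (((toy_ctx_t *) ctx)->k, key, keylen < 16 ? keylen : 16);
  return GPG_ERR_NO_ERROR;
}

static void
toy_crypt (void *ctx, unsigned char *out, const unsigned char *in)
{
  int i;  /* Toy transform: XOR one 8-byte block with the key prefix.  */
  for (i = 0; i < 8; i++)
    out[i] = in[i] ^ ((toy_ctx_t *) ctx)->k[i];
}

static gcry_cipher_spec_t toy_spec;

static void
example_register_toy_cipher (void)
{
  int algo;
  gcry_module_t mod;

  toy_spec.name = "TOY";
  toy_spec.blocksize = 8;
  toy_spec.keylen = 128;                 /* In bits, as for the built-ins.  */
  toy_spec.contextsize = sizeof (toy_ctx_t);
  toy_spec.setkey = toy_setkey;
  toy_spec.encrypt = toy_crypt;
  toy_spec.decrypt = toy_crypt;

  if (gcry_cipher_register (&toy_spec, &algo, &mod))
    return;                              /* Registration failed.  */
  /* gcry_cipher_map_name ("TOY") would now return ALGO.  */
  gcry_cipher_unregister (mod);          /* Drop it again when done.  */
}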

/* Unregister the cipher identified by MODULE, which must have been
   registered with gcry_cipher_register.  */
void
gcry_cipher_unregister (gcry_module_t module)
{
  ath_mutex_lock (&ciphers_registered_lock);
  _gcry_module_release (module);
  ath_mutex_unlock (&ciphers_registered_lock);
}

/* Locate the OID in the OID table; return 1 when found and 0
   otherwise.  An optional "oid." or "OID." prefix in OID is ignored,
   the OID is expected to be in standard IETF dotted notation.  The
   internal algorithm number is returned in ALGORITHM unless it is
   passed as NULL.  A pointer to the specification of the module
   implementing this algorithm is returned in OID_SPEC unless passed as
   NULL.  */
static int
search_oid (const char *oid, int *algorithm, gcry_cipher_oid_spec_t *oid_spec)
{
  gcry_module_t module;
  int ret = 0;

  if (oid && ((! strncmp (oid, "oid.", 4))
              || (! strncmp (oid, "OID.", 4))))
    oid += 4;

  module = gcry_cipher_lookup_oid (oid);
  if (module)
    {
      gcry_cipher_spec_t *cipher = module->spec;
      int i;

      for (i = 0; cipher->oids[i].oid && !ret; i++)
        if (! stricmp (oid, cipher->oids[i].oid))
          {
            if (algorithm)
              *algorithm = module->mod_id;
            if (oid_spec)
              *oid_spec = cipher->oids[i];
            ret = 1;
          }
      _gcry_module_release (module);
    }

  return ret;
}

/* Map STRING to the cipher algorithm identifier.  Returns the
   algorithm ID of the cipher for the given name or 0 if the name is
   not known.  It is valid to pass NULL for STRING which results in a
   return value of 0.  */
int
gcry_cipher_map_name (const char *string)
{
  gcry_module_t cipher;
  int ret, algorithm = 0;

  if (! string)
    return 0;

  REGISTER_DEFAULT_CIPHERS;

  /* If the string starts with a digit (optionally prefixed with
     either "OID." or "oid."), we first look into our table of ASN.1
     object identifiers to figure out the algorithm.  */

  ath_mutex_lock (&ciphers_registered_lock);

  ret = search_oid (string, &algorithm, NULL);
  if (! ret)
    {
      cipher = gcry_cipher_lookup_name (string);
      if (cipher)
        {
          algorithm = cipher->mod_id;
          _gcry_module_release (cipher);
        }
    }

  ath_mutex_unlock (&ciphers_registered_lock);

  return algorithm;
}


/* Given a STRING with an OID in dotted decimal notation, this
   function returns the cipher mode (GCRY_CIPHER_MODE_*) associated
   with that OID or 0 if no mode is known.  Passing NULL for string
   yields a return value of 0.  */
int
gcry_cipher_mode_from_oid (const char *string)
{
  gcry_cipher_oid_spec_t oid_spec;
  int ret = 0, mode = 0;

  if (!string)
    return 0;

  ath_mutex_lock (&ciphers_registered_lock);
  ret = search_oid (string, NULL, &oid_spec);
  if (ret)
    mode = oid_spec.mode;
  ath_mutex_unlock (&ciphers_registered_lock);

  return mode;
}
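
/* Editor's sketch (not part of the original file): typical use of the
   OID lookup above.  The OID 2.16.840.1.101.3.4.1.2 (AES-128 in CBC
   mode) is an assumption about which OIDs the enabled cipher modules
   register in this build; unknown OIDs simply yield 0.  */
static int
example_mode_from_oid (void)
{
  /* An optional "oid."/"OID." prefix is accepted, see search_oid().  */
  int mode = gcry_cipher_mode_from_oid ("oid.2.16.840.1.101.3.4.1.2");
  return mode == GCRY_CIPHER_MODE_CBC;   /* Expected when AES is enabled.  */
}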


/* Map the cipher algorithm identifier ALGORITHM to a string
   representing this algorithm.  This string is the default name as
   used by Libgcrypt.  NULL is returned for an unknown algorithm.  */
static const char *
cipher_algo_to_string (int algorithm)
{
  gcry_module_t cipher;
  const char *name = NULL;

  REGISTER_DEFAULT_CIPHERS;

  ath_mutex_lock (&ciphers_registered_lock);
  cipher = _gcry_module_lookup_id (ciphers_registered, algorithm);
  if (cipher)
    {
      name = ((gcry_cipher_spec_t *) cipher->spec)->name;
      _gcry_module_release (cipher);
    }
  ath_mutex_unlock (&ciphers_registered_lock);

  return name;
}

/* Map the cipher algorithm identifier ALGORITHM to a string
   representing this algorithm.  This string is the default name as
   used by Libgcrypt.  A pointer to an empty string is returned for
   an unknown algorithm.  NULL is never returned.  */
const char *
gcry_cipher_algo_name (int algorithm)
{
  const char *s = cipher_algo_to_string (algorithm);
  return s ? s : "";
}
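
/* Editor's sketch (not part of the original file): the usual round trip
   between cipher names and algorithm identifiers.  "AES256" is assumed
   to be known only because USE_AES builds register it in cipher_table
   above; unknown names map to 0 and unknown IDs map to "".  */
static void
example_name_mapping (void)
{
  int algo = gcry_cipher_map_name ("AES256");       /* 0 if AES is disabled. */
  const char *name = gcry_cipher_algo_name (algo);  /* "" for unknown IDs.   */
  (void) name;
}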


/* Flag the cipher algorithm with the identifier ALGORITHM as
   disabled.  There is no error return, the function does nothing for
   unknown algorithms.  Disabled algorithms are virtually not available
   in Libgcrypt.  */
static void
disable_cipher_algo (int algorithm)
{
  gcry_module_t cipher;

  REGISTER_DEFAULT_CIPHERS;

  ath_mutex_lock (&ciphers_registered_lock);
  cipher = _gcry_module_lookup_id (ciphers_registered, algorithm);
  if (cipher)
    {
      if (! (cipher->flags & FLAG_MODULE_DISABLED))
        cipher->flags |= FLAG_MODULE_DISABLED;
      _gcry_module_release (cipher);
    }
  ath_mutex_unlock (&ciphers_registered_lock);
}


/* Return 0 if the cipher algorithm with identifier ALGORITHM is
   available.  Returns a basic error code value if it is not available.  */
static gcry_err_code_t
check_cipher_algo (int algorithm)
{
  gcry_err_code_t err = GPG_ERR_NO_ERROR;
  gcry_module_t cipher;

  REGISTER_DEFAULT_CIPHERS;

  ath_mutex_lock (&ciphers_registered_lock);
  cipher = _gcry_module_lookup_id (ciphers_registered, algorithm);
  if (cipher)
    {
      if (cipher->flags & FLAG_MODULE_DISABLED)
        err = GPG_ERR_CIPHER_ALGO;
      _gcry_module_release (cipher);
    }
  else
    err = GPG_ERR_CIPHER_ALGO;
  ath_mutex_unlock (&ciphers_registered_lock);

  return err;
}


/* Return the standard length of the key for the cipher algorithm with
   the identifier ALGORITHM.  This function expects a valid algorithm
   and will abort if the algorithm is not available or the length of
   the key is not known.  */
static unsigned int
cipher_get_keylen (int algorithm)
{
  gcry_module_t cipher;
  unsigned len = 0;

  REGISTER_DEFAULT_CIPHERS;

  ath_mutex_lock (&ciphers_registered_lock);
  cipher = _gcry_module_lookup_id (ciphers_registered, algorithm);
  if (cipher)
    {
      len = ((gcry_cipher_spec_t *) cipher->spec)->keylen;
      if (! len)
        log_bug ("cipher %d w/o key length\n", algorithm);
      _gcry_module_release (cipher);
    }
  else
    log_bug ("cipher %d not found\n", algorithm);
  ath_mutex_unlock (&ciphers_registered_lock);

  return len;
}

/* Return the block length of the cipher algorithm with the identifier
   ALGORITHM.  This function expects a valid algorithm and will abort
   if the algorithm is not available or the block size is not
   known.  */
static unsigned int
cipher_get_blocksize (int algorithm)
{
  gcry_module_t cipher;
  unsigned len = 0;

  REGISTER_DEFAULT_CIPHERS;

  ath_mutex_lock (&ciphers_registered_lock);
  cipher = _gcry_module_lookup_id (ciphers_registered, algorithm);
  if (cipher)
    {
      len = ((gcry_cipher_spec_t *) cipher->spec)->blocksize;
      if (! len)
        log_bug ("cipher %d w/o blocksize\n", algorithm);
      _gcry_module_release (cipher);
    }
  else
    log_bug ("cipher %d not found\n", algorithm);
  ath_mutex_unlock (&ciphers_registered_lock);

  return len;
}


/*
   Open a cipher handle for use with cipher algorithm ALGORITHM, using
   the cipher mode MODE (one of the GCRY_CIPHER_MODE_*) and return a
   handle in HANDLE.  Put NULL into HANDLE and return an error code if
   something goes wrong.  FLAGS may be used to modify the
   operation.  The defined flags are:

   GCRY_CIPHER_SECURE:  allocate all internal buffers in secure memory.
   GCRY_CIPHER_ENABLE_SYNC:  Enable the sync operation as used in OpenPGP.
   GCRY_CIPHER_CBC_CTS:  Enable CTS mode.
   GCRY_CIPHER_CBC_MAC:  Enable MAC mode.

   Values for these flags may be combined using OR.
 */
gcry_error_t
gcry_cipher_open (gcry_cipher_hd_t *handle,
                  int algo, int mode, unsigned int flags)
{
  int secure = (flags & GCRY_CIPHER_SECURE);
  gcry_cipher_spec_t *cipher = NULL;
  gcry_module_t module = NULL;
  gcry_cipher_hd_t h = NULL;
  gcry_err_code_t err = 0;

  /* If the application failed to call the random poll function, we do
     it here to ensure that it is used once in a while.  */
  _gcry_fast_random_poll ();

  REGISTER_DEFAULT_CIPHERS;

  /* Fetch the corresponding module and check whether the cipher is
     marked available for use.  */
  ath_mutex_lock (&ciphers_registered_lock);
  module = _gcry_module_lookup_id (ciphers_registered, algo);
  if (module)
    {
      /* Found module.  */

      if (module->flags & FLAG_MODULE_DISABLED)
        {
          /* Not available for use.  */
          err = GPG_ERR_CIPHER_ALGO;
          _gcry_module_release (module);
        }
      else
        cipher = (gcry_cipher_spec_t *) module->spec;
    }
  else
    err = GPG_ERR_CIPHER_ALGO;
  ath_mutex_unlock (&ciphers_registered_lock);

  /* check flags */
  if ((! err)
      && ((flags & ~(0
                     | GCRY_CIPHER_SECURE
                     | GCRY_CIPHER_ENABLE_SYNC
                     | GCRY_CIPHER_CBC_CTS
                     | GCRY_CIPHER_CBC_MAC))
          || (flags & GCRY_CIPHER_CBC_CTS & GCRY_CIPHER_CBC_MAC)))
    err = GPG_ERR_CIPHER_ALGO;

  /* check that a valid mode has been requested */
  if (! err)
    switch (mode)
      {
      case GCRY_CIPHER_MODE_ECB:
      case GCRY_CIPHER_MODE_CBC:
      case GCRY_CIPHER_MODE_CFB:
      case GCRY_CIPHER_MODE_CTR:
        if ((cipher->encrypt == dummy_encrypt_block)
            || (cipher->decrypt == dummy_decrypt_block))
          err = GPG_ERR_INV_CIPHER_MODE;
        break;

      case GCRY_CIPHER_MODE_STREAM:
        if ((cipher->stencrypt == dummy_encrypt_stream)
            || (cipher->stdecrypt == dummy_decrypt_stream))
          err = GPG_ERR_INV_CIPHER_MODE;
        break;

      case GCRY_CIPHER_MODE_NONE:
        /* FIXME: issue a warning when this mode is used */
        break;

      default:
        err = GPG_ERR_INV_CIPHER_MODE;
      }

  /* ? FIXME: perform selftest here and mark this with a flag in
     cipher_table ? */

  if (! err)
    {
      size_t size = (sizeof (*h)
                     + 2 * cipher->contextsize
                     - sizeof (PROPERLY_ALIGNED_TYPE));

      if (secure)
        h = gcry_calloc_secure (1, size);
      else
        h = gcry_calloc (1, size);

      if (! h)
        err = gpg_err_code_from_errno (errno);
      else
        {
          h->magic = secure ? CTX_MAGIC_SECURE : CTX_MAGIC_NORMAL;
          h->actual_handle_size = size;
          h->cipher = cipher;
          h->module = module;
          h->mode = mode;
          h->flags = flags;
        }
    }

  /* Done.  */

  if (err)
    {
      if (module)
        {
          /* Release module.  */
          ath_mutex_lock (&ciphers_registered_lock);
          _gcry_module_release (module);
          ath_mutex_unlock (&ciphers_registered_lock);
        }
    }

  *handle = err ? NULL : h;

  return gcry_error (err);
}
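
/* Editor's sketch (not part of the original file): typical caller-side
   use of gcry_cipher_open()/gcry_cipher_close().  The algorithm, mode
   and flag choices (AES-128, CBC, secure memory) are illustrative
   assumptions, not requirements of the API.  */
static gcry_error_t
example_open_close (void)
{
  gcry_cipher_hd_t hd;
  gcry_error_t err;

  err = gcry_cipher_open (&hd, GCRY_CIPHER_AES, GCRY_CIPHER_MODE_CBC,
                          GCRY_CIPHER_SECURE);
  if (err)
    return err;          /* E.g. GPG_ERR_CIPHER_ALGO if AES is disabled.  */

  /* ... set key and IV, encrypt or decrypt ... */

  gcry_cipher_close (hd);   /* Wipes and frees the handle (NULL is ok too). */
  return 0;
}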


/* Release all resources associated with the cipher handle H.  H may be
   NULL in which case this is a no-operation.  */
void
gcry_cipher_close (gcry_cipher_hd_t h)
{
  if (! h)
    return;

  if ((h->magic != CTX_MAGIC_SECURE)
      && (h->magic != CTX_MAGIC_NORMAL))
    _gcry_fatal_error(GPG_ERR_INTERNAL,
                      "gcry_cipher_close: already closed/invalid handle");
  else
    h->magic = 0;

  /* Release module.  */
  ath_mutex_lock (&ciphers_registered_lock);
  _gcry_module_release (h->module);
  ath_mutex_unlock (&ciphers_registered_lock);

  /* We always want to wipe out the memory even when the context has
     been allocated in secure memory.  The user might have disabled
     secure memory or is using his own implementation which does not
     do the wiping.  To accomplish this we need to keep track of the
     actual size of this structure because we have no way to know
     how large the allocated area was when using a standard malloc.  */
  wipememory (h, h->actual_handle_size);

  gcry_free (h);
}


/* Set the key to be used for the encryption context C to KEY with
   length KEYLEN.  The length should match the required length.  */
static gcry_error_t
cipher_setkey (gcry_cipher_hd_t c, byte *key, unsigned keylen)
{
  gcry_err_code_t ret;

  ret = (*c->cipher->setkey) (&c->context.c, key, keylen);
  if (! ret)
    /* Duplicate initial context.  */
    memcpy ((void *) ((char *) &c->context.c + c->cipher->contextsize),
            (void *) &c->context.c,
            c->cipher->contextsize);

  return gcry_error (ret);
}


/* Set the IV to be used for the encryption context C to IV with
   length IVLEN.  The length should match the required length.  */
static void
cipher_setiv( gcry_cipher_hd_t c, const byte *iv, unsigned ivlen )
{
    memset( c->iv, 0, c->cipher->blocksize );
    if( iv ) {
        if( ivlen != c->cipher->blocksize )
            log_info("WARNING: cipher_setiv: ivlen=%u blklen=%u\n",
                     ivlen, (unsigned) c->cipher->blocksize );
        if (ivlen > c->cipher->blocksize)
          ivlen = c->cipher->blocksize;
        memcpy( c->iv, iv, ivlen );
    }
    c->unused = 0;
}


/* Reset the cipher context to the initial context.  This is basically
   the same as a release followed by a new.  */
static void
cipher_reset (gcry_cipher_hd_t c)
{
  memcpy (&c->context.c,
          (char *) &c->context.c + c->cipher->contextsize,
          c->cipher->contextsize);
  memset (c->iv, 0, c->cipher->blocksize);
  memset (c->lastiv, 0, c->cipher->blocksize);
  memset (c->ctr, 0, c->cipher->blocksize);
}
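
/* Editor's sketch (not part of the original file): how the context
   duplication done in cipher_setkey() pays off for a caller.  It
   assumes the public gcry_cipher_setkey/gcry_cipher_setiv/
   gcry_cipher_reset wrappers from gcrypt.h; the 16-byte key and IV
   sizes match AES-128 and are illustrative only.  */
static void
example_reset_reuse (gcry_cipher_hd_t hd,
                     const unsigned char key[16], const unsigned char iv[16])
{
  gcry_cipher_setkey (hd, key, 16);
  gcry_cipher_setiv (hd, iv, 16);
  /* ... encrypt a first message ... */

  /* Restore the keyed initial state (no need to rerun the key
     schedule), then set a fresh IV for the next message.  */
  gcry_cipher_reset (hd);
  gcry_cipher_setiv (hd, iv, 16);
}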


static void
do_ecb_encrypt( gcry_cipher_hd_t c, byte *outbuf, const byte *inbuf,
                unsigned int nblocks )
{
    unsigned int n;

    for(n=0; n < nblocks; n++ ) {
        c->cipher->encrypt ( &c->context.c, outbuf, (byte*)/*arggg*/inbuf );
        inbuf  += c->cipher->blocksize;
        outbuf += c->cipher->blocksize;
    }
}

static void
do_ecb_decrypt( gcry_cipher_hd_t c, byte *outbuf, const byte *inbuf,
                unsigned int nblocks )
{
    unsigned n;

    for(n=0; n < nblocks; n++ ) {
        c->cipher->decrypt ( &c->context.c, outbuf, (byte*)/*arggg*/inbuf );
        inbuf  += c->cipher->blocksize;
        outbuf += c->cipher->blocksize;
    }
}

static void
do_cbc_encrypt( gcry_cipher_hd_t c, byte *outbuf, const byte *inbuf,
                unsigned int nbytes )
{
    unsigned int n;
    byte *ivp;
    int i;
    size_t blocksize = c->cipher->blocksize;
    unsigned nblocks = nbytes / blocksize;

    if ((c->flags & GCRY_CIPHER_CBC_CTS) && nbytes > blocksize) {
      if ((nbytes % blocksize) == 0)
        nblocks--;
    }

    for(n=0; n < nblocks; n++ ) {
        /* fixme: the xor should work on words and not on
         * bytes.  Maybe it is a good idea to enhance the cipher backend
         * API to allow for CBC handling direct in the backend */
        for(ivp=c->iv,i=0; i < blocksize; i++ )
            outbuf[i] = inbuf[i] ^ *ivp++;
        c->cipher->encrypt ( &c->context.c, outbuf, outbuf );
        memcpy(c->iv, outbuf, blocksize );
        inbuf  += c->cipher->blocksize;
        if (!(c->flags & GCRY_CIPHER_CBC_MAC))
          outbuf += c->cipher->blocksize;
    }

    if ((c->flags & GCRY_CIPHER_CBC_CTS) && nbytes > blocksize)
      {
        /* We have to be careful here, since outbuf might be equal to
           inbuf.  */

        int restbytes;
        byte b;

        if ((nbytes % blocksize) == 0)
          restbytes = blocksize;
        else
          restbytes = nbytes % blocksize;

        outbuf -= blocksize;
        for (ivp = c->iv, i = 0; i < restbytes; i++)
          {
            b = inbuf[i];
            outbuf[blocksize + i] = outbuf[i];
            outbuf[i] = b ^ *ivp++;
          }
        for (; i < blocksize; i++)
          outbuf[i] = 0 ^ *ivp++;

        c->cipher->encrypt (&c->context.c, outbuf, outbuf);
        memcpy (c->iv, outbuf, blocksize);
      }
}
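
/* Editor's sketch (not part of the original file): what the
   GCRY_CIPHER_CBC_CTS handling above means for a caller.  With the CTS
   flag set, gcry_cipher_encrypt() accepts a length that is not a
   multiple of the block size (as long as it exceeds one block) and the
   ciphertext is exactly as long as the plaintext.  AES and the 21-byte
   in-place buffer are illustrative assumptions.  */
static gcry_error_t
example_cbc_cts (const unsigned char key[16], const unsigned char iv[16],
                 unsigned char buf[21])
{
  gcry_cipher_hd_t hd;
  gcry_error_t err;

  err = gcry_cipher_open (&hd, GCRY_CIPHER_AES, GCRY_CIPHER_MODE_CBC,
                          GCRY_CIPHER_CBC_CTS);
  if (err)
    return err;
  gcry_cipher_setkey (hd, key, 16);
  gcry_cipher_setiv (hd, iv, 16);
  err = gcry_cipher_encrypt (hd, buf, 21, NULL, 0);  /* In-place, 21 bytes. */
  gcry_cipher_close (hd);
  return err;
}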
820 | 820 | ||
821 | static void | 821 | static void |
822 | do_cbc_decrypt( gcry_cipher_hd_t c, byte *outbuf, const byte *inbuf, | 822 | do_cbc_decrypt( gcry_cipher_hd_t c, byte *outbuf, const byte *inbuf, |
823 | unsigned int nbytes ) | 823 | unsigned int nbytes ) |
824 | { | 824 | { |
825 | unsigned int n; | 825 | unsigned int n; |
826 | byte *ivp; | 826 | byte *ivp; |
827 | int i; | 827 | int i; |
828 | size_t blocksize = c->cipher->blocksize; | 828 | size_t blocksize = c->cipher->blocksize; |
829 | unsigned int nblocks = nbytes / blocksize; | 829 | unsigned int nblocks = nbytes / blocksize; |
830 | 830 | ||
831 | if ((c->flags & GCRY_CIPHER_CBC_CTS) && nbytes > blocksize) { | 831 | if ((c->flags & GCRY_CIPHER_CBC_CTS) && nbytes > blocksize) { |
832 | nblocks--; | 832 | nblocks--; |
833 | if ((nbytes % blocksize) == 0) | 833 | if ((nbytes % blocksize) == 0) |
834 | nblocks--; | 834 | nblocks--; |
835 | memcpy(c->lastiv, c->iv, blocksize ); | 835 | memcpy(c->lastiv, c->iv, blocksize ); |
836 | } | 836 | } |
837 | 837 | ||
838 | for(n=0; n < nblocks; n++ ) { | 838 | for(n=0; n < nblocks; n++ ) { |
839 | /* Because outbuf and inbuf might be the same, we have | 839 | /* Because outbuf and inbuf might be the same, we have |
840 | * to save the original ciphertext block. We use lastiv | 840 | * to save the original ciphertext block. We use lastiv |
841 | * for this here because it is not used otherwise. */ | 841 | * for this here because it is not used otherwise. */ |
842 | memcpy(c->lastiv, inbuf, blocksize ); | 842 | memcpy(c->lastiv, inbuf, blocksize ); |
843 | c->cipher->decrypt ( &c->context.c, outbuf, inbuf ); | 843 | c->cipher->decrypt ( &c->context.c, outbuf, inbuf ); |
844 | for(ivp=c->iv,i=0; i < blocksize; i++ ) | 844 | for(ivp=c->iv,i=0; i < blocksize; i++ ) |
845 | outbuf[i] ^= *ivp++; | 845 | outbuf[i] ^= *ivp++; |
846 | memcpy(c->iv, c->lastiv, blocksize ); | 846 | memcpy(c->iv, c->lastiv, blocksize ); |
847 | inbuf += c->cipher->blocksize; | 847 | inbuf += c->cipher->blocksize; |
848 | outbuf += c->cipher->blocksize; | 848 | outbuf += c->cipher->blocksize; |
849 | } | 849 | } |
850 | 850 | ||
851 | if ((c->flags & GCRY_CIPHER_CBC_CTS) && nbytes > blocksize) { | 851 | if ((c->flags & GCRY_CIPHER_CBC_CTS) && nbytes > blocksize) { |
852 | int restbytes; | 852 | int restbytes; |
853 | 853 | ||
854 | if ((nbytes % blocksize) == 0) | 854 | if ((nbytes % blocksize) == 0) |
855 | restbytes = blocksize; | 855 | restbytes = blocksize; |
856 | else | 856 | else |
857 | restbytes = nbytes % blocksize; | 857 | restbytes = nbytes % blocksize; |
858 | 858 | ||
859 | memcpy(c->lastiv, c->iv, blocksize ); /* save Cn-2 */ | 859 | memcpy(c->lastiv, c->iv, blocksize ); /* save Cn-2 */ |
860 | memcpy(c->iv, inbuf + blocksize, restbytes ); /* save Cn */ | 860 | memcpy(c->iv, inbuf + blocksize, restbytes ); /* save Cn */ |
861 | 861 | ||
862 | c->cipher->decrypt ( &c->context.c, outbuf, inbuf ); | 862 | c->cipher->decrypt ( &c->context.c, outbuf, inbuf ); |
863 | for(ivp=c->iv,i=0; i < restbytes; i++ ) | 863 | for(ivp=c->iv,i=0; i < restbytes; i++ ) |
864 | outbuf[i] ^= *ivp++; | 864 | outbuf[i] ^= *ivp++; |
865 | 865 | ||
866 | memcpy(outbuf + blocksize, outbuf, restbytes); | 866 | memcpy(outbuf + blocksize, outbuf, restbytes); |
867 | for(i=restbytes; i < blocksize; i++) | 867 | for(i=restbytes; i < blocksize; i++) |
868 | c->iv[i] = outbuf[i]; | 868 | c->iv[i] = outbuf[i]; |
869 | c->cipher->decrypt ( &c->context.c, outbuf, c->iv ); | 869 | c->cipher->decrypt ( &c->context.c, outbuf, c->iv ); |
870 | for(ivp=c->lastiv,i=0; i < blocksize; i++ ) | 870 | for(ivp=c->lastiv,i=0; i < blocksize; i++ ) |
871 | outbuf[i] ^= *ivp++; | 871 | outbuf[i] ^= *ivp++; |
872 | /* c->lastiv is now really lastlastiv, does this matter? */ | 872 | /* c->lastiv is now really lastlastiv, does this matter? */ |
873 | } | 873 | } |
874 | } | 874 | } |
875 | 875 | ||
876 | 876 | ||
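The ciphertext-stealing branch above is easiest to see from the caller's side. The sketch below is a hedged illustration, assuming AES-128 and a 20-byte buffer (one full block plus a four-byte tail); the helper name and the demo key/IV bytes are invented, library initialization (gcry_check_version) and most error checking are omitted, and the gcry_cipher_* calls are the public wrappers declared in gcrypt.h and implemented in this file.

#include <string.h>
#include <gcrypt.h>

/* Demo values, invented for this sketch. */
static const char key[16] = "0123456789abcdef";
static const char iv[16]  = "fedcba9876543210";

static int
cbc_cts_roundtrip (void)
{
  gcry_cipher_hd_t hd;
  char buf[20];

  memcpy (buf, "ciphertext stealing!", 20);       /* one block + 4-byte tail */

  if (gcry_cipher_open (&hd, GCRY_CIPHER_AES128, GCRY_CIPHER_MODE_CBC,
                        GCRY_CIPHER_CBC_CTS))
    return -1;
  gcry_cipher_setkey (hd, key, 16);
  gcry_cipher_setiv (hd, iv, 16);

  /* 20 is not a multiple of 16; without the CTS flag cipher_encrypt()
     below would reject this with GPG_ERR_INV_ARG. */
  gcry_cipher_encrypt (hd, buf, 20, NULL, 0);     /* in-place */

  gcry_cipher_setiv (hd, iv, 16);                 /* restore IV for decryption */
  gcry_cipher_decrypt (hd, buf, 20, NULL, 0);
  gcry_cipher_close (hd);

  return memcmp (buf, "ciphertext stealing!", 20) ? -1 : 0;
}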
877 | static void | 877 | static void |
878 | do_cfb_encrypt( gcry_cipher_hd_t c, | 878 | do_cfb_encrypt( gcry_cipher_hd_t c, |
879 | byte *outbuf, const byte *inbuf, unsigned nbytes ) | 879 | byte *outbuf, const byte *inbuf, unsigned nbytes ) |
880 | { | 880 | { |
881 | byte *ivp; | 881 | byte *ivp; |
882 | size_t blocksize = c->cipher->blocksize; | 882 | size_t blocksize = c->cipher->blocksize; |
883 | 883 | ||
884 | if( nbytes <= c->unused ) { | 884 | if( nbytes <= c->unused ) { |
885 | /* Short enough to be encoded by the remaining XOR mask. */ | 885 | /* Short enough to be encoded by the remaining XOR mask. */ |
886 | /* XOR the input with the IV and store input into IV. */ | 886 | /* XOR the input with the IV and store input into IV. */ |
887 | for (ivp=c->iv+c->cipher->blocksize - c->unused; | 887 | for (ivp=c->iv+c->cipher->blocksize - c->unused; |
888 | nbytes; | 888 | nbytes; |
889 | nbytes--, c->unused-- ) | 889 | nbytes--, c->unused-- ) |
890 | *outbuf++ = (*ivp++ ^= *inbuf++); | 890 | *outbuf++ = (*ivp++ ^= *inbuf++); |
891 | return; | 891 | return; |
892 | } | 892 | } |
893 | 893 | ||
894 | if( c->unused ) { | 894 | if( c->unused ) { |
895 | /* XOR the input with the IV and store input into IV */ | 895 | /* XOR the input with the IV and store input into IV */ |
896 | nbytes -= c->unused; | 896 | nbytes -= c->unused; |
897 | for(ivp=c->iv+blocksize - c->unused; c->unused; c->unused-- ) | 897 | for(ivp=c->iv+blocksize - c->unused; c->unused; c->unused-- ) |
898 | *outbuf++ = (*ivp++ ^= *inbuf++); | 898 | *outbuf++ = (*ivp++ ^= *inbuf++); |
899 | } | 899 | } |
900 | 900 | ||
901 | /* Now we can process complete blocks. */ | 901 | /* Now we can process complete blocks. */ |
902 | while( nbytes >= blocksize ) { | 902 | while( nbytes >= blocksize ) { |
903 | int i; | 903 | int i; |
904 | /* Encrypt the IV (and save the current one). */ | 904 | /* Encrypt the IV (and save the current one). */ |
905 | memcpy( c->lastiv, c->iv, blocksize ); | 905 | memcpy( c->lastiv, c->iv, blocksize ); |
906 | c->cipher->encrypt ( &c->context.c, c->iv, c->iv ); | 906 | c->cipher->encrypt ( &c->context.c, c->iv, c->iv ); |
907 | /* XOR the input with the IV and store input into IV */ | 907 | /* XOR the input with the IV and store input into IV */ |
908 | for(ivp=c->iv,i=0; i < blocksize; i++ ) | 908 | for(ivp=c->iv,i=0; i < blocksize; i++ ) |
909 | *outbuf++ = (*ivp++ ^= *inbuf++); | 909 | *outbuf++ = (*ivp++ ^= *inbuf++); |
910 | nbytes -= blocksize; | 910 | nbytes -= blocksize; |
911 | } | 911 | } |
912 | if( nbytes ) { /* process the remaining bytes */ | 912 | if( nbytes ) { /* process the remaining bytes */ |
913 | /* encrypt the IV (and save the current one) */ | 913 | /* encrypt the IV (and save the current one) */ |
914 | memcpy( c->lastiv, c->iv, blocksize ); | 914 | memcpy( c->lastiv, c->iv, blocksize ); |
915 | c->cipher->encrypt ( &c->context.c, c->iv, c->iv ); | 915 | c->cipher->encrypt ( &c->context.c, c->iv, c->iv ); |
916 | c->unused = blocksize; | 916 | c->unused = blocksize; |
917 | /* and apply the xor */ | 917 | /* and apply the xor */ |
918 | c->unused -= nbytes; | 918 | c->unused -= nbytes; |
919 | for(ivp=c->iv; nbytes; nbytes-- ) | 919 | for(ivp=c->iv; nbytes; nbytes-- ) |
920 | *outbuf++ = (*ivp++ ^= *inbuf++); | 920 | *outbuf++ = (*ivp++ ^= *inbuf++); |
921 | } | 921 | } |
922 | } | 922 | } |
923 | 923 | ||
924 | static void | 924 | static void |
925 | do_cfb_decrypt( gcry_cipher_hd_t c, | 925 | do_cfb_decrypt( gcry_cipher_hd_t c, |
926 | byte *outbuf, const byte *inbuf, unsigned int nbytes ) | 926 | byte *outbuf, const byte *inbuf, unsigned int nbytes ) |
927 | { | 927 | { |
928 | byte *ivp; | 928 | byte *ivp; |
929 | ulong temp; | 929 | ulong temp; |
930 | size_t blocksize = c->cipher->blocksize; | 930 | size_t blocksize = c->cipher->blocksize; |
931 | 931 | ||
932 | if( nbytes <= c->unused ) { | 932 | if( nbytes <= c->unused ) { |
933 | /* Short enough to be encoded by the remaining XOR mask. */ | 933 | /* Short enough to be encoded by the remaining XOR mask. */ |
934 | /* XOR the input with the IV and store input into IV. */ | 934 | /* XOR the input with the IV and store input into IV. */ |
935 | for(ivp=c->iv+blocksize - c->unused; nbytes; nbytes--,c->unused--) { | 935 | for(ivp=c->iv+blocksize - c->unused; nbytes; nbytes--,c->unused--) { |
936 | temp = *inbuf++; | 936 | temp = *inbuf++; |
937 | *outbuf++ = *ivp ^ temp; | 937 | *outbuf++ = *ivp ^ temp; |
938 | *ivp++ = temp; | 938 | *ivp++ = temp; |
939 | } | 939 | } |
940 | return; | 940 | return; |
941 | } | 941 | } |
942 | 942 | ||
943 | if( c->unused ) { | 943 | if( c->unused ) { |
944 | /* XOR the input with the IV and store input into IV. */ | 944 | /* XOR the input with the IV and store input into IV. */ |
945 | nbytes -= c->unused; | 945 | nbytes -= c->unused; |
946 | for(ivp=c->iv+blocksize - c->unused; c->unused; c->unused-- ) { | 946 | for(ivp=c->iv+blocksize - c->unused; c->unused; c->unused-- ) { |
947 | temp = *inbuf++; | 947 | temp = *inbuf++; |
948 | *outbuf++ = *ivp ^ temp; | 948 | *outbuf++ = *ivp ^ temp; |
949 | *ivp++ = temp; | 949 | *ivp++ = temp; |
950 | } | 950 | } |
951 | } | 951 | } |
952 | 952 | ||
953 | /* now we can process complete blocks */ | 953 | /* now we can process complete blocks */ |
954 | while( nbytes >= blocksize ) { | 954 | while( nbytes >= blocksize ) { |
955 | int i; | 955 | int i; |
956 | /* encrypt the IV (and save the current one) */ | 956 | /* encrypt the IV (and save the current one) */ |
957 | memcpy( c->lastiv, c->iv, blocksize ); | 957 | memcpy( c->lastiv, c->iv, blocksize ); |
958 | c->cipher->encrypt ( &c->context.c, c->iv, c->iv ); | 958 | c->cipher->encrypt ( &c->context.c, c->iv, c->iv ); |
959 | /* XOR the input with the IV and store input into IV */ | 959 | /* XOR the input with the IV and store input into IV */ |
960 | for(ivp=c->iv,i=0; i < blocksize; i++ ) { | 960 | for(ivp=c->iv,i=0; i < blocksize; i++ ) { |
961 | temp = *inbuf++; | 961 | temp = *inbuf++; |
962 | *outbuf++ = *ivp ^ temp; | 962 | *outbuf++ = *ivp ^ temp; |
963 | *ivp++ = temp; | 963 | *ivp++ = temp; |
964 | } | 964 | } |
965 | nbytes -= blocksize; | 965 | nbytes -= blocksize; |
966 | } | 966 | } |
967 | if( nbytes ) { /* process the remaining bytes */ | 967 | if( nbytes ) { /* process the remaining bytes */ |
968 | /* encrypt the IV (and save the current one) */ | 968 | /* encrypt the IV (and save the current one) */ |
969 | memcpy( c->lastiv, c->iv, blocksize ); | 969 | memcpy( c->lastiv, c->iv, blocksize ); |
970 | c->cipher->encrypt ( &c->context.c, c->iv, c->iv ); | 970 | c->cipher->encrypt ( &c->context.c, c->iv, c->iv ); |
971 | c->unused = blocksize; | 971 | c->unused = blocksize; |
972 | /* and apply the xor */ | 972 | /* and apply the xor */ |
973 | c->unused -= nbytes; | 973 | c->unused -= nbytes; |
974 | for(ivp=c->iv; nbytes; nbytes-- ) { | 974 | for(ivp=c->iv; nbytes; nbytes-- ) { |
975 | temp = *inbuf++; | 975 | temp = *inbuf++; |
976 | *outbuf++ = *ivp ^ temp; | 976 | *outbuf++ = *ivp ^ temp; |
977 | *ivp++ = temp; | 977 | *ivp++ = temp; |
978 | } | 978 | } |
979 | } | 979 | } |
980 | } | 980 | } |
981 | 981 | ||
982 | 982 | ||
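Both CFB routines above drive the block cipher in its encrypt direction only and use c->unused to carry the partially consumed keystream block across calls, so callers may split the data into arbitrary slices. The hypothetical helper below (its name and the odd 5-byte chunking are invented for illustration; the handle is assumed to be open in GCRY_CIPHER_MODE_CFB with key and IV already set) shows that calling pattern.

#include <gcrypt.h>

static gcry_error_t
cfb_encrypt_in_chunks (gcry_cipher_hd_t hd,
                       unsigned char *out, const unsigned char *in, size_t len)
{
  gcry_error_t err = 0;
  size_t off = 0;

  /* CFB imposes no length constraint, so deliberately feed odd-sized
     pieces; the keystream state is kept in the handle between calls. */
  while (!err && off < len)
    {
      size_t n = (len - off > 5) ? 5 : (len - off);
      err = gcry_cipher_encrypt (hd, out + off, n, in + off, n);
      off += n;
    }
  return err;
}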
983 | static void | 983 | static void |
984 | do_ctr_encrypt( gcry_cipher_hd_t c, byte *outbuf, const byte *inbuf, | 984 | do_ctr_encrypt( gcry_cipher_hd_t c, byte *outbuf, const byte *inbuf, |
985 | unsigned int nbytes ) | 985 | unsigned int nbytes ) |
986 | { | 986 | { |
987 | unsigned int n; | 987 | unsigned int n; |
988 | byte tmp[MAX_BLOCKSIZE]; | 988 | byte tmp[MAX_BLOCKSIZE]; |
989 | int i; | 989 | int i; |
990 | 990 | ||
991 | for(n=0; n < nbytes; n++) | 991 | for(n=0; n < nbytes; n++) |
992 | { | 992 | { |
993 | if ((n % c->cipher->blocksize) == 0) | 993 | if ((n % c->cipher->blocksize) == 0) |
994 | { | 994 | { |
995 | c->cipher->encrypt (&c->context.c, tmp, c->ctr); | 995 | c->cipher->encrypt (&c->context.c, tmp, c->ctr); |
996 | 996 | ||
997 | for (i = c->cipher->blocksize; i > 0; i--) | 997 | for (i = c->cipher->blocksize; i > 0; i--) |
998 | { | 998 | { |
999 | c->ctr[i-1]++; | 999 | c->ctr[i-1]++; |
1000 | if (c->ctr[i-1] != 0) | 1000 | if (c->ctr[i-1] != 0) |
1001 | break; | 1001 | break; |
1002 | } | 1002 | } |
1003 | } | 1003 | } |
1004 | 1004 | ||
1005 | /* XOR input with encrypted counter and store in output. */ | 1005 | /* XOR input with encrypted counter and store in output. */ |
1006 | outbuf[n] = inbuf[n] ^ tmp[n % c->cipher->blocksize]; | 1006 | outbuf[n] = inbuf[n] ^ tmp[n % c->cipher->blocksize]; |
1007 | } | 1007 | } |
1008 | } | 1008 | } |
1009 | 1009 | ||
1010 | static void | 1010 | static void |
1011 | do_ctr_decrypt( gcry_cipher_hd_t c, byte *outbuf, const byte *inbuf, | 1011 | do_ctr_decrypt( gcry_cipher_hd_t c, byte *outbuf, const byte *inbuf, |
1012 | unsigned int nbytes ) | 1012 | unsigned int nbytes ) |
1013 | { | 1013 | { |
1014 | do_ctr_encrypt (c, outbuf, inbuf, nbytes); | 1014 | do_ctr_encrypt (c, outbuf, inbuf, nbytes); |
1015 | } | 1015 | } |
1016 | 1016 | ||
1017 | 1017 | ||
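Because do_ctr_decrypt simply re-runs do_ctr_encrypt, one caller-side sketch covers both directions; the counter block is installed through GCRYCTL_SET_CTR, handled in gcry_cipher_ctl further below. The helper name and the choice of AES-128 are assumptions made for this example.

#include <gcrypt.h>

/* Encrypt or decrypt LEN bytes in BUF in place; running it twice with
   the same key and initial counter restores the original data. */
static gcry_error_t
ctr_crypt_in_place (const void *key, size_t keylen,
                    const unsigned char ctr[16], void *buf, size_t len)
{
  gcry_cipher_hd_t hd;
  gcry_error_t err;

  err = gcry_cipher_open (&hd, GCRY_CIPHER_AES128, GCRY_CIPHER_MODE_CTR, 0);
  if (err)
    return err;
  if (!(err = gcry_cipher_setkey (hd, key, keylen)))
    err = gcry_cipher_ctl (hd, GCRYCTL_SET_CTR, (void *) ctr, 16);
  if (!err)
    err = gcry_cipher_encrypt (hd, buf, len, NULL, 0);   /* in-place */
  gcry_cipher_close (hd);
  return err;
}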
1018 | /**************** | 1018 | /**************** |
1019 | * Encrypt INBUF to OUTBUF with the mode selected at open. | 1019 | * Encrypt INBUF to OUTBUF with the mode selected at open. |
1020 | * inbuf and outbuf may overlap or be the same. | 1020 | * inbuf and outbuf may overlap or be the same. |
1021 | * Depending on the mode some constraints apply to NBYTES. | 1021 | * Depending on the mode some constraints apply to NBYTES. |
1022 | */ | 1022 | */ |
1023 | static gcry_err_code_t | 1023 | static gcry_err_code_t |
1024 | cipher_encrypt (gcry_cipher_hd_t c, byte *outbuf, | 1024 | cipher_encrypt (gcry_cipher_hd_t c, byte *outbuf, |
1025 | const byte *inbuf, unsigned int nbytes) | 1025 | const byte *inbuf, unsigned int nbytes) |
1026 | { | 1026 | { |
1027 | gcry_err_code_t rc = GPG_ERR_NO_ERROR; | 1027 | gcry_err_code_t rc = GPG_ERR_NO_ERROR; |
1028 | 1028 | ||
1029 | switch( c->mode ) { | 1029 | switch( c->mode ) { |
1030 | case GCRY_CIPHER_MODE_ECB: | 1030 | case GCRY_CIPHER_MODE_ECB: |
1031 | if (!(nbytes%c->cipher->blocksize)) | 1031 | if (!(nbytes%c->cipher->blocksize)) |
1032 | do_ecb_encrypt(c, outbuf, inbuf, nbytes/c->cipher->blocksize ); | 1032 | do_ecb_encrypt(c, outbuf, inbuf, nbytes/c->cipher->blocksize ); |
1033 | else | 1033 | else |
1034 | rc = GPG_ERR_INV_ARG; | 1034 | rc = GPG_ERR_INV_ARG; |
1035 | break; | 1035 | break; |
1036 | case GCRY_CIPHER_MODE_CBC: | 1036 | case GCRY_CIPHER_MODE_CBC: |
1037 | if (!(nbytes%c->cipher->blocksize) | 1037 | if (!(nbytes%c->cipher->blocksize) |
1038 | || (nbytes > c->cipher->blocksize | 1038 | || (nbytes > c->cipher->blocksize |
1039 | && (c->flags & GCRY_CIPHER_CBC_CTS))) | 1039 | && (c->flags & GCRY_CIPHER_CBC_CTS))) |
1040 | do_cbc_encrypt(c, outbuf, inbuf, nbytes ); | 1040 | do_cbc_encrypt(c, outbuf, inbuf, nbytes ); |
1041 | else | 1041 | else |
1042 | rc = GPG_ERR_INV_ARG; | 1042 | rc = GPG_ERR_INV_ARG; |
1043 | break; | 1043 | break; |
1044 | case GCRY_CIPHER_MODE_CFB: | 1044 | case GCRY_CIPHER_MODE_CFB: |
1045 | do_cfb_encrypt(c, outbuf, inbuf, nbytes ); | 1045 | do_cfb_encrypt(c, outbuf, inbuf, nbytes ); |
1046 | break; | 1046 | break; |
1047 | case GCRY_CIPHER_MODE_CTR: | 1047 | case GCRY_CIPHER_MODE_CTR: |
1048 | do_ctr_encrypt(c, outbuf, inbuf, nbytes ); | 1048 | do_ctr_encrypt(c, outbuf, inbuf, nbytes ); |
1049 | break; | 1049 | break; |
1050 | case GCRY_CIPHER_MODE_STREAM: | 1050 | case GCRY_CIPHER_MODE_STREAM: |
1051 | c->cipher->stencrypt ( &c->context.c, | 1051 | c->cipher->stencrypt ( &c->context.c, |
1052 | outbuf, (byte*)/*arggg*/inbuf, nbytes ); | 1052 | outbuf, (byte*)/*arggg*/inbuf, nbytes ); |
1053 | break; | 1053 | break; |
1054 | case GCRY_CIPHER_MODE_NONE: | 1054 | case GCRY_CIPHER_MODE_NONE: |
1055 | if( inbuf != outbuf ) | 1055 | if( inbuf != outbuf ) |
1056 | memmove( outbuf, inbuf, nbytes ); | 1056 | memmove( outbuf, inbuf, nbytes ); |
1057 | break; | 1057 | break; |
1058 | default: | 1058 | default: |
1059 | log_fatal("cipher_encrypt: invalid mode %d\n", c->mode ); | 1059 | log_fatal("cipher_encrypt: invalid mode %d\n", c->mode ); |
1060 | rc = GPG_ERR_INV_CIPHER_MODE; | 1060 | rc = GPG_ERR_INV_CIPHER_MODE; |
1061 | break; | 1061 | break; |
1062 | } | 1062 | } |
1063 | return rc; | 1063 | return rc; |
1064 | } | 1064 | } |
1065 | 1065 | ||
1066 | 1066 | ||
1067 | /**************** | 1067 | /**************** |
1068 | * Encrypt IN and write it to OUT. If IN is NULL, in-place encryption has | 1068 | * Encrypt IN and write it to OUT. If IN is NULL, in-place encryption has |
1069 | * been requested. | 1069 | * been requested. |
1070 | */ | 1070 | */ |
1071 | gcry_error_t | 1071 | gcry_error_t |
1072 | gcry_cipher_encrypt (gcry_cipher_hd_t h, void *out, size_t outsize, | 1072 | gcry_cipher_encrypt (gcry_cipher_hd_t h, void *out, size_t outsize, |
1073 | const void *in, size_t inlen) | 1073 | const void *in, size_t inlen) |
1074 | { | 1074 | { |
1075 | gcry_err_code_t err; | 1075 | gcry_err_code_t err; |
1076 | 1076 | ||
1077 | if (!in) | 1077 | if (!in) |
1078 | /* Caller requested in-place encryption. */ | 1078 | /* Caller requested in-place encryption. */ |
1079 | /* Actually cipher_encrypt() does not need to know about it, but | 1079 | /* Actually cipher_encrypt() does not need to know about it, but |
1080 | * we may change this to get better performance. */ | 1080 | * we may change this to get better performance. */ |
1081 | err = cipher_encrypt (h, out, out, outsize); | 1081 | err = cipher_encrypt (h, out, out, outsize); |
1082 | else if (outsize < ((h->flags & GCRY_CIPHER_CBC_MAC) ? | 1082 | else if (outsize < ((h->flags & GCRY_CIPHER_CBC_MAC) ? |
1083 | h->cipher->blocksize : inlen)) | 1083 | h->cipher->blocksize : inlen)) |
1084 | err = GPG_ERR_TOO_SHORT; | 1084 | err = GPG_ERR_TOO_SHORT; |
1085 | else if ((h->mode == GCRY_CIPHER_MODE_ECB | 1085 | else if ((h->mode == GCRY_CIPHER_MODE_ECB |
1086 | || (h->mode == GCRY_CIPHER_MODE_CBC | 1086 | || (h->mode == GCRY_CIPHER_MODE_CBC |
1087 | && (! ((h->flags & GCRY_CIPHER_CBC_CTS) | 1087 | && (! ((h->flags & GCRY_CIPHER_CBC_CTS) |
1088 | && (inlen > h->cipher->blocksize))))) | 1088 | && (inlen > h->cipher->blocksize))))) |
1089 | && (inlen % h->cipher->blocksize)) | 1089 | && (inlen % h->cipher->blocksize)) |
1090 | err = GPG_ERR_INV_ARG; | 1090 | err = GPG_ERR_INV_ARG; |
1091 | else | 1091 | else |
1092 | err = cipher_encrypt (h, out, in, inlen); | 1092 | err = cipher_encrypt (h, out, in, inlen); |
1093 | 1093 | ||
1094 | if (err && out) | 1094 | if (err && out) |
1095 | memset (out, 0x42, outsize); /* Failsafe: Make sure that the | 1095 | memset (out, 0x42, outsize); /* Failsafe: Make sure that the |
1096 | plaintext will never make it into | 1096 | plaintext will never make it into |
1097 | OUT. */ | 1097 | OUT. */ |
1098 | 1098 | ||
1099 | return gcry_error (err); | 1099 | return gcry_error (err); |
1100 | } | 1100 | } |
1101 | 1101 | ||
1102 | 1102 | ||
1103 | 1103 | ||
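As a hedged illustration of the argument checks above, assume a handle opened as AES-128 in plain CBC mode (16-byte blocks, neither the CTS nor the CBC-MAC flag set); the function name and buffer sizes are invented. The calls would then behave roughly as commented.

#include <gcrypt.h>

static void
encrypt_call_shapes (gcry_cipher_hd_t hd,
                     unsigned char out[32], const unsigned char in[32])
{
  gcry_error_t err;

  err = gcry_cipher_encrypt (hd, out, 32, in, 32);  /* ok: aligned, OUT big enough */
  err = gcry_cipher_encrypt (hd, out, 16, in, 32);  /* GPG_ERR_TOO_SHORT */
  err = gcry_cipher_encrypt (hd, out, 32, in, 20);  /* GPG_ERR_INV_ARG: 20 % 16 != 0 */
  err = gcry_cipher_encrypt (hd, out, 32, NULL, 0); /* in-place: encrypts OUT (32 bytes) */
  (void) err;

  /* Note the failsafe above: on error, OUT is overwritten with 0x42
     bytes so plaintext can never leak into it. */
}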
1104 | /**************** | 1104 | /**************** |
1105 | * Decrypt INBUF to OUTBUF with the mode selected at open. | 1105 | * Decrypt INBUF to OUTBUF with the mode selected at open. |
1106 | * inbuf and outbuf may overlap or be the same. | 1106 | * inbuf and outbuf may overlap or be the same. |
1107 | * Depending on the mode some constraints apply to NBYTES. | 1107 | * Depending on the mode some constraints apply to NBYTES. |
1108 | */ | 1108 | */ |
1109 | static gcry_err_code_t | 1109 | static gcry_err_code_t |
1110 | cipher_decrypt (gcry_cipher_hd_t c, byte *outbuf, const byte *inbuf, | 1110 | cipher_decrypt (gcry_cipher_hd_t c, byte *outbuf, const byte *inbuf, |
1111 | unsigned int nbytes) | 1111 | unsigned int nbytes) |
1112 | { | 1112 | { |
1113 | gcry_err_code_t rc = GPG_ERR_NO_ERROR; | 1113 | gcry_err_code_t rc = GPG_ERR_NO_ERROR; |
1114 | 1114 | ||
1115 | switch( c->mode ) { | 1115 | switch( c->mode ) { |
1116 | case GCRY_CIPHER_MODE_ECB: | 1116 | case GCRY_CIPHER_MODE_ECB: |
1117 | if (!(nbytes%c->cipher->blocksize)) | 1117 | if (!(nbytes%c->cipher->blocksize)) |
1118 | do_ecb_decrypt(c, outbuf, inbuf, nbytes/c->cipher->blocksize ); | 1118 | do_ecb_decrypt(c, outbuf, inbuf, nbytes/c->cipher->blocksize ); |
1119 | else | 1119 | else |
1120 | rc = GPG_ERR_INV_ARG; | 1120 | rc = GPG_ERR_INV_ARG; |
1121 | break; | 1121 | break; |
1122 | case GCRY_CIPHER_MODE_CBC: | 1122 | case GCRY_CIPHER_MODE_CBC: |
1123 | if (!(nbytes%c->cipher->blocksize) | 1123 | if (!(nbytes%c->cipher->blocksize) |
1124 | || (nbytes > c->cipher->blocksize | 1124 | || (nbytes > c->cipher->blocksize |
1125 | && (c->flags & GCRY_CIPHER_CBC_CTS))) | 1125 | && (c->flags & GCRY_CIPHER_CBC_CTS))) |
1126 | do_cbc_decrypt(c, outbuf, inbuf, nbytes ); | 1126 | do_cbc_decrypt(c, outbuf, inbuf, nbytes ); |
1127 | else | 1127 | else |
1128 | rc = GPG_ERR_INV_ARG; | 1128 | rc = GPG_ERR_INV_ARG; |
1129 | break; | 1129 | break; |
1130 | case GCRY_CIPHER_MODE_CFB: | 1130 | case GCRY_CIPHER_MODE_CFB: |
1131 | do_cfb_decrypt(c, outbuf, inbuf, nbytes ); | 1131 | do_cfb_decrypt(c, outbuf, inbuf, nbytes ); |
1132 | break; | 1132 | break; |
1133 | case GCRY_CIPHER_MODE_CTR: | 1133 | case GCRY_CIPHER_MODE_CTR: |
1134 | do_ctr_decrypt(c, outbuf, inbuf, nbytes ); | 1134 | do_ctr_decrypt(c, outbuf, inbuf, nbytes ); |
1135 | break; | 1135 | break; |
1136 | case GCRY_CIPHER_MODE_STREAM: | 1136 | case GCRY_CIPHER_MODE_STREAM: |
1137 | c->cipher->stdecrypt ( &c->context.c, | 1137 | c->cipher->stdecrypt ( &c->context.c, |
1138 | outbuf, (byte*)/*arggg*/inbuf, nbytes ); | 1138 | outbuf, (byte*)/*arggg*/inbuf, nbytes ); |
1139 | break; | 1139 | break; |
1140 | case GCRY_CIPHER_MODE_NONE: | 1140 | case GCRY_CIPHER_MODE_NONE: |
1141 | if( inbuf != outbuf ) | 1141 | if( inbuf != outbuf ) |
1142 | memmove( outbuf, inbuf, nbytes ); | 1142 | memmove( outbuf, inbuf, nbytes ); |
1143 | break; | 1143 | break; |
1144 | default: | 1144 | default: |
1145 | log_fatal ("cipher_decrypt: invalid mode %d\n", c->mode ); | 1145 | log_fatal ("cipher_decrypt: invalid mode %d\n", c->mode ); |
1146 | rc = GPG_ERR_INV_CIPHER_MODE; | 1146 | rc = GPG_ERR_INV_CIPHER_MODE; |
1147 | break; | 1147 | break; |
1148 | } | 1148 | } |
1149 | return rc; | 1149 | return rc; |
1150 | } | 1150 | } |
1151 | 1151 | ||
1152 | 1152 | ||
1153 | gcry_error_t | 1153 | gcry_error_t |
1154 | gcry_cipher_decrypt (gcry_cipher_hd_t h, void *out, size_t outsize, | 1154 | gcry_cipher_decrypt (gcry_cipher_hd_t h, void *out, size_t outsize, |
1155 | const void *in, size_t inlen) | 1155 | const void *in, size_t inlen) |
1156 | { | 1156 | { |
1157 | gcry_err_code_t err = GPG_ERR_NO_ERROR; | 1157 | gcry_err_code_t err = GPG_ERR_NO_ERROR; |
1158 | 1158 | ||
1159 | if (! in) | 1159 | if (! in) |
1160 | /* Caller requested in-place decryption. */ | 1160 | /* Caller requested in-place decryption. */ |
1161 | /* Actually cipher_decrypt() does not need to know about it, but | 1161 | /* Actually cipher_decrypt() does not need to know about it, but |
1162 | * we may change this to get better performance. */ | 1162 | * we may change this to get better performance. */ |
1163 | err = cipher_decrypt (h, out, out, outsize); | 1163 | err = cipher_decrypt (h, out, out, outsize); |
1164 | else if (outsize < inlen) | 1164 | else if (outsize < inlen) |
1165 | err = GPG_ERR_TOO_SHORT; | 1165 | err = GPG_ERR_TOO_SHORT; |
1166 | else if (((h->mode == GCRY_CIPHER_MODE_ECB) | 1166 | else if (((h->mode == GCRY_CIPHER_MODE_ECB) |
1167 | || ((h->mode == GCRY_CIPHER_MODE_CBC) | 1167 | || ((h->mode == GCRY_CIPHER_MODE_CBC) |
1168 | && (! ((h->flags & GCRY_CIPHER_CBC_CTS) | 1168 | && (! ((h->flags & GCRY_CIPHER_CBC_CTS) |
1169 | && (inlen > h->cipher->blocksize))))) | 1169 | && (inlen > h->cipher->blocksize))))) |
1170 | && (inlen % h->cipher->blocksize) != 0) | 1170 | && (inlen % h->cipher->blocksize) != 0) |
1171 | err = GPG_ERR_INV_ARG; | 1171 | err = GPG_ERR_INV_ARG; |
1172 | else | 1172 | else |
1173 | err = cipher_decrypt (h, out, in, inlen); | 1173 | err = cipher_decrypt (h, out, in, inlen); |
1174 | 1174 | ||
1175 | return gcry_error (err); | 1175 | return gcry_error (err); |
1176 | } | 1176 | } |
1177 | 1177 | ||
1178 | 1178 | ||
1179 | 1179 | ||
1180 | /**************** | 1180 | /**************** |
1181 | * Used for PGP's somewhat strange CFB mode. Only works if | 1181 | * Used for PGP's somewhat strange CFB mode. Only works if |
1182 | * the corresponding flag is set. | 1182 | * the corresponding flag is set. |
1183 | */ | 1183 | */ |
1184 | static void | 1184 | static void |
1185 | cipher_sync( gcry_cipher_hd_t c ) | 1185 | cipher_sync( gcry_cipher_hd_t c ) |
1186 | { | 1186 | { |
1187 | if( (c->flags & GCRY_CIPHER_ENABLE_SYNC) && c->unused ) { | 1187 | if( (c->flags & GCRY_CIPHER_ENABLE_SYNC) && c->unused ) { |
1188 | memmove(c->iv + c->unused, c->iv, c->cipher->blocksize - c->unused ); | 1188 | memmove(c->iv + c->unused, c->iv, c->cipher->blocksize - c->unused ); |
1189 | memcpy(c->iv, c->lastiv + c->cipher->blocksize - c->unused, c->unused); | 1189 | memcpy(c->iv, c->lastiv + c->cipher->blocksize - c->unused, c->unused); |
1190 | c->unused = 0; | 1190 | c->unused = 0; |
1191 | } | 1191 | } |
1192 | } | 1192 | } |
1193 | 1193 | ||
1194 | 1194 | ||
1195 | gcry_error_t | 1195 | gcry_error_t |
1196 | gcry_cipher_ctl( gcry_cipher_hd_t h, int cmd, void *buffer, size_t buflen) | 1196 | gcry_cipher_ctl( gcry_cipher_hd_t h, int cmd, void *buffer, size_t buflen) |
1197 | { | 1197 | { |
1198 | gcry_err_code_t rc = GPG_ERR_NO_ERROR; | 1198 | gcry_err_code_t rc = GPG_ERR_NO_ERROR; |
1199 | 1199 | ||
1200 | switch (cmd) | 1200 | switch (cmd) |
1201 | { | 1201 | { |
1202 | case GCRYCTL_SET_KEY: | 1202 | case GCRYCTL_SET_KEY: |
1203 | rc = cipher_setkey( h, buffer, buflen ); | 1203 | rc = cipher_setkey( h, buffer, buflen ); |
1204 | break; | 1204 | break; |
1205 | case GCRYCTL_SET_IV: | 1205 | case GCRYCTL_SET_IV: |
1206 | cipher_setiv( h, buffer, buflen ); | 1206 | cipher_setiv( h, buffer, buflen ); |
1207 | break; | 1207 | break; |
1208 | case GCRYCTL_RESET: | 1208 | case GCRYCTL_RESET: |
1209 | cipher_reset (h); | 1209 | cipher_reset (h); |
1210 | break; | 1210 | break; |
1211 | case GCRYCTL_CFB_SYNC: | 1211 | case GCRYCTL_CFB_SYNC: |
1212 | cipher_sync( h ); | 1212 | cipher_sync( h ); |
1213 | break; | 1213 | break; |
1214 | case GCRYCTL_SET_CBC_CTS: | 1214 | case GCRYCTL_SET_CBC_CTS: |
1215 | if (buflen) | 1215 | if (buflen) |
1216 | if (h->flags & GCRY_CIPHER_CBC_MAC) | 1216 | if (h->flags & GCRY_CIPHER_CBC_MAC) |
1217 | rc = GPG_ERR_INV_FLAG; | 1217 | rc = GPG_ERR_INV_FLAG; |
1218 | else | 1218 | else |
1219 | h->flags |= GCRY_CIPHER_CBC_CTS; | 1219 | h->flags |= GCRY_CIPHER_CBC_CTS; |
1220 | else | 1220 | else |
1221 | h->flags &= ~GCRY_CIPHER_CBC_CTS; | 1221 | h->flags &= ~GCRY_CIPHER_CBC_CTS; |
1222 | break; | 1222 | break; |
1223 | case GCRYCTL_SET_CBC_MAC: | 1223 | case GCRYCTL_SET_CBC_MAC: |
1224 | if (buflen) | 1224 | if (buflen) |
1225 | if (h->flags & GCRY_CIPHER_CBC_CTS) | 1225 | if (h->flags & GCRY_CIPHER_CBC_CTS) |
1226 | rc = GPG_ERR_INV_FLAG; | 1226 | rc = GPG_ERR_INV_FLAG; |
1227 | else | 1227 | else |
1228 | h->flags |= GCRY_CIPHER_CBC_MAC; | 1228 | h->flags |= GCRY_CIPHER_CBC_MAC; |
1229 | else | 1229 | else |
1230 | h->flags &= ~GCRY_CIPHER_CBC_MAC; | 1230 | h->flags &= ~GCRY_CIPHER_CBC_MAC; |
1231 | break; | 1231 | break; |
1232 | case GCRYCTL_DISABLE_ALGO: | 1232 | case GCRYCTL_DISABLE_ALGO: |
1233 | /* this one expects a NULL handle and buffer pointing to an | 1233 | /* this one expects a NULL handle and buffer pointing to an |
1234 | * integer with the algo number. | 1234 | * integer with the algo number. |
1235 | */ | 1235 | */ |
1236 | if( h || !buffer || buflen != sizeof(int) ) | 1236 | if( h || !buffer || buflen != sizeof(int) ) |
1237 | return gcry_error (GPG_ERR_CIPHER_ALGO); | 1237 | return gcry_error (GPG_ERR_CIPHER_ALGO); |
1238 | disable_cipher_algo( *(int*)buffer ); | 1238 | disable_cipher_algo( *(int*)buffer ); |
1239 | break; | 1239 | break; |
1240 | case GCRYCTL_SET_CTR: | 1240 | case GCRYCTL_SET_CTR: |
1241 | if (buffer && buflen == h->cipher->blocksize) | 1241 | if (buffer && buflen == h->cipher->blocksize) |
1242 | memcpy (h->ctr, buffer, h->cipher->blocksize); | 1242 | memcpy (h->ctr, buffer, h->cipher->blocksize); |
1243 | else if (buffer == NULL || buflen == 0) | 1243 | else if (buffer == NULL || buflen == 0) |
1244 | memset (h->ctr, 0, h->cipher->blocksize); | 1244 | memset (h->ctr, 0, h->cipher->blocksize); |
1245 | else | 1245 | else |
1246 | rc = GPG_ERR_INV_ARG; | 1246 | rc = GPG_ERR_INV_ARG; |
1247 | break; | 1247 | break; |
1248 | 1248 | ||
1249 | default: | 1249 | default: |
1250 | rc = GPG_ERR_INV_OP; | 1250 | rc = GPG_ERR_INV_OP; |
1251 | } | 1251 | } |
1252 | 1252 | ||
1253 | return gcry_error (rc); | 1253 | return gcry_error (rc); |
1254 | } | 1254 | } |
1255 | 1255 | ||
1256 | 1256 | ||
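Most per-handle switches funnel through this function. The speculative sketch below (helper name invented; the control codes are exactly the ones handled above) shows the mutual exclusion of CTS and CBC-MAC and a state reset that keeps the key but clears the IV/counter state.

#include <gcrypt.h>

static gcry_error_t
tweak_handle (gcry_cipher_hd_t hd)
{
  gcry_error_t err;

  /* Non-zero BUFLEN enables ciphertext stealing ... */
  err = gcry_cipher_ctl (hd, GCRYCTL_SET_CBC_CTS, NULL, 1);
  if (err)
    return err;

  /* ... after which requesting CBC-MAC fails with GPG_ERR_INV_FLAG. */
  if (!gcry_cipher_ctl (hd, GCRYCTL_SET_CBC_MAC, NULL, 1))
    return gcry_error (GPG_ERR_INTERNAL);   /* unexpected */

  /* Drop IV/counter state, keep the key. */
  return gcry_cipher_ctl (hd, GCRYCTL_RESET, NULL, 0);
}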
1257 | /**************** | 1257 | /**************** |
1258 | * Return information about the cipher handle. | 1258 | * Return information about the cipher handle. |
1259 | */ | 1259 | */ |
1260 | gcry_error_t | 1260 | gcry_error_t |
1261 | gcry_cipher_info( gcry_cipher_hd_t h, int cmd, void *buffer, size_t *nbytes) | 1261 | gcry_cipher_info( gcry_cipher_hd_t h, int cmd, void *buffer, size_t *nbytes) |
1262 | { | 1262 | { |
1263 | gcry_err_code_t err = GPG_ERR_NO_ERROR; | 1263 | gcry_err_code_t err = GPG_ERR_NO_ERROR; |
1264 | 1264 | ||
1265 | switch (cmd) | 1265 | switch (cmd) |
1266 | { | 1266 | { |
1267 | default: | 1267 | default: |
1268 | err = GPG_ERR_INV_OP; | 1268 | err = GPG_ERR_INV_OP; |
1269 | } | 1269 | } |
1270 | 1270 | ||
1271 | return gcry_error (err); | 1271 | return gcry_error (err); |
1272 | } | 1272 | } |
1273 | 1273 | ||
1274 | /**************** | 1274 | /**************** |
1275 | * Return information about the given cipher algorithm | 1275 | * Return information about the given cipher algorithm |
1276 | * WHAT selects the kind of information returned: | 1276 | * WHAT selects the kind of information returned: |
1277 | * GCRYCTL_GET_KEYLEN: | 1277 | * GCRYCTL_GET_KEYLEN: |
1278 | * Return the length of the key; if the algorithm | 1278 | * Return the length of the key; if the algorithm |
1279 | * supports multiple key lengths, the maximum supported value | 1279 | * supports multiple key lengths, the maximum supported value |
1280 | * is returned. The length is returned as a number of octets. | 1280 | * is returned. The length is returned as a number of octets. |
1281 | * buffer and nbytes must be zero. | 1281 | * buffer and nbytes must be zero. |
1282 | * The keylength is returned in _bytes_. | 1282 | * The keylength is returned in _bytes_. |
1283 | * GCRYCTL_GET_BLKLEN: | 1283 | * GCRYCTL_GET_BLKLEN: |
1284 | * Return the blocklength of the algorithm counted in octets. | 1284 | * Return the blocklength of the algorithm counted in octets. |
1285 | * buffer and nbytes must be zero. | 1285 | * buffer and nbytes must be zero. |
1286 | * GCRYCTL_TEST_ALGO: | 1286 | * GCRYCTL_TEST_ALGO: |
1287 | * Returns 0 when the specified algorithm is available for use. | 1287 | * Returns 0 when the specified algorithm is available for use. |
1288 | * buffer and nbytes must be zero. | 1288 | * buffer and nbytes must be zero. |
1289 | * | 1289 | * |
1290 | * Note: Because this function is in most cases used to return an | 1290 | * Note: Because this function is in most cases used to return an |
1291 | * integer value, we can make it easier for the caller to just look at | 1291 | * integer value, we can make it easier for the caller to just look at |
1292 | * the return value. The caller will in all cases consult the value | 1292 | * the return value. The caller will in all cases consult the value |
1293 | * and thereby detect whether an error occurred or not (e.g. while checking | 1293 | * and thereby detect whether an error occurred or not (e.g. while checking |
1294 | * the block size) | 1294 | * the block size) |
1295 | */ | 1295 | */ |
1296 | gcry_error_t | 1296 | gcry_error_t |
1297 | gcry_cipher_algo_info (int algo, int what, void *buffer, size_t *nbytes) | 1297 | gcry_cipher_algo_info (int algo, int what, void *buffer, size_t *nbytes) |
1298 | { | 1298 | { |
1299 | gcry_err_code_t err = GPG_ERR_NO_ERROR; | 1299 | gcry_err_code_t err = GPG_ERR_NO_ERROR; |
1300 | unsigned int ui; | 1300 | unsigned int ui; |
1301 | 1301 | ||
1302 | switch (what) | 1302 | switch (what) |
1303 | { | 1303 | { |
1304 | case GCRYCTL_GET_KEYLEN: | 1304 | case GCRYCTL_GET_KEYLEN: |
1305 | if (buffer || (! nbytes)) | 1305 | if (buffer || (! nbytes)) |
1306 | err = GPG_ERR_CIPHER_ALGO; | 1306 | err = GPG_ERR_CIPHER_ALGO; |
1307 | else | 1307 | else |
1308 | { | 1308 | { |
1309 | ui = cipher_get_keylen (algo); | 1309 | ui = cipher_get_keylen (algo); |
1310 | if ((ui > 0) && (ui <= 512)) | 1310 | if ((ui > 0) && (ui <= 512)) |
1311 | *nbytes = (size_t) ui / 8; | 1311 | *nbytes = (size_t) ui / 8; |
1312 | else | 1312 | else |
1313 | /* The only reason is an invalid algo or a strange | 1313 | /* The only reason is an invalid algo or a strange |
1314 | key length. */ | 1314 | key length. */ |
1315 | err = GPG_ERR_CIPHER_ALGO; | 1315 | err = GPG_ERR_CIPHER_ALGO; |
1316 | } | 1316 | } |
1317 | break; | 1317 | break; |
1318 | 1318 | ||
1319 | case GCRYCTL_GET_BLKLEN: | 1319 | case GCRYCTL_GET_BLKLEN: |
1320 | if (buffer || (! nbytes)) | 1320 | if (buffer || (! nbytes)) |
1321 | err = GPG_ERR_CIPHER_ALGO; | 1321 | err = GPG_ERR_CIPHER_ALGO; |
1322 | else | 1322 | else |
1323 | { | 1323 | { |
1324 | ui = cipher_get_blocksize (algo); | 1324 | ui = cipher_get_blocksize (algo); |
1325 | if ((ui > 0) && (ui < 10000)) | 1325 | if ((ui > 0) && (ui < 10000)) |
1326 | *nbytes = ui; | 1326 | *nbytes = ui; |
1327 | else | 1327 | else |
1328 | /* The only reason is an invalid algo or a strange | 1328 | /* The only reason is an invalid algo or a strange |
1329 | blocksize. */ | 1329 | blocksize. */ |
1330 | err = GPG_ERR_CIPHER_ALGO; | 1330 | err = GPG_ERR_CIPHER_ALGO; |
1331 | } | 1331 | } |
1332 | break; | 1332 | break; |
1333 | 1333 | ||
1334 | case GCRYCTL_TEST_ALGO: | 1334 | case GCRYCTL_TEST_ALGO: |
1335 | if (buffer || nbytes) | 1335 | if (buffer || nbytes) |
1336 | err = GPG_ERR_INV_ARG; | 1336 | err = GPG_ERR_INV_ARG; |
1337 | else | 1337 | else |
1338 | err = check_cipher_algo (algo); | 1338 | err = check_cipher_algo (algo); |
1339 | break; | 1339 | break; |
1340 | 1340 | ||
1341 | default: | 1341 | default: |
1342 | err = GPG_ERR_INV_OP; | 1342 | err = GPG_ERR_INV_OP; |
1343 | } | 1343 | } |
1344 | 1344 | ||
1345 | return gcry_error (err); | 1345 | return gcry_error (err); |
1346 | } | 1346 | } |
1347 | 1347 | ||
1348 | 1348 | ||
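The two convenience wrappers that follow are thin shims over this function. A small sketch with an invented helper name shows how an application might query an algorithm directly; note that GCRYCTL_TEST_ALGO requires both BUFFER and NBYTES to be null.

#include <stdio.h>
#include <gcrypt.h>

static void
describe_algo (int algo)
{
  size_t keylen, blklen;

  if (gcry_err_code (gcry_cipher_algo_info (algo, GCRYCTL_TEST_ALGO,
                                            NULL, NULL)))
    {
      printf ("algorithm %d not available\n", algo);
      return;
    }
  if (!gcry_cipher_algo_info (algo, GCRYCTL_GET_KEYLEN, NULL, &keylen)
      && !gcry_cipher_algo_info (algo, GCRYCTL_GET_BLKLEN, NULL, &blklen))
    printf ("%s: %u-byte key, %u-byte blocks\n",
            gcry_cipher_algo_name (algo),
            (unsigned int) keylen, (unsigned int) blklen);
}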
1349 | size_t | 1349 | size_t |
1350 | gcry_cipher_get_algo_keylen (int algo) | 1350 | gcry_cipher_get_algo_keylen (int algo) |
1351 | { | 1351 | { |
1352 | size_t n; | 1352 | size_t n; |
1353 | 1353 | ||
1354 | if (gcry_cipher_algo_info( algo, GCRYCTL_GET_KEYLEN, NULL, &n)) | 1354 | if (gcry_cipher_algo_info( algo, GCRYCTL_GET_KEYLEN, NULL, &n)) |
1355 | n = 0; | 1355 | n = 0; |
1356 | return n; | 1356 | return n; |
1357 | } | 1357 | } |
1358 | 1358 | ||
1359 | 1359 | ||
1360 | size_t | 1360 | size_t |
1361 | gcry_cipher_get_algo_blklen (int algo) | 1361 | gcry_cipher_get_algo_blklen (int algo) |
1362 | { | 1362 | { |
1363 | size_t n; | 1363 | size_t n; |
1364 | 1364 | ||
1365 | if (gcry_cipher_algo_info( algo, GCRYCTL_GET_BLKLEN, NULL, &n)) | 1365 | if (gcry_cipher_algo_info( algo, GCRYCTL_GET_BLKLEN, NULL, &n)) |
1366 | n = 0; | 1366 | n = 0; |
1367 | return n; | 1367 | return n; |
1368 | } | 1368 | } |
1369 | 1369 | ||
1370 | 1370 | ||
1371 | gcry_err_code_t | 1371 | gcry_err_code_t |
1372 | _gcry_cipher_init (void) | 1372 | _gcry_cipher_init (void) |
1373 | { | 1373 | { |
1374 | gcry_err_code_t err = GPG_ERR_NO_ERROR; | 1374 | gcry_err_code_t err = GPG_ERR_NO_ERROR; |
1375 | 1375 | ||
1376 | REGISTER_DEFAULT_CIPHERS; | 1376 | REGISTER_DEFAULT_CIPHERS; |
1377 | 1377 | ||
1378 | return err; | 1378 | return err; |
1379 | } | 1379 | } |
1380 | 1380 | ||
1381 | /* Get a list consisting of the IDs of the loaded cipher modules. If | 1381 | /* Get a list consisting of the IDs of the loaded cipher modules. If |
1382 | LIST is zero, write the number of loaded cipher modules to | 1382 | LIST is zero, write the number of loaded cipher modules to |
1383 | LIST_LENGTH and return. If LIST is non-zero, the first | 1383 | LIST_LENGTH and return. If LIST is non-zero, the first |
1384 | *LIST_LENGTH algorithm IDs are stored in LIST, which must be of | 1384 | *LIST_LENGTH algorithm IDs are stored in LIST, which must be of |
1385 | sufficient size. In case there are fewer cipher modules than | 1385 | sufficient size. In case there are fewer cipher modules than |
1386 | *LIST_LENGTH, *LIST_LENGTH is updated to the correct number. */ | 1386 | *LIST_LENGTH, *LIST_LENGTH is updated to the correct number. */ |
1387 | gcry_error_t | 1387 | gcry_error_t |
1388 | gcry_cipher_list (int *list, int *list_length) | 1388 | gcry_cipher_list (int *list, int *list_length) |
1389 | { | 1389 | { |
1390 | gcry_err_code_t err = GPG_ERR_NO_ERROR; | 1390 | gcry_err_code_t err = GPG_ERR_NO_ERROR; |
1391 | 1391 | ||
1392 | ath_mutex_lock (&ciphers_registered_lock); | 1392 | ath_mutex_lock (&ciphers_registered_lock); |
1393 | err = _gcry_module_list (ciphers_registered, list, list_length); | 1393 | err = _gcry_module_list (ciphers_registered, list, list_length); |
1394 | ath_mutex_unlock (&ciphers_registered_lock); | 1394 | ath_mutex_unlock (&ciphers_registered_lock); |
1395 | 1395 | ||
1396 | return err; | 1396 | return err; |
1397 | } | 1397 | } |
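A sketch of the two-step calling convention described in the comment above: the first call only reports the count, the second fills in the IDs. The helper name and the use of gcry_cipher_algo_name for printing are assumptions made for this example.

#include <stdio.h>
#include <stdlib.h>
#include <gcrypt.h>

static void
print_cipher_modules (void)
{
  int count = 0;
  int *ids;
  int i;

  if (gcry_cipher_list (NULL, &count) || count <= 0)
    return;
  ids = malloc (count * sizeof *ids);
  if (!ids)
    return;
  if (!gcry_cipher_list (ids, &count))
    for (i = 0; i < count; i++)
      printf ("%d: %s\n", ids[i], gcry_cipher_algo_name (ids[i]));
  free (ids);
}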