/*
 * Kick off @num_mb AEAD requests concurrently, then wait for all of them.
 * Per-request results land in @rc[]; the return value is 0 on success or
 * the last error observed while collecting completions.
 */
static int do_mult_aead_op(struct test_mb_aead_data *data, int enc,
			   u32 num_mb, int *rc)
{
	int err = 0;
	int i;

	/* Issue every request before waiting on any of them. */
	for (i = 0; i < num_mb; i++)
		rc[i] = (enc == ENCRYPT) ?
			crypto_aead_encrypt(data[i].req) :
			crypto_aead_decrypt(data[i].req);

	/* Collect all completions; remember the last failure seen. */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);
		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}
staticint test_mb_aead_jiffies(struct test_mb_aead_data *data, int enc, int blen, int secs, u32 num_mb)
{ unsignedlong start, end; int bcount; int ret = 0; int *rc;
rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL); if (!rc) return -ENOMEM;
for (start = jiffies, end = start + secs * HZ, bcount = 0;
time_before(jiffies, end); bcount++) {
ret = do_mult_aead_op(data, enc, num_mb, rc); if (ret) goto out;
}
if (aad_size >= PAGE_SIZE) {
pr_err("associate data length (%u) too big\n", aad_size); return;
}
iv = kzalloc(MAX_IVLEN, GFP_KERNEL); if (!iv) return;
if (enc == ENCRYPT)
e = "encryption"; else
e = "decryption";
data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL); if (!data) goto out_free_iv;
tfm = crypto_alloc_aead(algo, 0, 0); if (IS_ERR(tfm)) {
pr_err("failed to load transform for %s: %ld\n",
algo, PTR_ERR(tfm)); goto out_free_data;
}
ret = crypto_aead_setauthsize(tfm, authsize); if (ret) {
pr_err("alg: aead: Failed to setauthsize for %s: %d\n", algo,
ret); goto out_free_tfm;
}
for (i = 0; i < num_mb; ++i) if (testmgr_alloc_buf(data[i].xbuf)) { while (i--)
testmgr_free_buf(data[i].xbuf); goto out_free_tfm;
}
for (i = 0; i < num_mb; ++i) if (testmgr_alloc_buf(data[i].axbuf)) { while (i--)
testmgr_free_buf(data[i].axbuf); goto out_free_xbuf;
}
for (i = 0; i < num_mb; ++i) if (testmgr_alloc_buf(data[i].xoutbuf)) { while (i--)
testmgr_free_buf(data[i].xoutbuf); goto out_free_axbuf;
}
for (i = 0; i < num_mb; ++i) {
data[i].req = aead_request_alloc(tfm, GFP_KERNEL); if (!data[i].req) {
pr_err("alg: aead: Failed to allocate request for %s\n",
algo); while (i--)
aead_request_free(data[i].req); goto out_free_xoutbuf;
}
}
for (i = 0; i < num_mb; ++i) {
crypto_init_wait(&data[i].wait);
aead_request_set_callback(data[i].req,
CRYPTO_TFM_REQ_MAY_BACKLOG,
crypto_req_done, &data[i].wait);
}
if (secs) {
ret = test_mb_aead_jiffies(data, enc, bs,
secs, num_mb);
cond_resched();
} else {
ret = test_mb_aead_cycles(data, enc, bs,
num_mb);
}
if (ret) {
pr_err("%s() failed return code=%d\n", e, ret); break;
}
b_size++;
i++;
} while (*b_size);
keysize++;
} while (*keysize);
out: for (i = 0; i < num_mb; ++i)
aead_request_free(data[i].req);
out_free_xoutbuf: for (i = 0; i < num_mb; ++i)
testmgr_free_buf(data[i].xoutbuf);
out_free_axbuf: for (i = 0; i < num_mb; ++i)
testmgr_free_buf(data[i].axbuf);
out_free_xbuf: for (i = 0; i < num_mb; ++i)
testmgr_free_buf(data[i].xbuf);
out_free_tfm:
crypto_free_aead(tfm);
out_free_data:
kfree(data);
out_free_iv:
kfree(iv);
}
staticint test_aead_jiffies(struct aead_request *req, int enc, int blen, int secs)
{ unsignedlong start, end; int bcount; int ret;
for (start = jiffies, end = start + secs * HZ, bcount = 0;
time_before(jiffies, end); bcount++) { if (enc)
ret = do_one_aead_op(req, crypto_aead_encrypt(req)); else
ret = do_one_aead_op(req, crypto_aead_decrypt(req));
staticint test_aead_cycles(struct aead_request *req, int enc, int blen)
{ unsignedlong cycles = 0; int ret = 0; int i;
/* Warm-up run. */ for (i = 0; i < 4; i++) { if (enc)
ret = do_one_aead_op(req, crypto_aead_encrypt(req)); else
ret = do_one_aead_op(req, crypto_aead_decrypt(req));
if (ret) goto out;
}
/* The real thing. */ for (i = 0; i < 8; i++) {
cycles_t start, end;
start = get_cycles(); if (enc)
ret = do_one_aead_op(req, crypto_aead_encrypt(req)); else
ret = do_one_aead_op(req, crypto_aead_decrypt(req));
end = get_cycles();
if (ret) goto out;
cycles += end - start;
}
out: if (ret == 0)
pr_cont("1 operation in %lu cycles (%d bytes)\n",
(cycles + 4) / 8, blen);
/* * For decryption we need a proper auth so * we do the encryption path once with buffers * reversed (input <-> output) to calculate it
*/
aead_request_set_crypt(req, sgout, sg,
bs, iv);
ret = do_one_aead_op(req,
crypto_aead_encrypt(req));
staticint test_ahash_jiffies(struct ahash_request *req, int blen, int plen, char *out, int secs)
{ unsignedlong start, end; int bcount, pcount; int ret;
if (plen == blen) return test_ahash_jiffies_digest(req, blen, out, secs);
for (start = jiffies, end = start + secs * HZ, bcount = 0;
time_before(jiffies, end); bcount++) {
ret = do_one_ahash_op(req, crypto_ahash_init(req)); if (ret) return ret; for (pcount = 0; pcount < blen; pcount += plen) {
ret = do_one_ahash_op(req, crypto_ahash_update(req)); if (ret) return ret;
} /* we assume there is enough space in 'out' for the result */
ret = do_one_ahash_op(req, crypto_ahash_final(req)); if (ret) return ret;
}
staticint test_ahash_cycles(struct ahash_request *req, int blen, int plen, char *out)
{ unsignedlong cycles = 0; int i, pcount, ret;
if (plen == blen) return test_ahash_cycles_digest(req, blen, out);
/* Warm-up run. */ for (i = 0; i < 4; i++) {
ret = do_one_ahash_op(req, crypto_ahash_init(req)); if (ret) goto out; for (pcount = 0; pcount < blen; pcount += plen) {
ret = do_one_ahash_op(req, crypto_ahash_update(req)); if (ret) goto out;
}
ret = do_one_ahash_op(req, crypto_ahash_final(req)); if (ret) goto out;
}
/* The real thing. */ for (i = 0; i < 8; i++) {
cycles_t start, end;
start = get_cycles();
ret = do_one_ahash_op(req, crypto_ahash_init(req)); if (ret) goto out; for (pcount = 0; pcount < blen; pcount += plen) {
ret = do_one_ahash_op(req, crypto_ahash_update(req)); if (ret) goto out;
}
ret = do_one_ahash_op(req, crypto_ahash_final(req)); if (ret) goto out;
output = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL); if (!output) goto out_nomem;
for (i = 0; speed[i].blen != 0; i++) { if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
pr_err("template (%u) too big for tvmem (%lu)\n",
speed[i].blen, TVMEMSIZE * PAGE_SIZE); break;
}
if (klen)
crypto_ahash_setkey(tfm, tvmem[0], klen);
pr_info("test%3u " "(%5u byte blocks,%5u bytes per update,%4u updates): ",
i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
/*
 * Fire @num_mb skcipher requests concurrently, then reap them all.
 * Per-request results land in @rc[]; returns 0 on success or the last
 * error seen while waiting for completions.
 */
static int do_mult_acipher_op(struct test_mb_skcipher_data *data, int enc,
			      u32 num_mb, int *rc)
{
	int err = 0;
	int i;

	/* Issue every request before blocking on any of them. */
	for (i = 0; i < num_mb; i++)
		rc[i] = (enc == ENCRYPT) ?
			crypto_skcipher_encrypt(data[i].req) :
			crypto_skcipher_decrypt(data[i].req);

	/* Reap all completions; keep the last failure for the caller. */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);
		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}
staticint test_mb_acipher_jiffies(struct test_mb_skcipher_data *data, int enc, int blen, int secs, u32 num_mb)
{ unsignedlong start, end; int bcount; int ret = 0; int *rc;
rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL); if (!rc) return -ENOMEM;
for (start = jiffies, end = start + secs * HZ, bcount = 0;
time_before(jiffies, end); bcount++) {
ret = do_mult_acipher_op(data, enc, num_mb, rc); if (ret) goto out;
}
staticint test_mb_acipher_cycles(struct test_mb_skcipher_data *data, int enc, int blen, u32 num_mb)
{ unsignedlong cycles = 0; int ret = 0; int i; int *rc;
rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL); if (!rc) return -ENOMEM;
/* Warm-up run. */ for (i = 0; i < 4; i++) {
ret = do_mult_acipher_op(data, enc, num_mb, rc); if (ret) goto out;
}
/* The real thing. */ for (i = 0; i < 8; i++) {
cycles_t start, end;
start = get_cycles();
ret = do_mult_acipher_op(data, enc, num_mb, rc);
end = get_cycles();
/*
 * Multibuffer skcipher speed test: allocates @num_mb parallel requests and
 * times encryption/decryption across the template's key-size/block-size
 * matrix via test_mb_acipher_jiffies()/test_mb_acipher_cycles().
 *
 * NOTE(review): this block is damaged by extraction — keywords are fused
 * ("staticvoid", "constchar", "unsignedint") and runs of lines are missing
 * (see the NOTE further down), so it does not compile as shown. Only
 * comments are added here; the code bytes are left untouched.
 */
staticvoid test_mb_skcipher_speed(constchar *algo, int enc, int secs, struct cipher_speed_template *template, unsignedint tcount, u8 *keysize, u32 num_mb)
{ struct test_mb_skcipher_data *data; struct crypto_skcipher *tfm; unsignedint i, j, iv_len; constint *b_size; constchar *key; constchar *e; char iv[128]; int ret;
/* Direction label, used only in log messages below. */
if (enc == ENCRYPT)
e = "encryption"; else
e = "decryption";
/* One request context per concurrent operation. */
data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL); if (!data) return;
tfm = crypto_alloc_skcipher(algo, 0, 0); if (IS_ERR(tfm)) {
pr_err("failed to load transform for %s: %ld\n",
algo, PTR_ERR(tfm)); goto out_free_data;
}
/* Per-request page buffers; unwind partial allocations on failure. */
for (i = 0; i < num_mb; ++i) if (testmgr_alloc_buf(data[i].xbuf)) { while (i--)
testmgr_free_buf(data[i].xbuf); goto out_free_tfm;
}
for (i = 0; i < num_mb; ++i) {
data[i].req = skcipher_request_alloc(tfm, GFP_KERNEL); if (!data[i].req) {
pr_err("alg: skcipher: Failed to allocate request for %s\n",
algo); while (i--)
skcipher_request_free(data[i].req); goto out_free_xbuf;
}
}
/* Async completion plumbing: each request signals its own wait object. */
for (i = 0; i < num_mb; ++i) {
skcipher_request_set_callback(data[i].req,
CRYPTO_TFM_REQ_MAY_BACKLOG,
crypto_req_done, &data[i].wait);
crypto_init_wait(&data[i].wait);
}
/*
 * NOTE(review): lines are missing here. The upstream function opens
 * nested "do { ... } while (*keysize)" / "do { ... } while (*b_size)"
 * loops that set the key, derive "bs" from *b_size, and build the
 * scatterlists; none of that (nor the declaration of "bs") survived
 * extraction, which is why the "} while" closers below are unmatched.
 */
if (secs) {
ret = test_mb_acipher_jiffies(data, enc,
bs, secs,
num_mb);
cond_resched();
} else {
ret = test_mb_acipher_cycles(data, enc,
bs, num_mb);
}
if (ret) {
pr_err("%s() failed flags=%x\n", e,
crypto_skcipher_get_flags(tfm)); break;
}
b_size++;
i++;
} while (*b_size);
keysize++;
} while (*keysize);
/* Teardown in reverse order of setup; labels double as error unwind. */
out: for (i = 0; i < num_mb; ++i)
skcipher_request_free(data[i].req);
out_free_xbuf: for (i = 0; i < num_mb; ++i)
testmgr_free_buf(data[i].xbuf);
out_free_tfm:
crypto_free_skcipher(tfm);
out_free_data:
kfree(data);
}
staticint test_acipher_cycles(struct skcipher_request *req, int enc, int blen)
{ unsignedlong cycles = 0; int ret = 0; int i;
/* Warm-up run. */ for (i = 0; i < 4; i++) { if (enc)
ret = do_one_acipher_op(req,
crypto_skcipher_encrypt(req)); else
ret = do_one_acipher_op(req,
crypto_skcipher_decrypt(req));
if (ret) goto out;
}
/* The real thing. */ for (i = 0; i < 8; i++) {
cycles_t start, end;
start = get_cycles(); if (enc)
ret = do_one_acipher_op(req,
crypto_skcipher_encrypt(req)); else
ret = do_one_acipher_op(req,
crypto_skcipher_decrypt(req));
end = get_cycles();
if (ret) goto out;
cycles += end - start;
}
out: if (ret == 0)
pr_cont("1 operation in %lu cycles (%d bytes)\n",
(cycles + 4) / 8, blen);
iv_len = crypto_skcipher_ivsize(tfm); if (iv_len)
memset(&iv, 0xff, iv_len);
skcipher_request_set_crypt(req, sg, sg, bs, iv);
if (secs) {
ret = test_acipher_jiffies(req, enc,
bs, secs);
cond_resched();
} else {
ret = test_acipher_cycles(req, enc,
bs);
}
if (ret) {
pr_err("%s() failed flags=%x\n", e,
crypto_skcipher_get_flags(tfm)); break;
}
b_size++;
i++;
} while (*b_size);
keysize++;
} while (*keysize);
staticinlineint tcrypt_test(constchar *alg)
{ int ret;
pr_debug("testing %s\n", alg);
ret = alg_test(alg, alg, 0, 0); /* non-fips algs return -EINVAL or -ECANCELED in fips mode */ if (fips_enabled && (ret == -EINVAL || ret == -ECANCELED))
ret = 0; return ret;
}
staticint do_test(constchar *alg, u32 type, u32 mask, int m, u32 num_mb)
{ int i; int ret = 0;
switch (m) { case 0: if (alg) { if (!crypto_has_alg(alg, type,
mask ?: CRYPTO_ALG_TYPE_MASK))
ret = -ENOENT; break;
}
for (i = 1; i < 200; i++)
ret = min(ret, do_test(NULL, 0, 0, i, num_mb)); break;
case 1:
ret = min(ret, tcrypt_test("md5")); break;
case 2:
ret = min(ret, tcrypt_test("sha1")); break;
case 3:
ret = min(ret, tcrypt_test("ecb(des)"));
ret = min(ret, tcrypt_test("cbc(des)"));
ret = min(ret, tcrypt_test("ctr(des)")); break;
case 4:
ret = min(ret, tcrypt_test("ecb(des3_ede)"));
ret = min(ret, tcrypt_test("cbc(des3_ede)"));
ret = min(ret, tcrypt_test("ctr(des3_ede)")); break;
case 5:
ret = min(ret, tcrypt_test("md4")); break;
case 6:
ret = min(ret, tcrypt_test("sha256")); break;
case 7:
ret = min(ret, tcrypt_test("ecb(blowfish)"));
ret = min(ret, tcrypt_test("cbc(blowfish)"));
ret = min(ret, tcrypt_test("ctr(blowfish)")); break;
case 8:
ret = min(ret, tcrypt_test("ecb(twofish)"));
ret = min(ret, tcrypt_test("cbc(twofish)"));
ret = min(ret, tcrypt_test("ctr(twofish)"));
ret = min(ret, tcrypt_test("lrw(twofish)"));
ret = min(ret, tcrypt_test("xts(twofish)")); break;
case 9:
ret = min(ret, tcrypt_test("ecb(serpent)"));
ret = min(ret, tcrypt_test("cbc(serpent)"));
ret = min(ret, tcrypt_test("ctr(serpent)"));
ret = min(ret, tcrypt_test("lrw(serpent)"));
ret = min(ret, tcrypt_test("xts(serpent)")); break;
case 10:
ret = min(ret, tcrypt_test("ecb(aes)"));
ret = min(ret, tcrypt_test("cbc(aes)"));
ret = min(ret, tcrypt_test("lrw(aes)"));
ret = min(ret, tcrypt_test("xts(aes)"));
ret = min(ret, tcrypt_test("ctr(aes)"));
ret = min(ret, tcrypt_test("rfc3686(ctr(aes))"));
ret = min(ret, tcrypt_test("xctr(aes)")); break;
case 11:
ret = min(ret, tcrypt_test("sha384")); break;
case 12:
ret = min(ret, tcrypt_test("sha512")); break;
case 13:
ret = min(ret, tcrypt_test("deflate")); break;
case 14:
ret = min(ret, tcrypt_test("ecb(cast5)"));
ret = min(ret, tcrypt_test("cbc(cast5)"));
ret = min(ret, tcrypt_test("ctr(cast5)")); break;
case 15:
ret = min(ret, tcrypt_test("ecb(cast6)"));
ret = min(ret, tcrypt_test("cbc(cast6)"));
ret = min(ret, tcrypt_test("ctr(cast6)"));
ret = min(ret, tcrypt_test("lrw(cast6)"));
ret = min(ret, tcrypt_test("xts(cast6)")); break;
case 16:
ret = min(ret, tcrypt_test("ecb(arc4)")); break;
case 17:
ret = min(ret, tcrypt_test("michael_mic")); break;
case 18:
ret = min(ret, tcrypt_test("crc32c")); break;
case 19:
ret = min(ret, tcrypt_test("ecb(tea)")); break;
case 20:
ret = min(ret, tcrypt_test("ecb(xtea)")); break;
case 21:
ret = min(ret, tcrypt_test("ecb(khazad)")); break;
case 22:
ret = min(ret, tcrypt_test("wp512")); break;
case 23:
ret = min(ret, tcrypt_test("wp384")); break;
case 24:
ret = min(ret, tcrypt_test("wp256")); break;
case 26:
ret = min(ret, tcrypt_test("ecb(anubis)"));
ret = min(ret, tcrypt_test("cbc(anubis)")); break;
case 30:
ret = min(ret, tcrypt_test("ecb(xeta)")); break;
case 31:
ret = min(ret, tcrypt_test("pcbc(fcrypt)")); break;
case 32:
ret = min(ret, tcrypt_test("ecb(camellia)"));
ret = min(ret, tcrypt_test("cbc(camellia)"));
ret = min(ret, tcrypt_test("ctr(camellia)"));
ret = min(ret, tcrypt_test("lrw(camellia)"));
ret = min(ret, tcrypt_test("xts(camellia)")); break;
case 33:
ret = min(ret, tcrypt_test("sha224")); break;
case 35:
ret = min(ret, tcrypt_test("gcm(aes)")); break;
case 36:
ret = min(ret, tcrypt_test("lzo")); break;
case 37:
ret = min(ret, tcrypt_test("ccm(aes)")); break;
case 38:
ret = min(ret, tcrypt_test("cts(cbc(aes))")); break;
case 39:
ret = min(ret, tcrypt_test("xxhash64")); break;
case 40:
ret = min(ret, tcrypt_test("rmd160")); break;
case 42:
ret = min(ret, tcrypt_test("blake2b-512")); break;
case 43:
ret = min(ret, tcrypt_test("ecb(seed)")); break;
case 45:
ret = min(ret, tcrypt_test("rfc4309(ccm(aes))")); break;
case 46:
ret = min(ret, tcrypt_test("ghash")); break;
case 48:
ret = min(ret, tcrypt_test("sha3-224")); break;
case 49:
ret = min(ret, tcrypt_test("sha3-256")); break;
case 50:
ret = min(ret, tcrypt_test("sha3-384")); break;
case 51:
ret = min(ret, tcrypt_test("sha3-512")); break;
case 52:
ret = min(ret, tcrypt_test("sm3")); break;
case 53:
ret = min(ret, tcrypt_test("streebog256")); break;
case 54:
ret = min(ret, tcrypt_test("streebog512")); break;
case 55:
ret = min(ret, tcrypt_test("gcm(sm4)")); break;
case 56:
ret = min(ret, tcrypt_test("ccm(sm4)")); break;
case 57:
ret = min(ret, tcrypt_test("polyval")); break;
case 58:
ret = min(ret, tcrypt_test("gcm(aria)")); break;
case 59:
ret = min(ret, tcrypt_test("cts(cbc(sm4))")); break;
case 100:
ret = min(ret, tcrypt_test("hmac(md5)")); break;
case 101:
ret = min(ret, tcrypt_test("hmac(sha1)")); break;
case 102:
ret = min(ret, tcrypt_test("hmac(sha256)")); break;
case 103:
ret = min(ret, tcrypt_test("hmac(sha384)")); break;
case 104:
ret = min(ret, tcrypt_test("hmac(sha512)")); break;
case 105:
ret = min(ret, tcrypt_test("hmac(sha224)")); break;
case 106:
ret = min(ret, tcrypt_test("xcbc(aes)")); break;
case 108:
ret = min(ret, tcrypt_test("hmac(rmd160)")); break;
case 111:
ret = min(ret, tcrypt_test("hmac(sha3-224)")); break;
case 112:
ret = min(ret, tcrypt_test("hmac(sha3-256)")); break;
case 113:
ret = min(ret, tcrypt_test("hmac(sha3-384)")); break;
case 114:
ret = min(ret, tcrypt_test("hmac(sha3-512)")); break;
case 115:
ret = min(ret, tcrypt_test("hmac(streebog256)")); break;
case 116:
ret = min(ret, tcrypt_test("hmac(streebog512)")); break;
case 150:
ret = min(ret, tcrypt_test("ansi_cprng")); break;
case 151:
ret = min(ret, tcrypt_test("rfc4106(gcm(aes))")); break;
case 152:
ret = min(ret, tcrypt_test("rfc4543(gcm(aes))")); break;
case 153:
ret = min(ret, tcrypt_test("cmac(aes)")); break;
case 154:
ret = min(ret, tcrypt_test("cmac(des3_ede)")); break;
case 155:
ret = min(ret, tcrypt_test("authenc(hmac(sha1),cbc(aes))")); break;
case 156:
ret = min(ret, tcrypt_test("authenc(hmac(md5),ecb(cipher_null))")); break;
case 157:
ret = min(ret, tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))")); break;
case 158:
ret = min(ret, tcrypt_test("cbcmac(sm4)")); break;
case 159:
ret = min(ret, tcrypt_test("cmac(sm4)")); break;
case 160:
ret = min(ret, tcrypt_test("xcbc(sm4)")); break;
case 181:
ret = min(ret, tcrypt_test("authenc(hmac(sha1),cbc(des))")); break; case 182:
ret = min(ret, tcrypt_test("authenc(hmac(sha1),cbc(des3_ede))")); break; case 183:
ret = min(ret, tcrypt_test("authenc(hmac(sha224),cbc(des))")); break; case 184:
ret = min(ret, tcrypt_test("authenc(hmac(sha224),cbc(des3_ede))")); break; case 185:
ret = min(ret, tcrypt_test("authenc(hmac(sha256),cbc(des))")); break; case 186:
ret = min(ret, tcrypt_test("authenc(hmac(sha256),cbc(des3_ede))")); break; case 187:
ret = min(ret, tcrypt_test("authenc(hmac(sha384),cbc(des))")); break; case 188:
ret = min(ret, tcrypt_test("authenc(hmac(sha384),cbc(des3_ede))")); break; case 189:
ret = min(ret, tcrypt_test("authenc(hmac(sha512),cbc(des))")); break; case 190:
ret = min(ret, tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))")); break; case 191:
ret = min(ret, tcrypt_test("ecb(sm4)"));
ret = min(ret, tcrypt_test("cbc(sm4)"));
ret = min(ret, tcrypt_test("ctr(sm4)"));
ret = min(ret, tcrypt_test("xts(sm4)")); break; case 192:
ret = min(ret, tcrypt_test("ecb(aria)"));
ret = min(ret, tcrypt_test("cbc(aria)"));
ret = min(ret, tcrypt_test("ctr(aria)")); break; case 193:
ret = min(ret, tcrypt_test("ffdhe2048(dh)")); break; case 200:
test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
speed_template_16_24_32);
test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
speed_template_16_24_32);
test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
speed_template_16_24_32);
test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
speed_template_16_24_32);
test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
speed_template_32_40_48);
test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
speed_template_32_40_48);
test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
speed_template_32_64);
test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
speed_template_32_64);
test_cipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
speed_template_16_24_32);
test_cipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
speed_template_16_24_32);
test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
speed_template_16_24_32);
test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
speed_template_16_24_32); break;
¤ Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch Richtigkeit,
noch Qualität der bereit gestellten Informationen zugesichert.0.85Bemerkung:
(vorverarbeitet)
¤
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch Richtigkeit,
noch Qualität der bereit gestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung ist noch experimentell.