list_add(&larval->alg.cra_list, &crypto_alg_list);
 
 #ifdef CONFIG_CRYPTO_STATS
-       atomic_set(&alg->encrypt_cnt, 0);
-       atomic_set(&alg->decrypt_cnt, 0);
+       atomic64_set(&alg->encrypt_cnt, 0);
+       atomic64_set(&alg->decrypt_cnt, 0);
        atomic64_set(&alg->encrypt_tlen, 0);
        atomic64_set(&alg->decrypt_tlen, 0);
-       atomic_set(&alg->verify_cnt, 0);
-       atomic_set(&alg->cipher_err_cnt, 0);
-       atomic_set(&alg->sign_cnt, 0);
+       atomic64_set(&alg->verify_cnt, 0);
+       atomic64_set(&alg->cipher_err_cnt, 0);
+       atomic64_set(&alg->sign_cnt, 0);
 #endif
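
The hunk above follows from the type change in struct crypto_alg further down: the per-algorithm counters become atomic64_t, so their initialisation moves from atomic_set() to atomic64_set(). The practical point of the widening is that a 32-bit counter wraps after roughly 4.29 billion operations, which the byte totals (already atomic64_t) never did. A minimal userspace illustration of that wraparound (plain C, not kernel code):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t cnt32 = UINT32_MAX;	/* a saturated 32-bit counter */
	uint64_t cnt64 = UINT32_MAX;	/* the same value held in 64 bits */

	cnt32++;	/* wraps back to 0, losing the history */
	cnt64++;	/* keeps counting */

	printf("32-bit counter after one more op: %u\n", (unsigned)cnt32);
	printf("64-bit counter after one more op: %llu\n",
	       (unsigned long long)cnt64);
	return 0;
}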
 
 out:
 
 {
        struct crypto_stat raead;
        u64 v64;
-       u32 v32;
 
        memset(&raead, 0, sizeof(raead));
 
        strscpy(raead.type, "aead", sizeof(raead.type));
 
-       v32 = atomic_read(&alg->encrypt_cnt);
-       raead.stat_encrypt_cnt = v32;
+       v64 = atomic64_read(&alg->encrypt_cnt);
+       raead.stat_encrypt_cnt = v64;
        v64 = atomic64_read(&alg->encrypt_tlen);
        raead.stat_encrypt_tlen = v64;
-       v32 = atomic_read(&alg->decrypt_cnt);
-       raead.stat_decrypt_cnt = v32;
+       v64 = atomic64_read(&alg->decrypt_cnt);
+       raead.stat_decrypt_cnt = v64;
        v64 = atomic64_read(&alg->decrypt_tlen);
        raead.stat_decrypt_tlen = v64;
-       v32 = atomic_read(&alg->aead_err_cnt);
-       raead.stat_aead_err_cnt = v32;
+       v64 = atomic64_read(&alg->aead_err_cnt);
+       raead.stat_aead_err_cnt = v64;
 
        return nla_put(skb, CRYPTOCFGA_STAT_AEAD, sizeof(raead), &raead);
 }
 {
        struct crypto_stat rcipher;
        u64 v64;
-       u32 v32;
 
        memset(&rcipher, 0, sizeof(rcipher));
 
        strscpy(rcipher.type, "cipher", sizeof(rcipher.type));
 
-       v32 = atomic_read(&alg->encrypt_cnt);
-       rcipher.stat_encrypt_cnt = v32;
+       v64 = atomic64_read(&alg->encrypt_cnt);
+       rcipher.stat_encrypt_cnt = v64;
        v64 = atomic64_read(&alg->encrypt_tlen);
        rcipher.stat_encrypt_tlen = v64;
-       v32 = atomic_read(&alg->decrypt_cnt);
-       rcipher.stat_decrypt_cnt = v32;
+       v64 = atomic64_read(&alg->decrypt_cnt);
+       rcipher.stat_decrypt_cnt = v64;
        v64 = atomic64_read(&alg->decrypt_tlen);
        rcipher.stat_decrypt_tlen = v64;
-       v32 = atomic_read(&alg->cipher_err_cnt);
-       rcipher.stat_cipher_err_cnt = v32;
+       v64 = atomic64_read(&alg->cipher_err_cnt);
+       rcipher.stat_cipher_err_cnt = v64;
 
        return nla_put(skb, CRYPTOCFGA_STAT_CIPHER, sizeof(rcipher), &rcipher);
 }
 {
        struct crypto_stat rcomp;
        u64 v64;
-       u32 v32;
 
        memset(&rcomp, 0, sizeof(rcomp));
 
        strscpy(rcomp.type, "compression", sizeof(rcomp.type));
-       v32 = atomic_read(&alg->compress_cnt);
-       rcomp.stat_compress_cnt = v32;
+       v64 = atomic64_read(&alg->compress_cnt);
+       rcomp.stat_compress_cnt = v64;
        v64 = atomic64_read(&alg->compress_tlen);
        rcomp.stat_compress_tlen = v64;
-       v32 = atomic_read(&alg->decompress_cnt);
-       rcomp.stat_decompress_cnt = v32;
+       v64 = atomic64_read(&alg->decompress_cnt);
+       rcomp.stat_decompress_cnt = v64;
        v64 = atomic64_read(&alg->decompress_tlen);
        rcomp.stat_decompress_tlen = v64;
-       v32 = atomic_read(&alg->cipher_err_cnt);
-       rcomp.stat_compress_err_cnt = v32;
+       v64 = atomic64_read(&alg->cipher_err_cnt);
+       rcomp.stat_compress_err_cnt = v64;
 
        return nla_put(skb, CRYPTOCFGA_STAT_COMPRESS, sizeof(rcomp), &rcomp);
 }
 {
        struct crypto_stat racomp;
        u64 v64;
-       u32 v32;
 
        memset(&racomp, 0, sizeof(racomp));
 
        strscpy(racomp.type, "acomp", sizeof(racomp.type));
-       v32 = atomic_read(&alg->compress_cnt);
-       racomp.stat_compress_cnt = v32;
+       v64 = atomic64_read(&alg->compress_cnt);
+       racomp.stat_compress_cnt = v64;
        v64 = atomic64_read(&alg->compress_tlen);
        racomp.stat_compress_tlen = v64;
-       v32 = atomic_read(&alg->decompress_cnt);
-       racomp.stat_decompress_cnt = v32;
+       v64 = atomic64_read(&alg->decompress_cnt);
+       racomp.stat_decompress_cnt = v64;
        v64 = atomic64_read(&alg->decompress_tlen);
        racomp.stat_decompress_tlen = v64;
-       v32 = atomic_read(&alg->cipher_err_cnt);
-       racomp.stat_compress_err_cnt = v32;
+       v64 = atomic64_read(&alg->cipher_err_cnt);
+       racomp.stat_compress_err_cnt = v64;
 
        return nla_put(skb, CRYPTOCFGA_STAT_ACOMP, sizeof(racomp), &racomp);
 }
 {
        struct crypto_stat rakcipher;
        u64 v64;
-       u32 v32;
 
        memset(&rakcipher, 0, sizeof(rakcipher));
 
        strscpy(rakcipher.type, "akcipher", sizeof(rakcipher.type));
-       v32 = atomic_read(&alg->encrypt_cnt);
-       rakcipher.stat_encrypt_cnt = v32;
+       v64 = atomic64_read(&alg->encrypt_cnt);
+       rakcipher.stat_encrypt_cnt = v64;
        v64 = atomic64_read(&alg->encrypt_tlen);
        rakcipher.stat_encrypt_tlen = v64;
-       v32 = atomic_read(&alg->decrypt_cnt);
-       rakcipher.stat_decrypt_cnt = v32;
+       v64 = atomic64_read(&alg->decrypt_cnt);
+       rakcipher.stat_decrypt_cnt = v64;
        v64 = atomic64_read(&alg->decrypt_tlen);
        rakcipher.stat_decrypt_tlen = v64;
-       v32 = atomic_read(&alg->sign_cnt);
-       rakcipher.stat_sign_cnt = v32;
-       v32 = atomic_read(&alg->verify_cnt);
-       rakcipher.stat_verify_cnt = v32;
-       v32 = atomic_read(&alg->akcipher_err_cnt);
-       rakcipher.stat_akcipher_err_cnt = v32;
+       v64 = atomic64_read(&alg->sign_cnt);
+       rakcipher.stat_sign_cnt = v64;
+       v64 = atomic64_read(&alg->verify_cnt);
+       rakcipher.stat_verify_cnt = v64;
+       v64 = atomic64_read(&alg->akcipher_err_cnt);
+       rakcipher.stat_akcipher_err_cnt = v64;
 
        return nla_put(skb, CRYPTOCFGA_STAT_AKCIPHER,
                       sizeof(rakcipher), &rakcipher);
 static int crypto_report_kpp(struct sk_buff *skb, struct crypto_alg *alg)
 {
        struct crypto_stat rkpp;
-       u32 v;
+       u64 v;
 
        memset(&rkpp, 0, sizeof(rkpp));
 
        strscpy(rkpp.type, "kpp", sizeof(rkpp.type));
 
-       v = atomic_read(&alg->setsecret_cnt);
+       v = atomic64_read(&alg->setsecret_cnt);
        rkpp.stat_setsecret_cnt = v;
-       v = atomic_read(&alg->generate_public_key_cnt);
+       v = atomic64_read(&alg->generate_public_key_cnt);
        rkpp.stat_generate_public_key_cnt = v;
-       v = atomic_read(&alg->compute_shared_secret_cnt);
+       v = atomic64_read(&alg->compute_shared_secret_cnt);
        rkpp.stat_compute_shared_secret_cnt = v;
-       v = atomic_read(&alg->kpp_err_cnt);
+       v = atomic64_read(&alg->kpp_err_cnt);
        rkpp.stat_kpp_err_cnt = v;
 
        return nla_put(skb, CRYPTOCFGA_STAT_KPP, sizeof(rkpp), &rkpp);
 {
        struct crypto_stat rhash;
        u64 v64;
-       u32 v32;
 
        memset(&rhash, 0, sizeof(rhash));
 
        strscpy(rhash.type, "ahash", sizeof(rhash.type));
 
-       v32 = atomic_read(&alg->hash_cnt);
-       rhash.stat_hash_cnt = v32;
+       v64 = atomic64_read(&alg->hash_cnt);
+       rhash.stat_hash_cnt = v64;
        v64 = atomic64_read(&alg->hash_tlen);
        rhash.stat_hash_tlen = v64;
-       v32 = atomic_read(&alg->hash_err_cnt);
-       rhash.stat_hash_err_cnt = v32;
+       v64 = atomic64_read(&alg->hash_err_cnt);
+       rhash.stat_hash_err_cnt = v64;
 
        return nla_put(skb, CRYPTOCFGA_STAT_HASH, sizeof(rhash), &rhash);
 }
 {
        struct crypto_stat rhash;
        u64 v64;
-       u32 v32;
 
        memset(&rhash, 0, sizeof(rhash));
 
        strscpy(rhash.type, "shash", sizeof(rhash.type));
 
-       v32 = atomic_read(&alg->hash_cnt);
-       rhash.stat_hash_cnt = v32;
+       v64 = atomic64_read(&alg->hash_cnt);
+       rhash.stat_hash_cnt = v64;
        v64 = atomic64_read(&alg->hash_tlen);
        rhash.stat_hash_tlen = v64;
-       v32 = atomic_read(&alg->hash_err_cnt);
-       rhash.stat_hash_err_cnt = v32;
+       v64 = atomic64_read(&alg->hash_err_cnt);
+       rhash.stat_hash_err_cnt = v64;
 
        return nla_put(skb, CRYPTOCFGA_STAT_HASH, sizeof(rhash), &rhash);
 }
 {
        struct crypto_stat rrng;
        u64 v64;
-       u32 v32;
 
        memset(&rrng, 0, sizeof(rrng));
 
        strscpy(rrng.type, "rng", sizeof(rrng.type));
 
-       v32 = atomic_read(&alg->generate_cnt);
-       rrng.stat_generate_cnt = v32;
+       v64 = atomic64_read(&alg->generate_cnt);
+       rrng.stat_generate_cnt = v64;
        v64 = atomic64_read(&alg->generate_tlen);
        rrng.stat_generate_tlen = v64;
-       v32 = atomic_read(&alg->seed_cnt);
-       rrng.stat_seed_cnt = v32;
-       v32 = atomic_read(&alg->hash_err_cnt);
-       rrng.stat_rng_err_cnt = v32;
+       v64 = atomic64_read(&alg->seed_cnt);
+       rrng.stat_seed_cnt = v64;
+       v64 = atomic64_read(&alg->hash_err_cnt);
+       rrng.stat_rng_err_cnt = v64;
 
        return nla_put(skb, CRYPTOCFGA_STAT_RNG, sizeof(rrng), &rrng);
 }
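
Every crypto_report_*() helper above gets the same treatment: the u32 temporary is dropped, the counter is read with atomic64_read(), and the value lands in the now 64-bit stat_* field of struct crypto_stat, so nothing is truncated on its way to userspace. A standalone sketch (ordinary C, hypothetical values) of what the old u32 temporary would have done to a large counter:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t counter = 5000000000ULL;	/* larger than UINT32_MAX */
	uint32_t v32 = (uint32_t)counter;	/* old path: high bits silently dropped */
	uint64_t v64 = counter;			/* new path: value preserved */

	printf("through a u32 temporary: %u\n", (unsigned)v32);
	printf("through a u64 temporary: %llu\n", (unsigned long long)v64);
	return 0;
}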
 
        struct crypto_acomp *tfm = crypto_acomp_reqtfm(req);
 
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-               atomic_inc(&tfm->base.__crt_alg->compress_err_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->compress_err_cnt);
        } else {
-               atomic_inc(&tfm->base.__crt_alg->compress_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->compress_cnt);
                atomic64_add(req->slen, &tfm->base.__crt_alg->compress_tlen);
        }
 #endif
        struct crypto_acomp *tfm = crypto_acomp_reqtfm(req);
 
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-               atomic_inc(&tfm->base.__crt_alg->compress_err_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->compress_err_cnt);
        } else {
-               atomic_inc(&tfm->base.__crt_alg->decompress_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->decompress_cnt);
                atomic64_add(req->slen, &tfm->base.__crt_alg->decompress_tlen);
        }
 #endif
 
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-               atomic_inc(&tfm->base.__crt_alg->aead_err_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->aead_err_cnt);
        } else {
-               atomic_inc(&tfm->base.__crt_alg->encrypt_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->encrypt_cnt);
                atomic64_add(req->cryptlen, &tfm->base.__crt_alg->encrypt_tlen);
        }
 #endif
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-               atomic_inc(&tfm->base.__crt_alg->aead_err_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->aead_err_cnt);
        } else {
-               atomic_inc(&tfm->base.__crt_alg->decrypt_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->decrypt_cnt);
                atomic64_add(req->cryptlen, &tfm->base.__crt_alg->decrypt_tlen);
        }
 #endif
 
        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
 
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-               atomic_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
        } else {
-               atomic_inc(&tfm->base.__crt_alg->encrypt_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->encrypt_cnt);
                atomic64_add(req->src_len, &tfm->base.__crt_alg->encrypt_tlen);
        }
 #endif
        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
 
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-               atomic_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
        } else {
-               atomic_inc(&tfm->base.__crt_alg->decrypt_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->decrypt_cnt);
                atomic64_add(req->src_len, &tfm->base.__crt_alg->decrypt_tlen);
        }
 #endif
        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
 
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
-               atomic_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
        else
-               atomic_inc(&tfm->base.__crt_alg->sign_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->sign_cnt);
 #endif
 }
 
        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
 
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
-               atomic_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
        else
-               atomic_inc(&tfm->base.__crt_alg->verify_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->verify_cnt);
 #endif
 }
 
 
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
 
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
-               atomic_inc(&tfm->base.__crt_alg->hash_err_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->hash_err_cnt);
        else
                atomic64_add(req->nbytes, &tfm->base.__crt_alg->hash_tlen);
 #endif
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
 
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-               atomic_inc(&tfm->base.__crt_alg->hash_err_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->hash_err_cnt);
        } else {
-               atomic_inc(&tfm->base.__crt_alg->hash_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->hash_cnt);
                atomic64_add(req->nbytes, &tfm->base.__crt_alg->hash_tlen);
        }
 #endif
 
 {
 #ifdef CONFIG_CRYPTO_STATS
        if (ret)
-               atomic_inc(&tfm->base.__crt_alg->kpp_err_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->kpp_err_cnt);
        else
-               atomic_inc(&tfm->base.__crt_alg->setsecret_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->setsecret_cnt);
 #endif
 }
 
        struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
 
        if (ret)
-               atomic_inc(&tfm->base.__crt_alg->kpp_err_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->kpp_err_cnt);
        else
-               atomic_inc(&tfm->base.__crt_alg->generate_public_key_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->generate_public_key_cnt);
 #endif
 }
 
        struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
 
        if (ret)
-               atomic_inc(&tfm->base.__crt_alg->kpp_err_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->kpp_err_cnt);
        else
-               atomic_inc(&tfm->base.__crt_alg->compute_shared_secret_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->compute_shared_secret_cnt);
 #endif
 }
 
 
 {
 #ifdef CONFIG_CRYPTO_STATS
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
-               atomic_inc(&tfm->base.__crt_alg->rng_err_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->rng_err_cnt);
        else
-               atomic_inc(&tfm->base.__crt_alg->seed_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->seed_cnt);
 #endif
 }
 
 {
 #ifdef CONFIG_CRYPTO_STATS
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-               atomic_inc(&tfm->base.__crt_alg->rng_err_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->rng_err_cnt);
        } else {
-               atomic_inc(&tfm->base.__crt_alg->generate_cnt);
+               atomic64_inc(&tfm->base.__crt_alg->generate_cnt);
                atomic64_add(dlen, &tfm->base.__crt_alg->generate_tlen);
        }
 #endif
 
 {
 #ifdef CONFIG_CRYPTO_STATS
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-               atomic_inc(&alg->cipher_err_cnt);
+               atomic64_inc(&alg->cipher_err_cnt);
        } else {
-               atomic_inc(&alg->encrypt_cnt);
+               atomic64_inc(&alg->encrypt_cnt);
                atomic64_add(req->cryptlen, &alg->encrypt_tlen);
        }
 #endif
 {
 #ifdef CONFIG_CRYPTO_STATS
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-               atomic_inc(&alg->cipher_err_cnt);
+               atomic64_inc(&alg->cipher_err_cnt);
        } else {
-               atomic_inc(&alg->decrypt_cnt);
+               atomic64_inc(&alg->decrypt_cnt);
                atomic64_add(req->cryptlen, &alg->decrypt_tlen);
        }
 #endif
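
The completion-path helpers above share one shape: a non-zero status bumps the error counter (the asynchronous ones additionally treat -EINPROGRESS and -EBUSY as "still in flight" rather than as errors, while the kpp helpers just check ret), and success bumps the operation counter plus, where a length is available, a 64-bit byte total. Only atomic_inc() becomes atomic64_inc(); the logic is untouched. A userspace analogue of the pattern using C11 atomics (the kernel uses atomic64_inc()/atomic64_add(); the names below are illustrative, not kernel symbols):

#include <errno.h>
#include <stdatomic.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

static _Atomic uint64_t encrypt_cnt;	/* successful requests */
static _Atomic uint64_t encrypt_tlen;	/* bytes processed */
static _Atomic uint64_t cipher_err_cnt;	/* failed requests */

static void stat_encrypt(size_t cryptlen, int ret)
{
	/* -EINPROGRESS/-EBUSY mean the request is still running, not failed. */
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic_fetch_add(&cipher_err_cnt, 1);
	} else {
		atomic_fetch_add(&encrypt_cnt, 1);
		atomic_fetch_add(&encrypt_tlen, cryptlen);
	}
}

int main(void)
{
	stat_encrypt(4096, 0);		/* success */
	stat_encrypt(4096, -EINVAL);	/* failure */
	printf("ok=%llu bytes=%llu errors=%llu\n",
	       (unsigned long long)encrypt_cnt,
	       (unsigned long long)encrypt_tlen,
	       (unsigned long long)cipher_err_cnt);
	return 0;
}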
 
 
 #ifdef CONFIG_CRYPTO_STATS
        union {
-               atomic_t encrypt_cnt;
-               atomic_t compress_cnt;
-               atomic_t generate_cnt;
-               atomic_t hash_cnt;
-               atomic_t setsecret_cnt;
+               atomic64_t encrypt_cnt;
+               atomic64_t compress_cnt;
+               atomic64_t generate_cnt;
+               atomic64_t hash_cnt;
+               atomic64_t setsecret_cnt;
        };
        union {
                atomic64_t encrypt_tlen;
                atomic64_t hash_tlen;
        };
        union {
-               atomic_t akcipher_err_cnt;
-               atomic_t cipher_err_cnt;
-               atomic_t compress_err_cnt;
-               atomic_t aead_err_cnt;
-               atomic_t hash_err_cnt;
-               atomic_t rng_err_cnt;
-               atomic_t kpp_err_cnt;
+               atomic64_t akcipher_err_cnt;
+               atomic64_t cipher_err_cnt;
+               atomic64_t compress_err_cnt;
+               atomic64_t aead_err_cnt;
+               atomic64_t hash_err_cnt;
+               atomic64_t rng_err_cnt;
+               atomic64_t kpp_err_cnt;
        };
        union {
-               atomic_t decrypt_cnt;
-               atomic_t decompress_cnt;
-               atomic_t seed_cnt;
-               atomic_t generate_public_key_cnt;
+               atomic64_t decrypt_cnt;
+               atomic64_t decompress_cnt;
+               atomic64_t seed_cnt;
+               atomic64_t generate_public_key_cnt;
        };
        union {
                atomic64_t decrypt_tlen;
                atomic64_t decompress_tlen;
        };
        union {
-               atomic_t verify_cnt;
-               atomic_t compute_shared_secret_cnt;
+               atomic64_t verify_cnt;
+               atomic64_t compute_shared_secret_cnt;
        };
-       atomic_t sign_cnt;
+       atomic64_t sign_cnt;
 #endif /* CONFIG_CRYPTO_STATS */
 
 } CRYPTO_MINALIGN_ATTR;
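
In struct crypto_alg the counters that can never be live at the same time (an algorithm instance is an AEAD or a KPP or an RNG, never several at once) alias each other through anonymous unions, so widening every member to atomic64_t keeps each union a single, consistently sized 64-bit slot. A standalone mock of that sizing rule (not the kernel layout):

#include <stdint.h>

union cnt_slot {
	uint64_t encrypt_cnt;
	uint64_t compress_cnt;
	uint64_t generate_cnt;
	uint64_t hash_cnt;
	uint64_t setsecret_cnt;
};

/* Every alias shares the same storage, so the union costs exactly one u64. */
_Static_assert(sizeof(union cnt_slot) == sizeof(uint64_t),
	       "all counter aliases must share one 64-bit slot");

int main(void)
{
	return 0;
}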
                crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
 
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-               atomic_inc(&crt->base->base.__crt_alg->cipher_err_cnt);
+               atomic64_inc(&crt->base->base.__crt_alg->cipher_err_cnt);
        } else {
-               atomic_inc(&crt->base->base.__crt_alg->encrypt_cnt);
+               atomic64_inc(&crt->base->base.__crt_alg->encrypt_cnt);
                atomic64_add(req->nbytes, &crt->base->base.__crt_alg->encrypt_tlen);
        }
 #endif
                crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
 
        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-               atomic_inc(&crt->base->base.__crt_alg->cipher_err_cnt);
+               atomic64_inc(&crt->base->base.__crt_alg->cipher_err_cnt);
        } else {
-               atomic_inc(&crt->base->base.__crt_alg->decrypt_cnt);
+               atomic64_inc(&crt->base->base.__crt_alg->decrypt_cnt);
                atomic64_add(req->nbytes, &crt->base->base.__crt_alg->decrypt_tlen);
        }
 #endif
 
 struct crypto_stat {
        char type[CRYPTO_MAX_NAME];
        union {
-               __u32 stat_encrypt_cnt;
-               __u32 stat_compress_cnt;
-               __u32 stat_generate_cnt;
-               __u32 stat_hash_cnt;
-               __u32 stat_setsecret_cnt;
+               __u64 stat_encrypt_cnt;
+               __u64 stat_compress_cnt;
+               __u64 stat_generate_cnt;
+               __u64 stat_hash_cnt;
+               __u64 stat_setsecret_cnt;
        };
        union {
                __u64 stat_encrypt_tlen;
                __u64 stat_hash_tlen;
        };
        union {
-               __u32 stat_akcipher_err_cnt;
-               __u32 stat_cipher_err_cnt;
-               __u32 stat_compress_err_cnt;
-               __u32 stat_aead_err_cnt;
-               __u32 stat_hash_err_cnt;
-               __u32 stat_rng_err_cnt;
-               __u32 stat_kpp_err_cnt;
+               __u64 stat_akcipher_err_cnt;
+               __u64 stat_cipher_err_cnt;
+               __u64 stat_compress_err_cnt;
+               __u64 stat_aead_err_cnt;
+               __u64 stat_hash_err_cnt;
+               __u64 stat_rng_err_cnt;
+               __u64 stat_kpp_err_cnt;
        };
        union {
-               __u32 stat_decrypt_cnt;
-               __u32 stat_decompress_cnt;
-               __u32 stat_seed_cnt;
-               __u32 stat_generate_public_key_cnt;
+               __u64 stat_decrypt_cnt;
+               __u64 stat_decompress_cnt;
+               __u64 stat_seed_cnt;
+               __u64 stat_generate_public_key_cnt;
        };
        union {
                __u64 stat_decrypt_tlen;
                __u64 stat_decompress_tlen;
        };
        union {
-               __u32 stat_verify_cnt;
-               __u32 stat_compute_shared_secret_cnt;
+               __u64 stat_verify_cnt;
+               __u64 stat_compute_shared_secret_cnt;
        };
-       __u32 stat_sign_cnt;
+       __u64 stat_sign_cnt;
 };
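
struct crypto_stat here appears to be the userspace-visible layout (note the __u32/__u64 types) that the kernel copies into the CRYPTOCFGA_STAT_* netlink attributes via nla_put() above, so widening the fields grows the attribute payload and any consumer has to be rebuilt against the updated header. A trivial size check, assuming a uapi header that already carries this change (the include path and header availability are assumptions):

#include <stdio.h>
#include <linux/cryptouser.h>	/* assumed to define struct crypto_stat */

int main(void)
{
	printf("sizeof(struct crypto_stat) = %zu\n",
	       sizeof(struct crypto_stat));
	return 0;
}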
 
 struct crypto_report_larval {