return 0;
 }
 
-static void omap_aes_gcm_complete(struct crypto_async_request *req, int err)
-{
-       struct omap_aes_gcm_result *res = req->data;
-
-       if (err == -EINPROGRESS)
-               return;
-
-       res->err = err;
-       complete(&res->completion);
-}
-
 static int do_encrypt_iv(struct aead_request *req, u32 *tag, u32 *iv)
 {
-       struct scatterlist iv_sg, tag_sg;
-       struct skcipher_request *sk_req;
-       struct omap_aes_gcm_result result;
-       struct omap_aes_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
-       int ret = 0;
-
-       sk_req = skcipher_request_alloc(ctx->ctr, GFP_KERNEL);
-       if (!sk_req) {
-               pr_err("skcipher: Failed to allocate request\n");
-               return -ENOMEM;
-       }
-
-       init_completion(&result.completion);
-
-       sg_init_one(&iv_sg, iv, AES_BLOCK_SIZE);
-       sg_init_one(&tag_sg, tag, AES_BLOCK_SIZE);
-       skcipher_request_set_callback(sk_req, CRYPTO_TFM_REQ_MAY_BACKLOG,
-                                     omap_aes_gcm_complete, &result);
-       ret = crypto_skcipher_setkey(ctx->ctr, (u8 *)ctx->key, ctx->keylen);
-       skcipher_request_set_crypt(sk_req, &iv_sg, &tag_sg, AES_BLOCK_SIZE,
-                                  NULL);
-       ret = crypto_skcipher_encrypt(sk_req);
-       switch (ret) {
-       case 0:
-               break;
-       case -EINPROGRESS:
-       case -EBUSY:
-               ret = wait_for_completion_interruptible(&result.completion);
-               if (!ret) {
-                       ret = result.err;
-                       if (!ret) {
-                               reinit_completion(&result.completion);
-                               break;
-                       }
-               }
-               /* fall through */
-       default:
-               pr_err("Encryption of IV failed for GCM mode\n");
-               break;
-       }
+       struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
 
-       skcipher_request_free(sk_req);
-       return ret;
+       /*
+        * Single-block AES encryption of @iv into @tag using the library
+        * key schedule (ctx->actx, built in the setkey paths).  This is
+        * synchronous and returns no error, so the old asynchronous
+        * "ecb(aes)" skcipher request, its completion callback and all
+        * of its error/wait handling are no longer needed.
+        */
+       aes_encrypt(&ctx->actx, (u8 *)tag, (u8 *)iv);
+       return 0;
 }
 
 void omap_aes_gcm_dma_out_callback(void *data)
 static int omap_aes_gcm_handle_queue(struct omap_aes_dev *dd,
                                     struct aead_request *req)
 {
-       struct omap_aes_ctx *ctx;
+       struct omap_aes_gcm_ctx *ctx;
        struct aead_request *backlog;
        struct omap_aes_reqctx *rctx;
        unsigned long flags;
        ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
        rctx = aead_request_ctx(req);
 
-       dd->ctx = ctx;
+       /*
+        * The device only consumes the embedded hardware context (octx);
+        * the software AES key schedule (actx) stays private to the
+        * GCM-level ctx wrapper.
+        */
+       dd->ctx = &ctx->octx;
        rctx->dd = dd;
        dd->aead_req = req;
 
 int omap_aes_4106gcm_encrypt(struct aead_request *req)
 {
-       struct omap_aes_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
+       struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
        struct omap_aes_reqctx *rctx = aead_request_ctx(req);
 
+       /* RFC4106 IV = 4-byte nonce from the key blob || 8-byte req IV. */
-       memcpy(rctx->iv, ctx->nonce, 4);
+       memcpy(rctx->iv, ctx->octx.nonce, 4);
        memcpy(rctx->iv + 4, req->iv, 8);
        return crypto_ipsec_check_assoclen(req->assoclen) ?:
               omap_aes_gcm_crypt(req, FLAGS_ENCRYPT | FLAGS_GCM |
 
 int omap_aes_4106gcm_decrypt(struct aead_request *req)
 {
-       struct omap_aes_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
+       struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
        struct omap_aes_reqctx *rctx = aead_request_ctx(req);
 
+       /* RFC4106 IV = 4-byte nonce from the key blob || 8-byte req IV. */
-       memcpy(rctx->iv, ctx->nonce, 4);
+       memcpy(rctx->iv, ctx->octx.nonce, 4);
        memcpy(rctx->iv + 4, req->iv, 8);
        return crypto_ipsec_check_assoclen(req->assoclen) ?:
               omap_aes_gcm_crypt(req, FLAGS_GCM | FLAGS_RFC4106_GCM);
 int omap_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key,
                        unsigned int keylen)
 {
-       struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm);
+       struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm);
+       int ret;
 
-       if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
-           keylen != AES_KEYSIZE_256)
-               return -EINVAL;
+       /*
+        * aes_expandkey() both validates keylen and builds the software
+        * key schedule used by do_encrypt_iv(), replacing the open-coded
+        * AES_KEYSIZE_* checks.
+        */
+       ret = aes_expandkey(&ctx->actx, key, keylen);
+       if (ret)
+               return ret;
 
-       memcpy(ctx->key, key, keylen);
-       ctx->keylen = keylen;
+       /* The raw key is still kept in octx, which the driver hands to
+        * the device in omap_aes_gcm_handle_queue(). */
+       memcpy(ctx->octx.key, key, keylen);
+       ctx->octx.keylen = keylen;
 
        return 0;
 }
 int omap_aes_4106gcm_setkey(struct crypto_aead *tfm, const u8 *key,
                            unsigned int keylen)
 {
-       struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm);
+       struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm);
+       int ret;
 
+       /* RFC4106 key material: AES key followed by a 4-byte nonce. */
        if (keylen < 4)
                return -EINVAL;
-
        keylen -= 4;
-       if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
-           keylen != AES_KEYSIZE_256)
-               return -EINVAL;
 
-       memcpy(ctx->key, key, keylen);
-       memcpy(ctx->nonce, key + keylen, 4);
-       ctx->keylen = keylen;
+       /* aes_expandkey() validates the remaining length and fills the
+        * software key schedule used by do_encrypt_iv(). */
+       ret = aes_expandkey(&ctx->actx, key, keylen);
+       if (ret)
+               return ret;
+
+       memcpy(ctx->octx.key, key, keylen);
+       memcpy(ctx->octx.nonce, key + keylen, 4);
+       ctx->octx.keylen = keylen;
 
        return 0;
 }
 
 static int omap_aes_gcm_cra_init(struct crypto_aead *tfm)
 {
        struct omap_aes_dev *dd = NULL;
-       struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm);
        int err;
 
        /* Find AES device, currently picks the first device */
        }
 
        tfm->reqsize = sizeof(struct omap_aes_reqctx);
-       ctx->ctr = crypto_alloc_skcipher("ecb(aes)", 0, 0);
-       if (IS_ERR(ctx->ctr)) {
-               pr_warn("could not load aes driver for encrypting IV\n");
-               return PTR_ERR(ctx->ctr);
-       }
-
+       /* No "ecb(aes)" skcipher is allocated any more: do_encrypt_iv()
+        * now calls the AES library cipher directly. */
        return 0;
 }
 
        ctx->fallback = NULL;
 }
 
-static void omap_aes_gcm_cra_exit(struct crypto_aead *tfm)
-{
-       struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm);
-
-       if (ctx->fallback)
-               crypto_free_sync_skcipher(ctx->fallback);
-
-       ctx->fallback = NULL;
-
-       if (ctx->ctr)
-               crypto_free_skcipher(ctx->ctr);
-}
-
 /* ********************** ALGS ************************************ */
 
 static struct skcipher_alg algs_ecb_cbc[] = {
                .cra_flags              = CRYPTO_ALG_ASYNC |
                                          CRYPTO_ALG_KERN_DRIVER_ONLY,
                .cra_blocksize          = 1,
-               .cra_ctxsize            = sizeof(struct omap_aes_ctx),
+               .cra_ctxsize            = sizeof(struct omap_aes_gcm_ctx),
                .cra_alignmask          = 0xf,
                .cra_module             = THIS_MODULE,
        },
        .init           = omap_aes_gcm_cra_init,
-       .exit           = omap_aes_gcm_cra_exit,
        .ivsize         = GCM_AES_IV_SIZE,
        .maxauthsize    = AES_BLOCK_SIZE,
        .setkey         = omap_aes_gcm_setkey,
                .cra_flags              = CRYPTO_ALG_ASYNC |
                                          CRYPTO_ALG_KERN_DRIVER_ONLY,
                .cra_blocksize          = 1,
-               .cra_ctxsize            = sizeof(struct omap_aes_ctx),
+               .cra_ctxsize            = sizeof(struct omap_aes_gcm_ctx),
                .cra_alignmask          = 0xf,
                .cra_module             = THIS_MODULE,
        },
        .init           = omap_aes_gcm_cra_init,
-       .exit           = omap_aes_gcm_cra_exit,
        .maxauthsize    = AES_BLOCK_SIZE,
        .ivsize         = GCM_RFC4106_IV_SIZE,
        .setkey         = omap_aes_4106gcm_setkey,