};
 
 static void s5p_set_aes(struct s5p_aes_dev *dev,
-                       const u8 *key, const u8 *iv,
+                       const u8 *key, const u8 *iv, const u8 *ctr,
                        unsigned int keylen)
 {
        void __iomem *keystart;
 
        if (iv)
                memcpy_toio(dev->aes_ioaddr + SSS_REG_AES_IV_DATA(0), iv, 0x10);
 
+       /* For CTR mode, program the 128-bit initial counter value */
+       if (ctr)
+               memcpy_toio(dev->aes_ioaddr + SSS_REG_AES_CNT_DATA(0), ctr, 0x10);
+
        if (keylen == AES_KEYSIZE_256)
                keystart = dev->aes_ioaddr + SSS_REG_AES_KEY_DATA(0);
        else if (keylen == AES_KEYSIZE_192)
@@ ... @@ static void s5p_aes_crypt_start(struct s5p_aes_dev *dev, unsigned long mode)
        u32 aes_control;
        unsigned long flags;
        int err;
-       u8 *iv;
+       u8 *iv, *ctr;
 
+       /* This sets bits [13:12] to 00, which selects the 128-bit counter */
        aes_control = SSS_AES_KEY_CHANGE_MODE;
        if (mode & FLAGS_AES_DECRYPT)
                aes_control |= SSS_AES_MODE_DECRYPT;
        if ((mode & FLAGS_AES_MODE_MASK) == FLAGS_AES_CBC) {
                aes_control |= SSS_AES_CHAIN_MODE_CBC;
                iv = req->info;
+               ctr = NULL;
        } else if ((mode & FLAGS_AES_MODE_MASK) == FLAGS_AES_CTR) {
                aes_control |= SSS_AES_CHAIN_MODE_CTR;
-               iv = req->info;
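+               /* For CTR, req->info holds the initial counter block */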
+               iv = NULL;
+               ctr = req->info;
        } else {
                iv = NULL; /* AES_ECB */
+               ctr = NULL;
        }
 
        if (dev->ctx->keylen == AES_KEYSIZE_192)
@@ ... @@ static void s5p_aes_crypt_start(struct s5p_aes_dev *dev, unsigned long mode)
                goto outdata_error;
 
        SSS_AES_WRITE(dev, AES_CONTROL, aes_control);
-       s5p_set_aes(dev, dev->ctx->aes_key, iv, dev->ctx->keylen);
+       s5p_set_aes(dev, dev->ctx->aes_key, iv, ctr, dev->ctx->keylen);
 
        s5p_set_dma_indata(dev,  dev->sg_src);
        s5p_set_dma_outdata(dev, dev->sg_dst);
@@ ... @@ static int s5p_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
        struct s5p_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct s5p_aes_dev *dev = ctx->dev;
 
-       if (!IS_ALIGNED(req->nbytes, AES_BLOCK_SIZE)) {
+       /* CTR is a stream mode; only ECB/CBC need block-aligned lengths */
+       if (!IS_ALIGNED(req->nbytes, AES_BLOCK_SIZE) &&
+                       ((mode & FLAGS_AES_MODE_MASK) != FLAGS_AES_CTR)) {
                dev_err(dev->dev, "request size is not exact amount of AES blocks\n");
                return -EINVAL;
        }
@@ ... @@ static int s5p_aes_cbc_decrypt(struct ablkcipher_request *req)
        return s5p_aes_crypt(req, FLAGS_AES_DECRYPT | FLAGS_AES_CBC);
 }
 
+/* CTR encryption and decryption are the same operation */
+static int s5p_aes_ctr_crypt(struct ablkcipher_request *req)
+{
+       return s5p_aes_crypt(req, FLAGS_AES_CTR);
+}
+
 static int s5p_aes_cra_init(struct crypto_tfm *tfm)
 {
        struct s5p_aes_ctx *ctx = crypto_tfm_ctx(tfm);
@@ ... @@ static struct crypto_alg algs[] = {
                        .decrypt        = s5p_aes_cbc_decrypt,
                }
        },
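+       /* ctr(aes): the IV supplied by callers is the initial counter block */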
+       {
+               .cra_name               = "ctr(aes)",
+               .cra_driver_name        = "ctr-aes-s5p",
+               .cra_priority           = 100,
+               .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
+                                         CRYPTO_ALG_ASYNC |
+                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
+               .cra_blocksize          = AES_BLOCK_SIZE,
+               .cra_ctxsize            = sizeof(struct s5p_aes_ctx),
+               .cra_alignmask          = 0x0f,
+               .cra_type               = &crypto_ablkcipher_type,
+               .cra_module             = THIS_MODULE,
+               .cra_init               = s5p_aes_cra_init,
+               .cra_u.ablkcipher = {
+                       .min_keysize    = AES_MIN_KEY_SIZE,
+                       .max_keysize    = AES_MAX_KEY_SIZE,
+                       .ivsize         = AES_BLOCK_SIZE,
+                       .setkey         = s5p_aes_setkey,
+                       .encrypt        = s5p_aes_ctr_crypt,
+                       .decrypt        = s5p_aes_ctr_crypt,
+               }
+       },
 };
 
 static int s5p_aes_probe(struct platform_device *pdev)