         * The fallback cipher. If the operation can't be done in hardware,
         * fall back to a software version.
         */
-       struct crypto_skcipher          *sw_cipher;
+       struct crypto_sync_skcipher     *sw_cipher;
 };
 
 /* AEAD cipher context. */
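The sw_cipher conversion above is largely mechanical because the sync type is a thin wrapper around the ordinary skcipher handle; it only adds the guarantee that the underlying algorithm is synchronous. From <crypto/skcipher.h>:

    /* A sync skcipher embeds a plain skcipher; the type exists so the
     * API can enforce at compile time that the tfm never goes async. */
    struct crypto_sync_skcipher {
            struct crypto_skcipher base;
    };
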
                 * Set the fallback transform to use the same request flags as
                 * the hardware transform.
                 */
-               crypto_skcipher_clear_flags(ctx->sw_cipher,
-                                           CRYPTO_TFM_REQ_MASK);
-               crypto_skcipher_set_flags(ctx->sw_cipher,
-                                         cipher->base.crt_flags &
-                                         CRYPTO_TFM_REQ_MASK);
+               crypto_sync_skcipher_clear_flags(ctx->sw_cipher,
+                                                CRYPTO_TFM_REQ_MASK);
+               crypto_sync_skcipher_set_flags(ctx->sw_cipher,
+                                              cipher->base.crt_flags &
+                                              CRYPTO_TFM_REQ_MASK);
 
-               err = crypto_skcipher_setkey(ctx->sw_cipher, key, len);
+               err = crypto_sync_skcipher_setkey(ctx->sw_cipher, key, len);
 
                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
                tfm->crt_flags |=
-                       crypto_skcipher_get_flags(ctx->sw_cipher) &
+                       crypto_sync_skcipher_get_flags(ctx->sw_cipher) &
                        CRYPTO_TFM_RES_MASK;
 
                if (err)
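The setkey hunk follows the usual fallback pattern: mirror the hardware transform's request flags onto the software transform, key the fallback, then copy any result flags back for the caller. A minimal sketch of the same pattern against the sync API (setkey_fallback, tfm, fallback, key and len are hypothetical names, not the driver's):

    static int setkey_fallback(struct crypto_skcipher *tfm,
                               struct crypto_sync_skcipher *fallback,
                               const u8 *key, unsigned int len)
    {
            /* Make the fallback honour the same request flags as the
             * hardware transform before keying it. */
            crypto_sync_skcipher_clear_flags(fallback, CRYPTO_TFM_REQ_MASK);
            crypto_sync_skcipher_set_flags(fallback,
                                           crypto_skcipher_get_flags(tfm) &
                                           CRYPTO_TFM_REQ_MASK);

            return crypto_sync_skcipher_setkey(fallback, key, len);
    }
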
        struct crypto_tfm *old_tfm =
            crypto_ablkcipher_tfm(crypto_ablkcipher_reqtfm(req));
        struct spacc_ablk_ctx *ctx = crypto_tfm_ctx(old_tfm);
-       SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher);
+       SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher);
        int err;
 
        /*
         * Change the request to use the software fallback transform, and once
         * the ciphering has completed, put the old transform back into the
         * request.
         */
-       skcipher_request_set_tfm(subreq, ctx->sw_cipher);
+       skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher);
        skcipher_request_set_callback(subreq, req->base.flags, NULL, NULL);
        skcipher_request_set_crypt(subreq, req->src, req->dst,
                                   req->nbytes, req->info);
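Because a sync transform can never be backed by an asynchronous implementation, the on-stack request has a bounded, compile-time size; that is what lets SYNC_SKCIPHER_REQUEST_ON_STACK avoid the VLA in the old macro. A sketch of the complete fallback invocation, assuming an is_encrypt flag (the surrounding driver code is elided from the hunk):

    SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher);
    int err;

    skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher);
    skcipher_request_set_callback(subreq, req->base.flags, NULL, NULL);
    skcipher_request_set_crypt(subreq, req->src, req->dst,
                               req->nbytes, req->info);

    err = is_encrypt ? crypto_skcipher_encrypt(subreq) :
                       crypto_skcipher_decrypt(subreq);

    /* Wipe the on-stack request before it goes out of scope. */
    skcipher_request_zero(subreq);
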
        ctx->generic.flags = spacc_alg->type;
        ctx->generic.engine = engine;
        if (alg->cra_flags & CRYPTO_ALG_NEED_FALLBACK) {
-               ctx->sw_cipher = crypto_alloc_skcipher(
-                       alg->cra_name, 0, CRYPTO_ALG_ASYNC |
-                                         CRYPTO_ALG_NEED_FALLBACK);
+               ctx->sw_cipher = crypto_alloc_sync_skcipher(
+                       alg->cra_name, 0, CRYPTO_ALG_NEED_FALLBACK);
                if (IS_ERR(ctx->sw_cipher)) {
                        dev_warn(engine->dev, "failed to allocate fallback for %s\n",
                                 alg->cra_name);
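Dropping CRYPTO_ALG_ASYNC from the allocation mask is intentional: crypto_alloc_sync_skcipher() ORs CRYPTO_ALG_ASYNC into the mask itself, so only synchronous implementations can be selected. A sketch of the allocation pattern in isolation (fb is a hypothetical local):

    struct crypto_sync_skcipher *fb;

    /* The helper adds CRYPTO_ALG_ASYNC to the mask, guaranteeing a
     * synchronous tfm; only the fallback flag needs to be passed. */
    fb = crypto_alloc_sync_skcipher(alg->cra_name, 0,
                                    CRYPTO_ALG_NEED_FALLBACK);
    if (IS_ERR(fb))
            return PTR_ERR(fb);
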
 {
        struct spacc_ablk_ctx *ctx = crypto_tfm_ctx(tfm);
 
-       crypto_free_skcipher(ctx->sw_cipher);
+       crypto_free_sync_skcipher(ctx->sw_cipher);
 }
 
 static int spacc_ablk_encrypt(struct ablkcipher_request *req)
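The exit-path hunk keeps the pairing symmetrical: the handle from crypto_alloc_sync_skcipher() is released with crypto_free_sync_skcipher(). For orientation, a hypothetical sketch of how the encrypt entry point could route a request to the fallback (spacc_ablk_need_fallback, spacc_ablk_do_fallback and spacc_ablk_setup are assumptions about code elided from this diff, with simplified signatures):

    static int spacc_ablk_encrypt(struct ablkcipher_request *req)
    {
            /* Use the software path when the hardware cannot take the
             * request; both helpers are assumed, not shown above. */
            if (spacc_ablk_need_fallback(req))
                    return spacc_ablk_do_fallback(req, true);

            return spacc_ablk_setup(req, true);
    }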