else
                desc_cache = hsotg->desc_gen_cache;
 
-       qh->desc_list_sz = sizeof(struct dwc2_hcd_dma_desc) *
+       qh->desc_list_sz = sizeof(struct dwc2_dma_desc) *
                                                dwc2_max_desc_num(qh);
 
        qh->desc_list = kmem_cache_zalloc(desc_cache, flags | GFP_DMA);
        qh->ntd = 0;
 
        if (qh->desc_list)
-               memset(qh->desc_list, 0, sizeof(struct dwc2_hcd_dma_desc) *
+               memset(qh->desc_list, 0, sizeof(struct dwc2_dma_desc) *
                       dwc2_max_desc_num(qh));
 }
 
                                         struct dwc2_qh *qh, u32 max_xfer_size,
                                         u16 idx)
 {
-       struct dwc2_hcd_dma_desc *dma_desc = &qh->desc_list[idx];
+       struct dwc2_dma_desc *dma_desc = &qh->desc_list[idx];
        struct dwc2_hcd_iso_packet_desc *frame_desc;
 
        memset(dma_desc, 0, sizeof(*dma_desc));
 
        dma_sync_single_for_device(hsotg->dev,
                        qh->desc_list_dma +
-                       (idx * sizeof(struct dwc2_hcd_dma_desc)),
-                       sizeof(struct dwc2_hcd_dma_desc),
+                       (idx * sizeof(struct dwc2_dma_desc)),
+                       sizeof(struct dwc2_dma_desc),
                        DMA_TO_DEVICE);
 }
 
                qh->desc_list[idx].status |= HOST_DMA_IOC;
                dma_sync_single_for_device(hsotg->dev,
                                           qh->desc_list_dma + (idx *
-                                          sizeof(struct dwc2_hcd_dma_desc)),
-                                          sizeof(struct dwc2_hcd_dma_desc),
+                                          sizeof(struct dwc2_dma_desc)),
+                                          sizeof(struct dwc2_dma_desc),
                                           DMA_TO_DEVICE);
        }
 #else
        qh->desc_list[idx].status |= HOST_DMA_IOC;
        dma_sync_single_for_device(hsotg->dev,
                                   qh->desc_list_dma +
-                                  (idx * sizeof(struct dwc2_hcd_dma_desc)),
-                                  sizeof(struct dwc2_hcd_dma_desc),
+                                  (idx * sizeof(struct dwc2_dma_desc)),
+                                  sizeof(struct dwc2_dma_desc),
                                   DMA_TO_DEVICE);
 #endif
 }
                                    struct dwc2_qtd *qtd, struct dwc2_qh *qh,
                                    int n_desc)
 {
-       struct dwc2_hcd_dma_desc *dma_desc = &qh->desc_list[n_desc];
+       struct dwc2_dma_desc *dma_desc = &qh->desc_list[n_desc];
        int len = chan->xfer_len;
 
        if (len > MAX_DMA_DESC_SIZE - (chan->max_packet - 1))
 
        dma_sync_single_for_device(hsotg->dev,
                                   qh->desc_list_dma +
-                                  (n_desc * sizeof(struct dwc2_hcd_dma_desc)),
-                                  sizeof(struct dwc2_hcd_dma_desc),
+                                  (n_desc * sizeof(struct dwc2_dma_desc)),
+                                  sizeof(struct dwc2_dma_desc),
                                   DMA_TO_DEVICE);
 
        /*
                                dma_sync_single_for_device(hsotg->dev,
                                        qh->desc_list_dma +
                                        ((n_desc - 1) *
-                                       sizeof(struct dwc2_hcd_dma_desc)),
-                                       sizeof(struct dwc2_hcd_dma_desc),
+                                       sizeof(struct dwc2_dma_desc)),
+                                       sizeof(struct dwc2_dma_desc),
                                        DMA_TO_DEVICE);
                        }
                        dwc2_fill_host_dma_desc(hsotg, chan, qtd, qh, n_desc);
                         n_desc - 1, &qh->desc_list[n_desc - 1]);
                dma_sync_single_for_device(hsotg->dev,
                                           qh->desc_list_dma + (n_desc - 1) *
-                                          sizeof(struct dwc2_hcd_dma_desc),
-                                          sizeof(struct dwc2_hcd_dma_desc),
+                                          sizeof(struct dwc2_dma_desc),
+                                          sizeof(struct dwc2_dma_desc),
                                           DMA_TO_DEVICE);
                if (n_desc > 1) {
                        qh->desc_list[0].status |= HOST_DMA_A;
                                 &qh->desc_list[0]);
                        dma_sync_single_for_device(hsotg->dev,
                                        qh->desc_list_dma,
-                                       sizeof(struct dwc2_hcd_dma_desc),
+                                       sizeof(struct dwc2_dma_desc),
                                        DMA_TO_DEVICE);
                }
                chan->ntd = n_desc;
                                        struct dwc2_qtd *qtd,
                                        struct dwc2_qh *qh, u16 idx)
 {
-       struct dwc2_hcd_dma_desc *dma_desc;
+       struct dwc2_dma_desc *dma_desc;
        struct dwc2_hcd_iso_packet_desc *frame_desc;
        u16 remain = 0;
        int rc = 0;
                return -EINVAL;
 
        dma_sync_single_for_cpu(hsotg->dev, qh->desc_list_dma + (idx *
-                               sizeof(struct dwc2_hcd_dma_desc)),
-                               sizeof(struct dwc2_hcd_dma_desc),
+                               sizeof(struct dwc2_dma_desc)),
+                               sizeof(struct dwc2_dma_desc),
                                DMA_FROM_DEVICE);
 
        dma_desc = &qh->desc_list[idx];
 static int dwc2_update_non_isoc_urb_state_ddma(struct dwc2_hsotg *hsotg,
                                        struct dwc2_host_chan *chan,
                                        struct dwc2_qtd *qtd,
-                                       struct dwc2_hcd_dma_desc *dma_desc,
+                                       struct dwc2_dma_desc *dma_desc,
                                        enum dwc2_halt_status halt_status,
                                        u32 n_bytes, int *xfer_done)
 {
 {
        struct dwc2_qh *qh = chan->qh;
        struct dwc2_hcd_urb *urb = qtd->urb;
-       struct dwc2_hcd_dma_desc *dma_desc;
+       struct dwc2_dma_desc *dma_desc;
        u32 n_bytes;
        int failed;
 
 
        dma_sync_single_for_cpu(hsotg->dev,
                                qh->desc_list_dma + (desc_num *
-                               sizeof(struct dwc2_hcd_dma_desc)),
-                               sizeof(struct dwc2_hcd_dma_desc),
+                               sizeof(struct dwc2_dma_desc)),
+                               sizeof(struct dwc2_dma_desc),
                                DMA_FROM_DEVICE);
 
        dma_desc = &qh->desc_list[desc_num];
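
For reference, the object being renamed throughout is the two-word host DMA descriptor that every dma_sync_single_for_device()/dma_sync_single_for_cpu() call above uses for its size and offset arithmetic. A minimal sketch of the renamed structure, assuming the status/buffer layout declared in the driver's hcd.h (shown for illustration only, not part of this diff):

	/* Sketch of the descriptor DMA entry, assuming the two-word
	 * layout used by dwc2 host descriptor DMA (see hcd.h).
	 */
	struct dwc2_dma_desc {
		u32 status;	/* control/status word: active (A), IOC, transfer size bits */
		u32 buf;	/* DMA address of the data buffer for this descriptor */
	};

Since each descriptor is 8 bytes, the qh->desc_list_dma + (idx * sizeof(struct dwc2_dma_desc)) expressions above simply step through the contiguous descriptor list one entry at a time when syncing a single descriptor to or from the device.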