* Returns 0 on success.
  */
 int chsc_sadc(struct subchannel_id schid, struct chsc_scssc_area *scssc,
-             u64 summary_indicator_addr, u64 subchannel_indicator_addr, u8 isc)
+             dma64_t summary_indicator_addr, dma64_t subchannel_indicator_addr, u8 isc)
 {
        memset(scssc, 0, sizeof(*scssc));
        scssc->request.length = 0x0fe0;
                u32 key : 4;
                u32 : 28;
                u32 zeroes1;
-               u32 cub_addr1;
+               dma32_t cub_addr1;
                u32 zeroes2;
-               u32 cub_addr2;
+               dma32_t cub_addr2;
                u32 reserved[13];
                struct chsc_header response;
                u32 status : 8;
        secm_area->request.code = 0x0016;
 
        secm_area->key = PAGE_DEFAULT_KEY >> 4;
-       secm_area->cub_addr1 = virt_to_phys(css->cub_addr1);
-       secm_area->cub_addr2 = virt_to_phys(css->cub_addr2);
+       secm_area->cub_addr1 = virt_to_dma32(css->cub_addr1);
+       secm_area->cub_addr2 = virt_to_dma32(css->cub_addr2);
 
        secm_area->operation_code = enable ? 0 : 1;
 
 
 int chsc_get_channel_measurement_chars(struct channel_path *chp);
 int chsc_ssqd(struct subchannel_id schid, struct chsc_ssqd_area *ssqd);
 int chsc_sadc(struct subchannel_id schid, struct chsc_scssc_area *scssc,
-             u64 summary_indicator_addr, u64 subchannel_indicator_addr,
+             dma64_t summary_indicator_addr, dma64_t subchannel_indicator_addr,
              u8 isc);
 int chsc_sgib(u32 origin);
 int chsc_error_from_response(int response);
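
These hunks replace raw u32/u64 physical addresses with the bitwise dma32_t/dma64_t types so that sparse can flag accidental mixing of virtual, physical and DMA address values. For orientation, here is a simplified sketch of the conversion helpers the hunks rely on, modelled on arch/s390/include/asm/dma-types.h; the in-tree header provides more helpers and range checking than shown here.

/* Simplified sketch of the dma32_t/dma64_t helpers assumed by this patch. */
#include <linux/types.h>
#include <asm/page.h>

typedef u32 __bitwise dma32_t;  /* 31-bit DMA-capable absolute address */
typedef u64 __bitwise dma64_t;  /* 64-bit DMA-capable absolute address */

static inline dma32_t virt_to_dma32(void *ptr)
{
        return (__force dma32_t)(u32)__pa(ptr); /* in-tree version also checks the range */
}

static inline void *dma32_to_virt(dma32_t addr)
{
        return __va((__force unsigned long)addr);
}

static inline dma64_t virt_to_dma64(void *ptr)
{
        return (__force dma64_t)__pa(ptr);
}

static inline void *dma64_to_virt(dma64_t addr)
{
        return __va((__force unsigned long)addr);
}

static inline u64 dma64_to_u64(dma64_t addr)
{
        return (__force u64)addr;
}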
 
        orb->cmd.i2k = 0;
        orb->cmd.key = key >> 4;
        /* issue "Start Subchannel" */
-       orb->cmd.cpa = (u32)virt_to_phys(cpa);
+       orb->cmd.cpa = virt_to_dma32(cpa);
        ccode = ssch(sch->schid, orb);
 
        /* process condition code */
        orb->tm.key = key >> 4;
        orb->tm.b = 1;
        orb->tm.lpm = lpm ? lpm : sch->lpm;
-       orb->tm.tcw = (u32)virt_to_phys(tcw);
+       orb->tm.tcw = virt_to_dma32(tcw);
        cc = ssch(sch->schid, orb);
        switch (cc) {
        case 0:
 
                printk(KERN_WARNING "cio: orb indicates transport mode\n");
                printk(KERN_WARNING "cio: last tcw:\n");
                print_hex_dump(KERN_WARNING, "cio:  ", DUMP_PREFIX_NONE, 16, 1,
-                              phys_to_virt(orb->tm.tcw),
+                              dma32_to_virt(orb->tm.tcw),
                               sizeof(struct tcw), 0);
        } else {
                printk(KERN_WARNING "cio: orb indicates command mode\n");
-               if (phys_to_virt(orb->cmd.cpa) ==
+               if (dma32_to_virt(orb->cmd.cpa) ==
                    &private->dma_area->sense_ccw ||
-                   phys_to_virt(orb->cmd.cpa) ==
+                   dma32_to_virt(orb->cmd.cpa) ==
                    cdev->private->dma_area->iccws)
                        printk(KERN_WARNING "cio: last channel program "
                               "(intern):\n");
                        printk(KERN_WARNING "cio: last channel program:\n");
 
                print_hex_dump(KERN_WARNING, "cio:  ", DUMP_PREFIX_NONE, 16, 1,
-                              phys_to_virt(orb->cmd.cpa),
+                              dma32_to_virt(orb->cmd.cpa),
                               sizeof(struct ccw1), 0);
        }
        printk(KERN_WARNING "cio: ccw device state: %d\n",
 
        snsid_init(cdev);
        /* Channel program setup. */
        cp->cmd_code    = CCW_CMD_SENSE_ID;
-       cp->cda         = (u32)virt_to_phys(&cdev->private->dma_area->senseid);
+       cp->cda         = virt_to_dma32(&cdev->private->dma_area->senseid);
        cp->count       = sizeof(struct senseid);
        cp->flags       = CCW_FLAG_SLI;
        /* Request setup. */
 
 
        pgid->inf.fc    = fn;
        cp->cmd_code    = CCW_CMD_SET_PGID;
-       cp->cda         = (u32)virt_to_phys(pgid);
+       cp->cda         = virt_to_dma32(pgid);
        cp->count       = sizeof(*pgid);
        cp->flags       = CCW_FLAG_SLI;
        req->cp         = cp;
 
        /* Channel program setup. */
        cp->cmd_code    = CCW_CMD_SENSE_PGID;
-       cp->cda         = (u32)virt_to_phys(&cdev->private->dma_area->pgid[i]);
+       cp->cda         = virt_to_dma32(&cdev->private->dma_area->pgid[i]);
        cp->count       = sizeof(struct pgid);
        cp->flags       = CCW_FLAG_SLI;
        req->cp         = cp;
        struct ccw1 *cp = cdev->private->dma_area->iccws;
 
        cp[0].cmd_code = CCW_CMD_STLCK;
-       cp[0].cda = (u32)virt_to_phys(buf1);
+       cp[0].cda = virt_to_dma32(buf1);
        cp[0].count = 32;
        cp[0].flags = CCW_FLAG_CC;
        cp[1].cmd_code = CCW_CMD_RELEASE;
-       cp[1].cda = (u32)virt_to_phys(buf2);
+       cp[1].cda = virt_to_dma32(buf2);
        cp[1].count = 32;
        cp[1].flags = 0;
        req->cp = cp;
 
         */
        sense_ccw = &to_io_private(sch)->dma_area->sense_ccw;
        sense_ccw->cmd_code = CCW_CMD_BASIC_SENSE;
-       sense_ccw->cda = virt_to_phys(cdev->private->dma_area->irb.ecw);
+       sense_ccw->cda = virt_to_dma32(cdev->private->dma_area->irb.ecw);
        sense_ccw->count = SENSE_MAX_COUNT;
        sense_ccw->flags = CCW_FLAG_SLI;
 
 
        int cc;
 
        orb_init(orb);
-       orb->eadm.aob = (u32)virt_to_phys(aob);
+       orb->eadm.aob = virt_to_dma32(aob);
        orb->eadm.intparm = (u32)virt_to_phys(sch);
        orb->eadm.key = PAGE_DEFAULT_KEY >> 4;
 
                css_sched_sch_todo(sch, SCH_TODO_EVAL);
                return;
        }
-       scm_irq_handler(phys_to_virt(scsw->aob), error);
+       scm_irq_handler(dma32_to_virt(scsw->aob), error);
        private->state = EADM_IDLE;
 
        if (private->completion)
 
  */
 struct tcw *tcw_get_intrg(struct tcw *tcw)
 {
-       return phys_to_virt(tcw->intrg);
+       return dma32_to_virt(tcw->intrg);
 }
 EXPORT_SYMBOL(tcw_get_intrg);
 
 void *tcw_get_data(struct tcw *tcw)
 {
        if (tcw->r)
-               return phys_to_virt(tcw->input);
+               return dma64_to_virt(tcw->input);
        if (tcw->w)
-               return phys_to_virt(tcw->output);
+               return dma64_to_virt(tcw->output);
        return NULL;
 }
 EXPORT_SYMBOL(tcw_get_data);
  */
 struct tccb *tcw_get_tccb(struct tcw *tcw)
 {
-       return phys_to_virt(tcw->tccb);
+       return dma64_to_virt(tcw->tccb);
 }
 EXPORT_SYMBOL(tcw_get_tccb);
 
  */
 struct tsb *tcw_get_tsb(struct tcw *tcw)
 {
-       return phys_to_virt(tcw->tsb);
+       return dma64_to_virt(tcw->tsb);
 }
 EXPORT_SYMBOL(tcw_get_tsb);
 
  */
 void tcw_set_intrg(struct tcw *tcw, struct tcw *intrg_tcw)
 {
-       tcw->intrg = (u32)virt_to_phys(intrg_tcw);
+       tcw->intrg = virt_to_dma32(intrg_tcw);
 }
 EXPORT_SYMBOL(tcw_set_intrg);
 
 void tcw_set_data(struct tcw *tcw, void *data, int use_tidal)
 {
        if (tcw->r) {
-               tcw->input = virt_to_phys(data);
+               tcw->input = virt_to_dma64(data);
                if (use_tidal)
                        tcw->flags |= TCW_FLAGS_INPUT_TIDA;
        } else if (tcw->w) {
-               tcw->output = virt_to_phys(data);
+               tcw->output = virt_to_dma64(data);
                if (use_tidal)
                        tcw->flags |= TCW_FLAGS_OUTPUT_TIDA;
        }
  */
 void tcw_set_tccb(struct tcw *tcw, struct tccb *tccb)
 {
-       tcw->tccb = virt_to_phys(tccb);
+       tcw->tccb = virt_to_dma64(tccb);
 }
 EXPORT_SYMBOL(tcw_set_tccb);
 
  */
 void tcw_set_tsb(struct tcw *tcw, struct tsb *tsb)
 {
-       tcw->tsb = virt_to_phys(tsb);
+       tcw->tsb = virt_to_dma64(tsb);
 }
 EXPORT_SYMBOL(tcw_set_tsb);
 
        memset(tidaw, 0, sizeof(struct tidaw));
        tidaw->flags = flags;
        tidaw->count = count;
-       tidaw->addr = virt_to_phys(addr);
+       tidaw->addr = virt_to_dma64(addr);
        return tidaw;
 }
 EXPORT_SYMBOL(tcw_add_tidaw);
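
The fcx accessors above keep their virtual-address interface; only the stored representation switches to dma32_t/dma64_t, so call sites need no adjustment. A hypothetical caller sketch (not taken from the patch), assuming the usual asm/fcx.h helpers such as tcw_init():

#include <asm/fcx.h>

/* Illustrative only: callers keep passing virtual addresses. */
static void example_build_read_tcw(struct tcw *tcw, struct tccb *tccb,
                                   struct tsb *tsb, void *buf)
{
        tcw_init(tcw, 1, 0);            /* input (read) transfer */
        tcw_set_tccb(tcw, tccb);        /* stored as virt_to_dma64(tccb) */
        tcw_set_tsb(tcw, tsb);          /* stored as virt_to_dma64(tsb) */
        tcw_set_data(tcw, buf, 0);      /* direct data address, no TIDA list */
}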
 
  */
 static inline int do_siga_output(unsigned long schid, unsigned long mask,
                                 unsigned int *bb, unsigned long fc,
-                                unsigned long aob)
+                                dma64_t aob)
 {
        int cc;
 
 }
 
 static int qdio_siga_output(struct qdio_q *q, unsigned int count,
-                           unsigned int *busy_bit, unsigned long aob)
+                           unsigned int *busy_bit, dma64_t aob)
 {
        unsigned long schid = *((u32 *) &q->irq_ptr->schid);
        unsigned int fc = QDIO_SIGA_WRITE;
 EXPORT_SYMBOL_GPL(qdio_inspect_output_queue);
 
 static int qdio_kick_outbound_q(struct qdio_q *q, unsigned int count,
-                               unsigned long aob)
+                               dma64_t aob)
 {
        int retries = 0, cc;
        unsigned int busy_bit;
        irq_ptr->ccw->cmd_code = ciw->cmd;
        irq_ptr->ccw->flags = CCW_FLAG_SLI;
        irq_ptr->ccw->count = ciw->count;
-       irq_ptr->ccw->cda = (u32) virt_to_phys(irq_ptr->qdr);
+       irq_ptr->ccw->cda = virt_to_dma32(irq_ptr->qdr);
 
        spin_lock_irq(get_ccwdev_lock(cdev));
        ccw_device_set_options_mask(cdev, 0);
                qperf_inc(q, outbound_queue_full);
 
        if (queue_type(q) == QDIO_IQDIO_QFMT) {
-               unsigned long phys_aob = aob ? virt_to_phys(aob) : 0;
+               dma64_t phys_aob = aob ? virt_to_dma64(aob) : 0;
 
-               WARN_ON_ONCE(!IS_ALIGNED(phys_aob, 256));
+               WARN_ON_ONCE(!IS_ALIGNED(dma64_to_u64(phys_aob), 256));
                rc = qdio_kick_outbound_q(q, count, phys_aob);
        } else if (qdio_need_siga_sync(q->irq_ptr)) {
                rc = qdio_sync_output_queue(q);
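
Because dma64_t is a bitwise type, plain integer operations such as the alignment check above need an explicit dma64_to_u64() unwrap first. A contrived helper (name is illustrative, not from the patch) showing the pattern:

#include <linux/align.h>
#include <linux/types.h>
#include <asm/dma-types.h>

/* Illustrative only: unwrap the bitwise value before doing arithmetic on it. */
static inline bool example_aob_is_aligned(dma64_t phys_aob)
{
        return IS_ALIGNED(dma64_to_u64(phys_aob), 256);
}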
 
 
        /* fill in sl */
        for (j = 0; j < QDIO_MAX_BUFFERS_PER_Q; j++)
-               q->sl->element[j].sbal = virt_to_phys(q->sbal[j]);
+               q->sl->element[j].sbal = virt_to_dma64(q->sbal[j]);
 }
 
 static void setup_queues(struct qdio_irq *irq_ptr,
 
 static void qdio_fill_qdr_desc(struct qdesfmt0 *desc, struct qdio_q *queue)
 {
-       desc->sliba = virt_to_phys(queue->slib);
-       desc->sla = virt_to_phys(queue->sl);
-       desc->slsba = virt_to_phys(&queue->slsb);
+       desc->sliba = virt_to_dma64(queue->slib);
+       desc->sla = virt_to_dma64(queue->sl);
+       desc->slsba = virt_to_dma64(&queue->slsb);
 
        desc->akey = PAGE_DEFAULT_KEY >> 4;
        desc->bkey = PAGE_DEFAULT_KEY >> 4;
        irq_ptr->qdr->oqdcnt = qdio_init->no_output_qs;
        irq_ptr->qdr->iqdsz = sizeof(struct qdesfmt0) / 4; /* size in words */
        irq_ptr->qdr->oqdsz = sizeof(struct qdesfmt0) / 4;
-       irq_ptr->qdr->qiba = virt_to_phys(&irq_ptr->qib);
+       irq_ptr->qdr->qiba = virt_to_dma64(&irq_ptr->qib);
        irq_ptr->qdr->qkey = PAGE_DEFAULT_KEY >> 4;
 
        for (i = 0; i < qdio_init->no_input_qs; i++)
 
 static int set_subchannel_ind(struct qdio_irq *irq_ptr, int reset)
 {
        struct chsc_scssc_area *scssc = (void *)irq_ptr->chsc_page;
-       u64 summary_indicator_addr, subchannel_indicator_addr;
+       dma64_t summary_indicator_addr, subchannel_indicator_addr;
        int rc;
 
        if (reset) {
                summary_indicator_addr = 0;
                subchannel_indicator_addr = 0;
        } else {
-               summary_indicator_addr = virt_to_phys(tiqdio_airq.lsi_ptr);
-               subchannel_indicator_addr = virt_to_phys(irq_ptr->dsci);
+               summary_indicator_addr = virt_to_dma64(tiqdio_airq.lsi_ptr);
+               subchannel_indicator_addr = virt_to_dma64(irq_ptr->dsci);
        }
 
        rc = chsc_sadc(irq_ptr->schid, scssc, summary_indicator_addr,
 
                        for (i = 0;
                             i < aob->sb_count && i < queue->max_elements;
                             i++) {
-                               void *data = phys_to_virt(aob->sba[i]);
+                               void *data = dma64_to_virt(aob->sba[i]);
 
                                if (test_bit(i, buf->from_kmem_cache) && data)
                                        kmem_cache_free(qeth_core_header_cache,