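/*
 * dwc_otg_hcd_ddma.c
 *
 * Descriptor DMA support for the DWC OTG host controller driver (HCD):
 * descriptor list and frame list management, descriptor initialization for
 * control/bulk/interrupt/isochronous transfers, and transfer completion
 * handling in Descriptor DMA mode.
 */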
#ifndef DWC_DEVICE_ONLY

#include "dwc_otg_hcd.h"
#include "dwc_otg_regs.h"

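/*
 * Helpers for indexing into the periodic frame list and into a QH's
 * descriptor list.  Both structures are power-of-two sized, so indices
 * wrap with a simple mask.
 */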
static inline uint8_t frame_list_idx(uint16_t frame)
{
	return (frame & (MAX_FRLIST_EN_NUM - 1));
}

static inline uint16_t desclist_idx_inc(uint16_t idx, uint16_t inc, uint8_t speed)
{
	return (idx + inc) &
	    (((speed == DWC_OTG_EP_SPEED_HIGH) ?
	      MAX_DMA_DESC_NUM_HS_ISOC : MAX_DMA_DESC_NUM_GENERIC) - 1);
}

static inline uint16_t desclist_idx_dec(uint16_t idx, uint16_t inc, uint8_t speed)
{
	return (idx - inc) &
	    (((speed == DWC_OTG_EP_SPEED_HIGH) ?
	      MAX_DMA_DESC_NUM_HS_ISOC : MAX_DMA_DESC_NUM_GENERIC) - 1);
}

static inline uint16_t max_desc_num(dwc_otg_qh_t * qh)
{
	return (((qh->ep_type == UE_ISOCHRONOUS)
		 && (qh->dev_speed == DWC_OTG_EP_SPEED_HIGH))
		? MAX_DMA_DESC_NUM_HS_ISOC : MAX_DMA_DESC_NUM_GENERIC);
}

static inline uint16_t frame_incr_val(dwc_otg_qh_t * qh)
{
	return ((qh->dev_speed == DWC_OTG_EP_SPEED_HIGH)
		? ((qh->interval + 8 - 1) / 8)
		: qh->interval);
}

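/*
 * Per-QH descriptor list allocation.  The descriptor list itself lives in
 * DMA-coherent memory; qh->n_bytes keeps the programmed transfer size of
 * each descriptor so completion handling can compute actual lengths.
 */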
static int desc_list_alloc(dwc_otg_qh_t * qh)
{
	int retval = 0;

	qh->desc_list = (dwc_otg_host_dma_desc_t *)
	    DWC_DMA_ALLOC(sizeof(dwc_otg_host_dma_desc_t) * max_desc_num(qh),
			  &qh->desc_list_dma);

	if (!qh->desc_list) {
		retval = -DWC_E_NO_MEMORY;
		DWC_ERROR("%s: DMA descriptor list allocation failed\n", __func__);
		return retval;
	}

	dwc_memset(qh->desc_list, 0x00,
		   sizeof(dwc_otg_host_dma_desc_t) * max_desc_num(qh));

	qh->n_bytes =
	    (uint32_t *) DWC_ALLOC(sizeof(uint32_t) * max_desc_num(qh));

	if (!qh->n_bytes) {
		retval = -DWC_E_NO_MEMORY;
		DWC_ERROR("%s: Failed to allocate array for descriptors' size actual values\n",
			  __func__);
	}

	return retval;
}

static void desc_list_free(dwc_otg_qh_t * qh)
{
	if (qh->desc_list) {
		/* Free with the same size that was passed to DWC_DMA_ALLOC(). */
		DWC_DMA_FREE(sizeof(dwc_otg_host_dma_desc_t) * max_desc_num(qh),
			     qh->desc_list, qh->desc_list_dma);
		qh->desc_list = NULL;
	}

	if (qh->n_bytes) {
		DWC_FREE(qh->n_bytes);
		qh->n_bytes = NULL;
	}
}

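/*
 * Host frame list used for periodic scheduling in Descriptor DMA mode.
 * Each of the MAX_FRLIST_EN_NUM entries is a 32-bit bitmap of the host
 * channels that are active in that (micro)frame.
 */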
static int frame_list_alloc(dwc_otg_hcd_t * hcd)
{
	int retval = 0;

	if (hcd->frame_list)
		return 0;

	hcd->frame_list = DWC_DMA_ALLOC(4 * MAX_FRLIST_EN_NUM,
					&hcd->frame_list_dma);
	if (!hcd->frame_list) {
		retval = -DWC_E_NO_MEMORY;
		DWC_ERROR("%s: Frame List allocation failed\n", __func__);
		return retval;
	}

	dwc_memset(hcd->frame_list, 0x00, 4 * MAX_FRLIST_EN_NUM);

	return retval;
}

static void frame_list_free(dwc_otg_hcd_t * hcd)
{
	if (!hcd->frame_list)
		return;

	DWC_DMA_FREE(4 * MAX_FRLIST_EN_NUM, hcd->frame_list, hcd->frame_list_dma);
	hcd->frame_list = NULL;
}

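/*
 * Enable/disable the controller's periodic schedule.  Before enabling, the
 * frame list base address (hflbaddr) and the frame list size (hcfg.frlisten)
 * are programmed; the schedule itself is controlled by hcfg.perschedena.
 */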
static void per_sched_enable(dwc_otg_hcd_t * hcd, uint16_t fr_list_en)
{
	hcfg_data_t hcfg;

	hcfg.d32 = DWC_READ_REG32(&hcd->core_if->host_if->host_global_regs->hcfg);

	if (hcfg.b.perschedena) {
		return;
	}

	DWC_WRITE_REG32(&hcd->core_if->host_if->host_global_regs->hflbaddr,
			hcd->frame_list_dma);

	switch (fr_list_en) {
	case 64:
		hcfg.b.frlisten = 3;
		break;
	case 32:
		hcfg.b.frlisten = 2;
		break;
	case 16:
		hcfg.b.frlisten = 1;
		break;
	case 8:
		hcfg.b.frlisten = 0;
		break;
	default:
		break;
	}

	hcfg.b.perschedena = 1;

	DWC_DEBUGPL(DBG_HCD, "Enabling Periodic schedule\n");
	DWC_WRITE_REG32(&hcd->core_if->host_if->host_global_regs->hcfg, hcfg.d32);
}

static void per_sched_disable(dwc_otg_hcd_t * hcd)
{
	hcfg_data_t hcfg;

	hcfg.d32 = DWC_READ_REG32(&hcd->core_if->host_if->host_global_regs->hcfg);

	if (!hcfg.b.perschedena) {
		return;
	}

	hcfg.b.perschedena = 0;

	DWC_DEBUGPL(DBG_HCD, "Disabling Periodic schedule\n");
	DWC_WRITE_REG32(&hcd->core_if->host_if->host_global_regs->hcfg, hcfg.d32);
}

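/*
 * Set or clear the given QH's host channel bit in every frame list entry in
 * which the QH is scheduled, and (when enabling) program the channel's
 * microframe schedule mask (hc->schinfo).
 */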
void update_frame_list(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh, uint8_t enable)
{
	uint16_t i, j, inc;
	dwc_hc_t *hc = NULL;

	if (!hcd) {
		DWC_ERROR("hcd = %p", hcd);
		return;
	}

	if (!qh->channel) {
		DWC_ERROR("qh->channel = %p", qh->channel);
		return;
	}

	if (!hcd->frame_list) {
		DWC_ERROR("hcd->frame_list = %p", hcd->frame_list);
		return;
	}

	hc = qh->channel;
	inc = frame_incr_val(qh);
	if (qh->ep_type == UE_ISOCHRONOUS)
		i = frame_list_idx(qh->sched_frame);
	else
		i = 0;

	j = i;
	do {
		if (enable)
			hcd->frame_list[j] |= (1 << hc->hc_num);
		else
			hcd->frame_list[j] &= ~(1 << hc->hc_num);
		j = (j + inc) & (MAX_FRLIST_EN_NUM - 1);
	} while (j != i);

	if (!enable)
		return;

	hc->schinfo = 0;
	if (qh->channel->speed == DWC_OTG_EP_SPEED_HIGH) {
		/* Mark the microframes within each frame that the channel may use. */
		j = 1;
		inc = (8 + qh->interval - 1) / qh->interval;
		for (i = 0; i < inc; i++) {
			hc->schinfo |= j;
			j = j << qh->interval;
		}
	} else {
		hc->schinfo = 0xff;
	}
}

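/* Debug helper: print the contents of the periodic frame list. */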
#if 1
void dump_frame_list(dwc_otg_hcd_t * hcd)
{
	int i = 0;

	DWC_PRINTF("--FRAME LIST (hex) --\n");
	for (i = 0; i < MAX_FRLIST_EN_NUM; i++) {
		DWC_PRINTF("%x\t", hcd->frame_list[i]);
		if (!(i % 8) && i)
			DWC_PRINTF("\n");
	}
	DWC_PRINTF("\n----\n");
}
#endif

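/*
 * Release the host channel assigned to a QH: clear its frame list bits for
 * periodic QHs, clean up the channel, return it to the free channel list
 * and zero the QH's descriptor list.
 */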
static void release_channel_ddma(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh)
{
	dwc_hc_t *hc = qh->channel;

	if (dwc_qh_is_non_per(qh))
		hcd->non_periodic_channels--;
	else
		update_frame_list(hcd, qh, 0);

	if (hc->qh) {
		dwc_otg_hc_cleanup(hcd->core_if, hc);
		DWC_CIRCLEQ_INSERT_TAIL(&hcd->free_hc_list, hc, hc_list_entry);
		hc->qh = NULL;
	}

	qh->channel = NULL;
	qh->ntd = 0;

	if (qh->desc_list) {
		dwc_memset(qh->desc_list, 0x00,
			   sizeof(dwc_otg_host_dma_desc_t) * max_desc_num(qh));
	}
}

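/**
 * Initializes a QH structure's Descriptor DMA related members.
 * Allocates memory for the descriptor list.
 * On the first periodic QH, allocates memory for the FrameList
 * and enables periodic scheduling.
 *
 * @param hcd The HCD state structure for the DWC OTG controller.
 * @param qh The QH to initialize.
 *
 * @return 0 if successful, negative error code otherwise.
 */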
int dwc_otg_hcd_qh_init_ddma(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh)
{
	int retval = 0;

	if (qh->do_split) {
		DWC_ERROR("SPLIT Transfers are not supported in Descriptor DMA.\n");
		return -1;
	}

	retval = desc_list_alloc(qh);

	if ((retval == 0)
	    && (qh->ep_type == UE_ISOCHRONOUS || qh->ep_type == UE_INTERRUPT)) {
		if (!hcd->frame_list) {
			retval = frame_list_alloc(hcd);

			if (retval == 0)
				per_sched_enable(hcd, MAX_FRLIST_EN_NUM);
		}
	}

	qh->ntd = 0;

	return retval;
}

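/**
 * Frees descriptor list memory associated with the QH.
 * If the channel associated with the QH is still assigned, it is released.
 * If no periodic channels remain and a FrameList was allocated, periodic
 * scheduling is disabled and the FrameList is freed.
 *
 * @param hcd The HCD state structure for the DWC OTG controller.
 * @param qh The QH to free.
 */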
void dwc_otg_hcd_qh_free_ddma(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh)
{
	desc_list_free(qh);

	/* Release the channel if it is still assigned to this QH. */
	if (qh->channel)
		release_channel_ddma(hcd, qh);

	if ((qh->ep_type == UE_ISOCHRONOUS || qh->ep_type == UE_INTERRUPT)
	    && !hcd->periodic_channels && hcd->frame_list) {
		per_sched_disable(hcd);
		frame_list_free(hcd);
	}
}

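/*
 * Convert a frame list index into an index into the QH's descriptor list.
 * High-speed isochronous QHs use a set of 8 descriptors per frame (one per
 * microframe); full/low-speed QHs map frame list indices directly (modulo
 * the descriptor list size).
 */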
static uint8_t frame_to_desc_idx(dwc_otg_qh_t * qh, uint16_t frame_idx)
{
	if (qh->dev_speed == DWC_OTG_EP_SPEED_HIGH) {
		return (frame_idx & ((MAX_DMA_DESC_NUM_HS_ISOC / 8) - 1)) * 8;
	} else {
		return (frame_idx & (MAX_DMA_DESC_NUM_GENERIC - 1));
	}
}

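/*
 * Determine the starting (micro)frame for an isochronous transfer, a few
 * frames ahead of the current frame number.  The number of (micro)frames
 * skipped is returned through *skip_frames.
 */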
static uint16_t calc_starting_frame(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh,
				    uint8_t * skip_frames)
{
	uint16_t frame = 0;

	hcd->frame_number = dwc_otg_hcd_get_frame_number(hcd);

	if (qh->dev_speed == DWC_OTG_EP_SPEED_HIGH) {
		/*
		 * A high-speed descriptor set covers one full frame
		 * (8 microframes).  Skip one or two full frames depending on
		 * how far into the current frame we are, then align the
		 * result to a full frame boundary.
		 */
		if (dwc_micro_frame_num(hcd->frame_number) >= 5) {
			*skip_frames = 2 * 8;
			frame = dwc_frame_num_inc(hcd->frame_number, *skip_frames);
		} else {
			*skip_frames = 1 * 8;
			frame = dwc_frame_num_inc(hcd->frame_number, *skip_frames);
		}

		frame = dwc_full_frame_num(frame);
	} else {
		/*
		 * Full/low speed: one descriptor per frame.  Skip one frame
		 * and start two frames ahead of the current frame number.
		 */
		*skip_frames = 1;
		frame = dwc_frame_num_inc(hcd->frame_number, 2);
	}

	return frame;
}

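/*
 * Calculate the initial descriptor index (qh->td_first/td_last) for an
 * isochronous QH from its scheduled frame and the frame list layout, and
 * return the number of (micro)frames skipped by calc_starting_frame().
 */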
static uint8_t recalc_initial_desc_idx(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh)
{
	uint16_t frame = 0, fr_idx, fr_idx_tmp;
	uint8_t skip_frames = 0;

	if (qh->channel) {
		/*
		 * The channel is still assigned, so keep the QH on its
		 * original frame list slots: starting from the newly
		 * calculated frame, pick the next frame list index that
		 * belongs to this QH's service interval.
		 */
		frame = calc_starting_frame(hcd, qh, &skip_frames);
		fr_idx_tmp = frame_list_idx(frame);
		fr_idx =
		    (MAX_FRLIST_EN_NUM + frame_list_idx(qh->sched_frame) -
		     fr_idx_tmp)
		    % frame_incr_val(qh);
		fr_idx = (fr_idx + fr_idx_tmp) % MAX_FRLIST_EN_NUM;
	} else {
		qh->sched_frame = calc_starting_frame(hcd, qh, &skip_frames);
		fr_idx = frame_list_idx(qh->sched_frame);
	}

	qh->td_first = qh->td_last = frame_to_desc_idx(qh, fr_idx);

	return skip_frames;
}

#define ISOC_URB_GIVEBACK_ASAP

#define MAX_ISOC_XFER_SIZE_FS 1023
#define MAX_ISOC_XFER_SIZE_HS 3072
#define DESCNUM_THRESHOLD 4

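/*
 * Fill isochronous descriptors for all queued QTDs, starting at
 * qh->td_last, with one descriptor per service interval.  IOC bits are set
 * according to the ISOC_URB_GIVEBACK_ASAP policy.
 */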
static void init_isoc_dma_desc(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh,
			       uint8_t skip_frames)
{
	struct dwc_otg_hcd_iso_packet_desc *frame_desc;
	dwc_otg_qtd_t *qtd;
	dwc_otg_host_dma_desc_t *dma_desc;
	uint16_t idx, inc, n_desc, ntd_max, max_xfer_size;

	idx = qh->td_last;
	inc = qh->interval;
	n_desc = 0;

	ntd_max = (max_desc_num(qh) + qh->interval - 1) / qh->interval;
	if (skip_frames && !qh->channel)
		ntd_max = ntd_max - skip_frames / qh->interval;

	max_xfer_size =
	    (qh->dev_speed == DWC_OTG_EP_SPEED_HIGH) ?
	    MAX_ISOC_XFER_SIZE_HS : MAX_ISOC_XFER_SIZE_FS;

	DWC_CIRCLEQ_FOREACH(qtd, &qh->qtd_list, qtd_list_entry) {
		while ((qh->ntd < ntd_max)
		       && (qtd->isoc_frame_index_last <
			   qtd->urb->packet_count)) {

			dma_desc = &qh->desc_list[idx];
			dwc_memset(dma_desc, 0x00, sizeof(dwc_otg_host_dma_desc_t));

			frame_desc = &qtd->urb->iso_descs[qtd->isoc_frame_index_last];

			if (frame_desc->length > max_xfer_size)
				qh->n_bytes[idx] = max_xfer_size;
			else
				qh->n_bytes[idx] = frame_desc->length;
			dma_desc->status.b_isoc.n_bytes = qh->n_bytes[idx];
			dma_desc->status.b_isoc.a = 1;
			dma_desc->status.b_isoc.sts = 0;

			dma_desc->buf = qtd->urb->dma + frame_desc->offset;

			qh->ntd++;

			qtd->isoc_frame_index_last++;

#ifdef ISOC_URB_GIVEBACK_ASAP
			/*
			 * Set IOC on the descriptor that carries the last
			 * frame of this URB so the URB can be given back as
			 * soon as it completes.
			 */
			if (qtd->isoc_frame_index_last ==
			    qtd->urb->packet_count)
				dma_desc->status.b_isoc.ioc = 1;
#endif
			idx = desclist_idx_inc(idx, inc, qh->dev_speed);
			n_desc++;
		}
		qtd->in_process = 1;
	}

	qh->td_last = idx;

#ifdef ISOC_URB_GIVEBACK_ASAP
	/* Set IOC on the last queued descriptor if the descriptor list is full. */
	if (qh->ntd == ntd_max) {
		idx = desclist_idx_dec(qh->td_last, inc, qh->dev_speed);
		qh->desc_list[idx].status.b_isoc.ioc = 1;
	}
#else
	/*
	 * Set the IOC bit on only one descriptor.  If enough descriptors are
	 * queued, place it roughly halfway back so that new descriptors can
	 * be queued before the hardware runs out; otherwise put it on the
	 * last queued descriptor.
	 */
	if (n_desc > DESCNUM_THRESHOLD) {
		idx = desclist_idx_dec(idx, inc * ((qh->ntd + 1) / 2),
				       qh->dev_speed);
	} else {
		idx = desclist_idx_dec(qh->td_last, inc, qh->dev_speed);
	}

	qh->desc_list[idx].status.b_isoc.ioc = 1;
#endif
}

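/*
 * Build the descriptor chain for a non-isochronous (control/bulk/interrupt)
 * transfer from the QH's QTD list.  Each QTD is split into descriptors of
 * at most MAX_DMA_DESC_SIZE bytes; the last descriptor gets the IOC and EOL
 * bits.
 */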
static void init_non_isoc_dma_desc(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh)
{
	dwc_hc_t *hc;
	dwc_otg_host_dma_desc_t *dma_desc;
	dwc_otg_qtd_t *qtd;
	int num_packets, len, n_desc = 0;

	hc = qh->channel;

	DWC_CIRCLEQ_FOREACH(qtd, &qh->qtd_list, qtd_list_entry) {
		/*
		 * The first QTD uses the transfer buffer and length already
		 * programmed into the channel; subsequent QTDs point the
		 * channel at their own URB buffers.
		 */
		if (n_desc) {
			hc->xfer_buff = (uint8_t *)qtd->urb->dma + qtd->urb->actual_length;
			hc->xfer_len = qtd->urb->length - qtd->urb->actual_length;
		}

		qtd->n_desc = 0;

		do {
			dma_desc = &qh->desc_list[n_desc];
			len = hc->xfer_len;

			if (len > MAX_DMA_DESC_SIZE)
				len = MAX_DMA_DESC_SIZE - hc->max_packet + 1;

			if (hc->ep_is_in) {
				if (len > 0) {
					num_packets = (len + hc->max_packet - 1) / hc->max_packet;
				} else {
					/* Need 1 packet for transfer length of 0. */
					num_packets = 1;
				}
				/* IN descriptor length must be a multiple of max packet size. */
				len = num_packets * hc->max_packet;
			}

			dma_desc->status.b.n_bytes = len;

			qh->n_bytes[n_desc] = len;

			if ((qh->ep_type == UE_CONTROL)
			    && (qtd->control_phase == DWC_OTG_CONTROL_SETUP))
				dma_desc->status.b.sup = 1;

			dma_desc->status.b.a = 1;
			dma_desc->status.b.sts = 0;

			/* In Descriptor DMA mode xfer_buff holds a DMA address; program its low 32 bits. */
			dma_desc->buf =
			    ((unsigned long)hc->xfer_buff & 0xffffffff);

			if (len > hc->xfer_len) {
				hc->xfer_len = 0;
			} else {
				hc->xfer_buff += len;
				hc->xfer_len -= len;
			}

			qtd->n_desc++;
			n_desc++;
		} while ((hc->xfer_len > 0) && (n_desc != MAX_DMA_DESC_NUM_GENERIC));

		qtd->in_process = 1;

		/* Only one QTD is programmed at a time for control endpoints. */
		if (qh->ep_type == UE_CONTROL)
			break;

		if (n_desc == MAX_DMA_DESC_NUM_GENERIC)
			break;
	}

	if (n_desc) {
		/* Request an interrupt and mark end of list on the last descriptor. */
		qh->desc_list[n_desc - 1].status.b.ioc = 1;
		qh->desc_list[n_desc - 1].status.b.eol = 1;
		hc->ntd = n_desc;
	}
}

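/**
 * Starts a transfer in Descriptor DMA mode.
 *
 * Initializes the descriptor list for the QH's endpoint type, updates the
 * frame list for periodic (interrupt/isochronous) transfers and starts the
 * host channel.  For isochronous endpoints the initial descriptor index is
 * recalculated if the descriptor list is currently empty.
 *
 * @param hcd The HCD state structure for the DWC OTG controller.
 * @param qh The QH to start.
 */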
void dwc_otg_hcd_start_xfer_ddma(dwc_otg_hcd_t * hcd, dwc_otg_qh_t * qh)
{
	dwc_hc_t *hc = qh->channel;
	uint8_t skip_frames = 0;

	switch (hc->ep_type) {
	case DWC_OTG_EP_TYPE_CONTROL:
	case DWC_OTG_EP_TYPE_BULK:
		init_non_isoc_dma_desc(hcd, qh);
		dwc_otg_hc_start_transfer_ddma(hcd->core_if, hc);
		break;
	case DWC_OTG_EP_TYPE_INTR:
		init_non_isoc_dma_desc(hcd, qh);
		update_frame_list(hcd, qh, 1);
		dwc_otg_hc_start_transfer_ddma(hcd->core_if, hc);
		break;
	case DWC_OTG_EP_TYPE_ISOC:
		if (!qh->ntd)
			skip_frames = recalc_initial_desc_idx(hcd, qh);

		init_isoc_dma_desc(hcd, qh, skip_frames);

		if (!hc->xfer_started) {
			update_frame_list(hcd, qh, 1);

			/*
			 * The channel is started with the full descriptor
			 * list; entries that were not initialized above are
			 * left inactive (the list was zeroed at allocation).
			 */
			hc->ntd = max_desc_num(qh);

			dwc_otg_hc_start_transfer_ddma(hcd->core_if, hc);
		}
		break;
	default:
		break;
	}
}

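/*
 * Completion handling for an isochronous transfer: walk the descriptor list
 * from qh->td_first, update each URB's iso frame descriptors, give back
 * URBs whose packets have all completed, and stop at the descriptor that
 * had its IOC bit set.  AHB and babble errors fail all queued URBs.
 */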
static void complete_isoc_xfer_ddma(dwc_otg_hcd_t * hcd,
				    dwc_hc_t * hc,
				    dwc_otg_hc_regs_t * hc_regs,
				    dwc_otg_halt_status_e halt_status)
{
	struct dwc_otg_hcd_iso_packet_desc *frame_desc;
	dwc_otg_qtd_t *qtd, *qtd_tmp;
	dwc_otg_qh_t *qh;
	dwc_otg_host_dma_desc_t *dma_desc;
	uint16_t idx, remain;
	uint8_t urb_compl;

	qh = hc->qh;
	idx = qh->td_first;

	if (hc->halt_status == DWC_OTG_HC_XFER_URB_DEQUEUE) {
		DWC_CIRCLEQ_FOREACH_SAFE(qtd, qtd_tmp, &hc->qh->qtd_list, qtd_list_entry)
		    qtd->in_process = 0;
		return;
	} else if ((halt_status == DWC_OTG_HC_XFER_AHB_ERR) ||
		   (halt_status == DWC_OTG_HC_XFER_BABBLE_ERR)) {
		/*
		 * Fatal error: fail every frame of every queued URB with the
		 * corresponding error code and give the URBs back.
		 */
		int err = (halt_status == DWC_OTG_HC_XFER_AHB_ERR)
		    ? (-DWC_E_IO)
		    : (-DWC_E_OVERFLOW);

		DWC_CIRCLEQ_FOREACH_SAFE(qtd, qtd_tmp, &hc->qh->qtd_list, qtd_list_entry) {
			for (idx = 0; idx < qtd->urb->packet_count; idx++) {
				frame_desc = &qtd->urb->iso_descs[idx];
				frame_desc->status = err;
			}
			hcd->fops->complete(hcd, qtd->urb->priv, qtd->urb, err);
			dwc_otg_hcd_qtd_remove_and_free(hcd, qtd, qh);
		}
		return;
	}

	DWC_CIRCLEQ_FOREACH_SAFE(qtd, qtd_tmp, &hc->qh->qtd_list, qtd_list_entry) {

		if (!qtd->in_process)
			break;

		urb_compl = 0;

		do {
			dma_desc = &qh->desc_list[idx];

			frame_desc = &qtd->urb->iso_descs[qtd->isoc_frame_index];
			remain = hc->ep_is_in ? dma_desc->status.b_isoc.n_bytes : 0;

			if (dma_desc->status.b_isoc.sts == DMA_DESC_STS_PKTERR) {
				/*
				 * A packet error was reported for this
				 * descriptor: count the error and flag the
				 * frame, but keep processing the URB.
				 */
				qtd->urb->error_count++;
				frame_desc->actual_length = qh->n_bytes[idx] - remain;
				frame_desc->status = -DWC_E_PROTOCOL;
			} else {
				frame_desc->actual_length = qh->n_bytes[idx] - remain;
				frame_desc->status = 0;
			}

			if (++qtd->isoc_frame_index == qtd->urb->packet_count) {
				/* All frames of this URB are done: give it back. */
				hcd->fops->complete(hcd, qtd->urb->priv, qtd->urb, 0);
				dwc_otg_hcd_qtd_remove_and_free(hcd, qtd, qh);

				/*
				 * The completion callback may have dequeued
				 * the remaining URBs; if so, stop here.
				 */
				if (hc->halt_status == DWC_OTG_HC_XFER_URB_DEQUEUE) {
					return;
				}

				urb_compl = 1;
			}

			qh->ntd--;

			/* Stop at the descriptor that raised the interrupt. */
			if (dma_desc->status.b_isoc.ioc) {
				idx = desclist_idx_inc(idx, qh->interval, hc->speed);
				goto stop_scan;
			}

			idx = desclist_idx_inc(idx, qh->interval, hc->speed);

			if (urb_compl)
				break;
		} while (idx != qh->td_first);
	}

stop_scan:
	qh->td_first = idx;
}

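/*
 * Update URB state from one completed non-isochronous descriptor.  Sets
 * *xfer_done when the URB's transfer has finished, and returns nonzero when
 * an error was detected and descriptor scanning should stop.
 */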
uint8_t update_non_isoc_urb_state_ddma(dwc_otg_hcd_t * hcd,
				       dwc_hc_t * hc,
				       dwc_otg_qtd_t * qtd,
				       dwc_otg_host_dma_desc_t * dma_desc,
				       dwc_otg_halt_status_e halt_status,
				       uint32_t n_bytes, uint8_t * xfer_done)
{
	uint16_t remain = hc->ep_is_in ? dma_desc->status.b.n_bytes : 0;
	dwc_otg_hcd_urb_t *urb = qtd->urb;

	if (halt_status == DWC_OTG_HC_XFER_AHB_ERR) {
		urb->status = -DWC_E_IO;
		return 1;
	}
	if (dma_desc->status.b.sts == DMA_DESC_STS_PKTERR) {
		switch (halt_status) {
		case DWC_OTG_HC_XFER_STALL:
			urb->status = -DWC_E_PIPE;
			break;
		case DWC_OTG_HC_XFER_BABBLE_ERR:
			urb->status = -DWC_E_OVERFLOW;
			break;
		case DWC_OTG_HC_XFER_XACT_ERR:
			urb->status = -DWC_E_PROTOCOL;
			break;
		default:
			DWC_ERROR("%s: Unhandled descriptor error status (%d)\n",
				  __func__, halt_status);
			break;
		}
		return 1;
	}

	if (dma_desc->status.b.a == 1) {
		DWC_DEBUGPL(DBG_HCDV,
			    "Active descriptor encountered on channel %d\n",
			    hc->hc_num);
		return 0;
	}

	if (hc->ep_type == DWC_OTG_EP_TYPE_CONTROL) {
		if (qtd->control_phase == DWC_OTG_CONTROL_DATA) {
			urb->actual_length += n_bytes - remain;
			if (remain || urb->actual_length == urb->length) {
				/*
				 * Do not set urb->status here; it is set when
				 * the Status phase completes (see below), so
				 * the URB is not given back too early.
				 */
				*xfer_done = 1;
			}
		} else if (qtd->control_phase == DWC_OTG_CONTROL_STATUS) {
			urb->status = 0;
			*xfer_done = 1;
		}
	} else {
		/* Bulk and interrupt: a short packet or the full length completes the URB. */
		urb->actual_length += n_bytes - remain;
		if (remain || urb->actual_length == urb->length) {
			urb->status = 0;
			*xfer_done = 1;
		}
	}

	return 0;
}

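/*
 * Completion handling for control, bulk and interrupt transfers: scan the
 * QH's QTDs and their descriptors, give back finished URBs, advance control
 * transfer phases, save the data toggle, and handle NYET for PING.
 */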
static void complete_non_isoc_xfer_ddma(dwc_otg_hcd_t * hcd,
					dwc_hc_t * hc,
					dwc_otg_hc_regs_t * hc_regs,
					dwc_otg_halt_status_e halt_status)
{
	dwc_otg_hcd_urb_t *urb = NULL;
	dwc_otg_qtd_t *qtd, *qtd_tmp;
	dwc_otg_qh_t *qh;
	dwc_otg_host_dma_desc_t *dma_desc;
	uint32_t n_bytes, n_desc, i;
	uint8_t failed = 0, xfer_done;

	n_desc = 0;

	qh = hc->qh;

	if (hc->halt_status == DWC_OTG_HC_XFER_URB_DEQUEUE) {
		DWC_CIRCLEQ_FOREACH_SAFE(qtd, qtd_tmp, &hc->qh->qtd_list, qtd_list_entry) {
			qtd->in_process = 0;
		}
		return;
	}

	DWC_CIRCLEQ_FOREACH_SAFE(qtd, qtd_tmp, &qh->qtd_list, qtd_list_entry) {

		urb = qtd->urb;

		n_bytes = 0;
		xfer_done = 0;

		for (i = 0; i < qtd->n_desc; i++) {
			dma_desc = &qh->desc_list[n_desc];

			n_bytes = qh->n_bytes[n_desc];

			failed =
			    update_non_isoc_urb_state_ddma(hcd, hc, qtd,
							   dma_desc,
							   halt_status, n_bytes,
							   &xfer_done);

			if (failed
			    || (xfer_done
				&& (urb->status != -DWC_E_IN_PROGRESS))) {

				hcd->fops->complete(hcd, urb->priv, urb,
						    urb->status);
				dwc_otg_hcd_qtd_remove_and_free(hcd, qtd, qh);

				if (failed)
					goto stop_scan;
			} else if (qh->ep_type == UE_CONTROL) {
				if (qtd->control_phase == DWC_OTG_CONTROL_SETUP) {
					if (urb->length > 0) {
						qtd->control_phase = DWC_OTG_CONTROL_DATA;
					} else {
						qtd->control_phase = DWC_OTG_CONTROL_STATUS;
					}
					DWC_DEBUGPL(DBG_HCDV, " Control setup transaction done\n");
				} else if (qtd->control_phase == DWC_OTG_CONTROL_DATA) {
					if (xfer_done) {
						qtd->control_phase = DWC_OTG_CONTROL_STATUS;
						DWC_DEBUGPL(DBG_HCDV, " Control data transfer done\n");
					} else if (i + 1 == qtd->n_desc) {
						/*
						 * The Data phase continues in
						 * the next transfer; save the
						 * data toggle.
						 */
						dwc_otg_hcd_save_data_toggle(hc, hc_regs, qtd);
					}
				}
			}

			n_desc++;
		}
	}

stop_scan:

	if (qh->ep_type != UE_CONTROL) {
		/*
		 * A STALL resets the data toggle; otherwise save the toggle
		 * from the channel for the next transfer.
		 */
		if (halt_status == DWC_OTG_HC_XFER_STALL)
			qh->data_toggle = DWC_OTG_HC_PID_DATA0;
		else
			dwc_otg_hcd_save_data_toggle(hc, hc_regs, qtd);
	}

	if (halt_status == DWC_OTG_HC_XFER_COMPLETE) {
		hcint_data_t hcint;
		hcint.d32 = DWC_READ_REG32(&hc_regs->hcint);
		if (hcint.b.nyet) {
			/*
			 * A final NYET means the device is still busy: set
			 * the ping state so the next transfer starts with a
			 * PING, and clear the NYET interrupt.
			 */
			qh->ping_state = 1;
			clear_hc_int(hc_regs, nyet);
		}
	}
}

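/**
 * Completion handler for a halted host channel in Descriptor DMA mode.
 *
 * Updates and gives back the affected URBs, releases the channel (for
 * isochronous endpoints only when an error occurred or the QTD list is
 * empty), and schedules further transactions.
 *
 * @param hcd The HCD state structure for the DWC OTG controller.
 * @param hc The halted host channel.
 * @param hc_regs Host channel registers.
 * @param halt_status Reason for the channel halt.
 */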
void dwc_otg_hcd_complete_xfer_ddma(dwc_otg_hcd_t * hcd,
				    dwc_hc_t * hc,
				    dwc_otg_hc_regs_t * hc_regs,
				    dwc_otg_halt_status_e halt_status)
{
	uint8_t continue_isoc_xfer = 0;
	dwc_otg_transaction_type_e tr_type;
	dwc_otg_qh_t *qh = hc->qh;

	if (hc->ep_type == DWC_OTG_EP_TYPE_ISOC) {

		complete_isoc_xfer_ddma(hcd, hc, hc_regs, halt_status);

		/* Release the channel on errors or when no QTDs remain. */
		if (halt_status != DWC_OTG_HC_XFER_COMPLETE ||
		    DWC_CIRCLEQ_EMPTY(&qh->qtd_list)) {

			/* Halt the channel if it is still running. */
			if (halt_status == DWC_OTG_HC_XFER_COMPLETE) {
				dwc_otg_hc_halt(hcd->core_if, hc, halt_status);
			}

			release_channel_ddma(hcd, qh);
			dwc_otg_hcd_qh_remove(hcd, qh);
		} else {
			/* Keep the channel and continue streaming isochronous data. */
			DWC_LIST_MOVE_HEAD(&hcd->periodic_sched_assigned,
					   &qh->qh_list_entry);
			continue_isoc_xfer = 1;
		}
	} else {
		/* Control, bulk or interrupt: the channel is always released. */
		complete_non_isoc_xfer_ddma(hcd, hc, hc_regs, halt_status);

		release_channel_ddma(hcd, qh);
		dwc_otg_hcd_qh_remove(hcd, qh);

		/* Requeue the QH if it still has work pending. */
		if (!DWC_CIRCLEQ_EMPTY(&qh->qtd_list)) {
			dwc_otg_hcd_qh_add(hcd, qh);
		}
	}

	tr_type = dwc_otg_hcd_select_transactions(hcd);
	if (tr_type != DWC_OTG_TRANSACTION_NONE || continue_isoc_xfer) {
		if (continue_isoc_xfer) {
			if (tr_type == DWC_OTG_TRANSACTION_NONE) {
				tr_type = DWC_OTG_TRANSACTION_PERIODIC;
			} else if (tr_type == DWC_OTG_TRANSACTION_NON_PERIODIC) {
				tr_type = DWC_OTG_TRANSACTION_ALL;
			}
		}
		dwc_otg_hcd_queue_transactions(hcd, tr_type);
	}
}

#endif /* DWC_DEVICE_ONLY */