Merge remote-tracking branches 'asoc/fix/tlv320aic3x' and 'asoc/fix/wm8962' into...
[linux-drm-fsl-dcu.git] / drivers / usb / gadget / udc / mv_u3d_core.c
1 /*
2  * Copyright (C) 2011 Marvell International Ltd. All rights reserved.
3  *
4  * This program is free software; you can redistribute it and/or modify it
5  * under the terms and conditions of the GNU General Public License,
6  * version 2, as published by the Free Software Foundation.
7  */
8
9 #include <linux/module.h>
10 #include <linux/dma-mapping.h>
11 #include <linux/dmapool.h>
12 #include <linux/kernel.h>
13 #include <linux/delay.h>
14 #include <linux/ioport.h>
15 #include <linux/sched.h>
16 #include <linux/slab.h>
17 #include <linux/errno.h>
18 #include <linux/timer.h>
19 #include <linux/list.h>
20 #include <linux/notifier.h>
21 #include <linux/interrupt.h>
22 #include <linux/moduleparam.h>
23 #include <linux/device.h>
24 #include <linux/usb/ch9.h>
25 #include <linux/usb/gadget.h>
26 #include <linux/pm.h>
27 #include <linux/io.h>
28 #include <linux/irq.h>
29 #include <linux/platform_device.h>
30 #include <linux/platform_data/mv_usb.h>
31 #include <linux/clk.h>
32
33 #include "mv_u3d.h"
34
#define DRIVER_DESC		"Marvell PXA USB3.0 Device Controller driver"

/* Names used for driver registration and log messages. */
static const char driver_name[] = "mv_u3d";
static const char driver_desc[] = DRIVER_DESC;

/* Forward declarations (defined later in this file). */
static void mv_u3d_nuke(struct mv_u3d_ep *ep, int status);
static void mv_u3d_stop_activity(struct mv_u3d *u3d,
			struct usb_gadget_driver *driver);
43
/* for endpoint 0 operations */
static const struct usb_endpoint_descriptor mv_u3d_ep0_desc = {
	.bLength =		USB_DT_ENDPOINT_SIZE,
	.bDescriptorType =	USB_DT_ENDPOINT,
	/* address 0: the default control endpoint */
	.bEndpointAddress =	0,
	.bmAttributes =		USB_ENDPOINT_XFER_CONTROL,
	.wMaxPacketSize =	MV_U3D_EP0_MAX_PKT_SIZE,
};
52
/*
 * mv_u3d_ep0_reset() - reinitialize endpoint 0 hardware state.
 * @u3d: the controller
 *
 * Binds both ep0 software endpoints (index 0 = OUT, 1 = IN) to the shared
 * ep context slot 1, then for each direction pulses EP_INIT in the control
 * register to reset the endpoint state machine and re-programs max packet
 * size, burst size, enable bit and control transfer type.
 */
static void mv_u3d_ep0_reset(struct mv_u3d *u3d)
{
	struct mv_u3d_ep *ep;
	u32 epxcr;
	int i;

	for (i = 0; i < 2; i++) {
		ep = &u3d->eps[i];
		ep->u3d = u3d;

		/* ep0 ep context, ep0 in and out share the same ep context */
		ep->ep_context = &u3d->ep_context[1];
	}

	/* reset ep state machine */
	/* reset ep0 out: pulse EP_INIT (set, settle, clear) */
	epxcr = ioread32(&u3d->vuc_regs->epcr[0].epxoutcr0);
	epxcr |= MV_U3D_EPXCR_EP_INIT;
	iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxoutcr0);
	udelay(5);
	epxcr &= ~MV_U3D_EPXCR_EP_INIT;
	iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxoutcr0);

	epxcr = ((MV_U3D_EP0_MAX_PKT_SIZE
		<< MV_U3D_EPXCR_MAX_PACKET_SIZE_SHIFT)
		| (1 << MV_U3D_EPXCR_MAX_BURST_SIZE_SHIFT)
		| (1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
		| MV_U3D_EPXCR_EP_TYPE_CONTROL);
	iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxoutcr1);

	/* reset ep0 in: same EP_INIT pulse and reconfiguration */
	epxcr = ioread32(&u3d->vuc_regs->epcr[0].epxincr0);
	epxcr |= MV_U3D_EPXCR_EP_INIT;
	iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxincr0);
	udelay(5);
	epxcr &= ~MV_U3D_EPXCR_EP_INIT;
	iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxincr0);

	epxcr = ((MV_U3D_EP0_MAX_PKT_SIZE
		<< MV_U3D_EPXCR_MAX_PACKET_SIZE_SHIFT)
		| (1 << MV_U3D_EPXCR_MAX_BURST_SIZE_SHIFT)
		| (1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
		| MV_U3D_EPXCR_EP_TYPE_CONTROL);
	iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxincr1);
}
98
/*
 * mv_u3d_ep0_stall() - stall both directions of endpoint 0.
 * @u3d: the controller
 *
 * Sets EP_HALT on ep0 OUT and IN, then rewinds the ep0 software state
 * machine to wait for the next SETUP packet (direction OUT).
 */
static void mv_u3d_ep0_stall(struct mv_u3d *u3d)
{
	u32 tmp;
	dev_dbg(u3d->dev, "%s\n", __func__);

	/* set TX and RX to stall */
	tmp = ioread32(&u3d->vuc_regs->epcr[0].epxoutcr0);
	tmp |= MV_U3D_EPXCR_EP_HALT;
	iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxoutcr0);

	tmp = ioread32(&u3d->vuc_regs->epcr[0].epxincr0);
	tmp |= MV_U3D_EPXCR_EP_HALT;
	iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxincr0);

	/* update ep0 state */
	u3d->ep0_state = MV_U3D_WAIT_FOR_SETUP;
	u3d->ep0_dir = MV_U3D_EP_DIR_OUT;
}
117
/*
 * mv_u3d_process_ep_req() - reap completed TRBs for one request.
 * @u3d:      the controller
 * @index:    flat endpoint index (ep_num * 2 + direction)
 * @curr_req: request whose TRB list is walked
 *
 * Walks curr_req->trb_list, taking back ownership of each TRB and reading
 * the per-endpoint transfer status registers to accumulate the number of
 * bytes actually transferred into curr_req->req.actual.
 *
 * Return: 0 on success, 1 if a TRB is unexpectedly still owned by the
 * hardware, -EPROTO if the hardware reported a transfer error.
 */
static int mv_u3d_process_ep_req(struct mv_u3d *u3d, int index,
	struct mv_u3d_req *curr_req)
{
	struct mv_u3d_trb	*curr_trb;
	dma_addr_t cur_deq_lo;
	struct mv_u3d_ep_context	*curr_ep_context;
	int trb_complete, actual, remaining_length = 0;
	int direction, ep_num;
	int retval = 0;
	u32 tmp, status, length;

	curr_ep_context = &u3d->ep_context[index];
	/* even index = OUT, odd index = IN; two context slots per ep number */
	direction = index % 2;
	ep_num = index / 2;

	trb_complete = 0;
	actual = curr_req->req.length;

	while (!list_empty(&curr_req->trb_list)) {
		curr_trb = list_entry(curr_req->trb_list.next,
					struct mv_u3d_trb, trb_list);
		if (!curr_trb->trb_hw->ctrl.own) {
			/* hardware should have handed the TRB back by now */
			dev_err(u3d->dev, "%s, TRB own error!\n",
				u3d->eps[index].name);
			return 1;
		}

		/* reclaim the TRB for software */
		curr_trb->trb_hw->ctrl.own = 0;
		if (direction == MV_U3D_EP_DIR_OUT) {
			tmp = ioread32(&u3d->vuc_regs->rxst[ep_num].statuslo);
			cur_deq_lo =
				ioread32(&u3d->vuc_regs->rxst[ep_num].curdeqlo);
		} else {
			tmp = ioread32(&u3d->vuc_regs->txst[ep_num].statuslo);
			cur_deq_lo =
				ioread32(&u3d->vuc_regs->txst[ep_num].curdeqlo);
		}

		/* statuslo packs completion code and residual TRB length */
		status = tmp >> MV_U3D_XFERSTATUS_COMPLETE_SHIFT;
		length = tmp & MV_U3D_XFERSTATUS_TRB_LENGTH_MASK;

		if (status == MV_U3D_COMPLETE_SUCCESS ||
			(status == MV_U3D_COMPLETE_SHORT_PACKET &&
			direction == MV_U3D_EP_DIR_OUT)) {
			/*
			 * NOTE(review): remaining_length accumulates across
			 * TRBs but is subtracted from actual on EVERY
			 * iteration, which looks like it over-subtracts for
			 * multi-TRB requests — confirm intended semantics.
			 */
			remaining_length += length;
			actual -= remaining_length;
		} else {
			dev_err(u3d->dev,
				"complete_tr error: ep=%d %s: error = 0x%x\n",
				index >> 1, direction ? "SEND" : "RECV",
				status);
			retval = -EPROTO;
		}

		list_del_init(&curr_trb->trb_list);
	}
	if (retval)
		return retval;

	curr_req->req.actual = actual;
	return 0;
}
180
/*
 * mv_u3d_done() - retire a request; caller blocked irqs
 * @ep:     endpoint the request was queued on
 * @req:    the request to retire
 * @status: request status to be set, only works when
 *          request is still in progress.
 *
 * Removes the request from the endpoint queue, frees its TRB resources
 * (dma_pool entry for a single TRB, or unmap + kfree for a chained array),
 * unmaps the data buffer, and gives the request back to the gadget driver.
 * The u3d lock is dropped around the completion callback, as the sparse
 * annotations below document.
 */
static
void mv_u3d_done(struct mv_u3d_ep *ep, struct mv_u3d_req *req, int status)
	__releases(&ep->udc->lock)
	__acquires(&ep->udc->lock)
{
	struct mv_u3d *u3d = (struct mv_u3d *)ep->u3d;

	dev_dbg(u3d->dev, "mv_u3d_done: remove req->queue\n");
	/* Removed the req from ep queue */
	list_del_init(&req->queue);

	/* req.status should be set as -EINPROGRESS in ep_queue() */
	if (req->req.status == -EINPROGRESS)
		req->req.status = status;
	else
		status = req->req.status;

	/* Free trb for the request */
	if (!req->chain)
		/* single TRB came from the dma pool */
		dma_pool_free(u3d->trb_pool,
			req->trb_head->trb_hw, req->trb_head->trb_dma);
	else {
		/* chained TRB array was kcalloc'd and streaming-mapped */
		dma_unmap_single(ep->u3d->gadget.dev.parent,
			(dma_addr_t)req->trb_head->trb_dma,
			req->trb_count * sizeof(struct mv_u3d_trb_hw),
			DMA_BIDIRECTIONAL);
		kfree(req->trb_head->trb_hw);
	}
	kfree(req->trb_head);

	usb_gadget_unmap_request(&u3d->gadget, &req->req, mv_u3d_ep_dir(ep));

	if (status && (status != -ESHUTDOWN)) {
		dev_dbg(u3d->dev, "complete %s req %p stat %d len %u/%u",
			ep->ep.name, &req->req, status,
			req->req.actual, req->req.length);
	}

	/* drop the lock across the gadget driver's completion callback */
	spin_unlock(&ep->u3d->lock);

	usb_gadget_giveback_request(&ep->ep, &req->req);

	spin_lock(&ep->u3d->lock);
}
230
/*
 * mv_u3d_queue_trb() - hand a request's TRB chain to the hardware.
 * @ep:  target endpoint
 * @req: request whose trb_head has already been built
 *
 * Programs the endpoint context with the TRB chain's DMA address and rings
 * the doorbell. Only allowed when the endpoint hardware queue is empty;
 * returns -ENOMEM (with a WARN) otherwise, 0 on success.
 */
static int mv_u3d_queue_trb(struct mv_u3d_ep *ep, struct mv_u3d_req *req)
{
	u32 tmp, direction;
	struct mv_u3d *u3d;
	struct mv_u3d_ep_context *ep_context;
	int retval = 0;

	u3d = ep->u3d;
	direction = mv_u3d_ep_dir(ep);

	/* ep0 in and out share the same ep context slot 1*/
	if (ep->ep_num == 0)
		ep_context = &(u3d->ep_context[1]);
	else
		ep_context = &(u3d->ep_context[ep->ep_num * 2 + direction]);

	/* check if the pipe is empty or not */
	if (!list_empty(&ep->queue)) {
		dev_err(u3d->dev, "add trb to non-empty queue!\n");
		retval = -ENOMEM;
		WARN_ON(1);
	} else {
		ep_context->rsvd0 = cpu_to_le32(1);
		ep_context->rsvd1 = 0;

		/* Configure the trb address and set the DCS bit.
		 * Both DCS bit and own bit in trb should be set.
		 */
		ep_context->trb_addr_lo =
			cpu_to_le32(req->trb_head->trb_dma | DCS_ENABLE);
		ep_context->trb_addr_hi = 0;

		/* Ensure that updates to the EP Context will
		 * occure before Ring Bell.
		 */
		wmb();

		/* ring bell the ep */
		if (ep->ep_num == 0)
			tmp = 0x1;
		else
			tmp = ep->ep_num * 2
				+ ((direction == MV_U3D_EP_DIR_OUT) ? 0 : 1);

		iowrite32(tmp, &u3d->op_regs->doorbell);
	}
	return retval;
}
279
/*
 * mv_u3d_build_trb_one() - build a single TRB covering a whole request.
 * @req:    the request (must fit in one MV_U3D_EP_MAX_LENGTH_TRANSFER TRB)
 * @length: out: number of bytes this TRB covers
 * @dma:    out: DMA address of the hardware TRB
 *
 * Allocates the software TRB with kzalloc and the hardware TRB from the
 * controller's dma_pool, fills in buffer address, length, direction, type
 * and ownership, and advances req->req.actual. Called with the u3d lock
 * held, hence GFP_ATOMIC.
 *
 * Return: the software TRB, or NULL on allocation failure.
 */
static struct mv_u3d_trb *mv_u3d_build_trb_one(struct mv_u3d_req *req,
				unsigned *length, dma_addr_t *dma)
{
	u32 temp;
	unsigned int direction;
	struct mv_u3d_trb *trb;
	struct mv_u3d_trb_hw *trb_hw;
	struct mv_u3d *u3d;

	/* how big will this transfer be? */
	*length = req->req.length - req->req.actual;
	BUG_ON(*length > (unsigned)MV_U3D_EP_MAX_LENGTH_TRANSFER);

	u3d = req->ep->u3d;

	trb = kzalloc(sizeof(*trb), GFP_ATOMIC);
	if (!trb)
		return NULL;

	/*
	 * Be careful that no _GFP_HIGHMEM is set,
	 * or we can not use dma_to_virt
	 * cannot use GFP_KERNEL in spin lock
	 */
	trb_hw = dma_pool_alloc(u3d->trb_pool, GFP_ATOMIC, dma);
	if (!trb_hw) {
		kfree(trb);
		dev_err(u3d->dev,
			"%s, dma_pool_alloc fail\n", __func__);
		return NULL;
	}
	trb->trb_dma = *dma;
	trb->trb_hw = trb_hw;

	/* initialize buffer page pointers */
	temp = (u32)(req->req.dma + req->req.actual);

	trb_hw->buf_addr_lo = cpu_to_le32(temp);
	trb_hw->buf_addr_hi = 0;
	trb_hw->trb_len = cpu_to_le32(*length);
	/* hand the TRB to the hardware */
	trb_hw->ctrl.own = 1;

	if (req->ep->ep_num == 0)
		trb_hw->ctrl.type = TYPE_DATA;
	else
		trb_hw->ctrl.type = TYPE_NORMAL;

	req->req.actual += *length;

	direction = mv_u3d_ep_dir(req->ep);
	if (direction == MV_U3D_EP_DIR_IN)
		trb_hw->ctrl.dir = 1;
	else
		trb_hw->ctrl.dir = 0;

	/* Enable interrupt for the last trb of a request */
	if (!req->req.no_interrupt)
		trb_hw->ctrl.ioc = 1;

	trb_hw->ctrl.chain = 0;

	/* make TRB contents visible before the doorbell is rung */
	wmb();
	return trb;
}
344
/*
 * mv_u3d_build_trb_chain() - fill one TRB of a multi-TRB chain.
 * @req:     the request being split across TRBs
 * @length:  out: bytes covered by this TRB (at most
 *           MV_U3D_EP_MAX_LENGTH_TRANSFER)
 * @trb:     software TRB whose trb_hw is already assigned by the caller
 * @is_last: out: set when this TRB completes the request
 *
 * Fills the hardware TRB (buffer address, length, direction, type,
 * ownership), advances req->req.actual, and decides whether this is the
 * final TRB — including the zero-length-packet case when req.zero is set.
 * Only the last TRB gets IOC; all others get the chain bit.
 *
 * Return: always 0 in the visible implementation.
 */
static int mv_u3d_build_trb_chain(struct mv_u3d_req *req, unsigned *length,
		struct mv_u3d_trb *trb, int *is_last)
{
	u32 temp;
	unsigned int direction;
	struct mv_u3d *u3d;

	/* how big will this transfer be? */
	*length = min(req->req.length - req->req.actual,
			(unsigned)MV_U3D_EP_MAX_LENGTH_TRANSFER);

	u3d = req->ep->u3d;

	/* chained TRBs are mapped as one block via trb_head later */
	trb->trb_dma = 0;

	/* initialize buffer page pointers */
	temp = (u32)(req->req.dma + req->req.actual);

	trb->trb_hw->buf_addr_lo = cpu_to_le32(temp);
	trb->trb_hw->buf_addr_hi = 0;
	trb->trb_hw->trb_len = cpu_to_le32(*length);
	trb->trb_hw->ctrl.own = 1;

	if (req->ep->ep_num == 0)
		trb->trb_hw->ctrl.type = TYPE_DATA;
	else
		trb->trb_hw->ctrl.type = TYPE_NORMAL;

	req->req.actual += *length;

	direction = mv_u3d_ep_dir(req->ep);
	if (direction == MV_U3D_EP_DIR_IN)
		trb->trb_hw->ctrl.dir = 1;
	else
		trb->trb_hw->ctrl.dir = 0;

	/* zlp is needed if req->req.zero is set */
	if (req->req.zero) {
		if (*length == 0 || (*length % req->ep->ep.maxpacket) != 0)
			*is_last = 1;
		else
			*is_last = 0;
	} else if (req->req.length == req->req.actual)
		*is_last = 1;
	else
		*is_last = 0;

	/* Enable interrupt for the last trb of a request */
	if (*is_last && !req->req.no_interrupt)
		trb->trb_hw->ctrl.ioc = 1;

	if (*is_last)
		trb->trb_hw->ctrl.chain = 0;
	else {
		trb->trb_hw->ctrl.chain = 1;
		dev_dbg(u3d->dev, "chain trb\n");
	}

	/* make TRB contents visible before the doorbell is rung */
	wmb();

	return 0;
}
407
408 /* generate TRB linked list for a request
409  * usb controller only supports continous trb chain,
410  * that trb structure physical address should be continous.
411  */
412 static int mv_u3d_req_to_trb(struct mv_u3d_req *req)
413 {
414         unsigned count;
415         int is_last;
416         struct mv_u3d_trb *trb;
417         struct mv_u3d_trb_hw *trb_hw;
418         struct mv_u3d *u3d;
419         dma_addr_t dma;
420         unsigned length;
421         unsigned trb_num;
422
423         u3d = req->ep->u3d;
424
425         INIT_LIST_HEAD(&req->trb_list);
426
427         length = req->req.length - req->req.actual;
428         /* normally the request transfer length is less than 16KB.
429          * we use buil_trb_one() to optimize it.
430          */
431         if (length <= (unsigned)MV_U3D_EP_MAX_LENGTH_TRANSFER) {
432                 trb = mv_u3d_build_trb_one(req, &count, &dma);
433                 list_add_tail(&trb->trb_list, &req->trb_list);
434                 req->trb_head = trb;
435                 req->trb_count = 1;
436                 req->chain = 0;
437         } else {
438                 trb_num = length / MV_U3D_EP_MAX_LENGTH_TRANSFER;
439                 if (length % MV_U3D_EP_MAX_LENGTH_TRANSFER)
440                         trb_num++;
441
442                 trb = kcalloc(trb_num, sizeof(*trb), GFP_ATOMIC);
443                 if (!trb)
444                         return -ENOMEM;
445
446                 trb_hw = kcalloc(trb_num, sizeof(*trb_hw), GFP_ATOMIC);
447                 if (!trb_hw) {
448                         kfree(trb);
449                         return -ENOMEM;
450                 }
451
452                 do {
453                         trb->trb_hw = trb_hw;
454                         if (mv_u3d_build_trb_chain(req, &count,
455                                                 trb, &is_last)) {
456                                 dev_err(u3d->dev,
457                                         "%s, mv_u3d_build_trb_chain fail\n",
458                                         __func__);
459                                 return -EIO;
460                         }
461
462                         list_add_tail(&trb->trb_list, &req->trb_list);
463                         req->trb_count++;
464                         trb++;
465                         trb_hw++;
466                 } while (!is_last);
467
468                 req->trb_head = list_entry(req->trb_list.next,
469                                         struct mv_u3d_trb, trb_list);
470                 req->trb_head->trb_dma = dma_map_single(u3d->gadget.dev.parent,
471                                         req->trb_head->trb_hw,
472                                         trb_num * sizeof(*trb_hw),
473                                         DMA_BIDIRECTIONAL);
474
475                 req->chain = 1;
476         }
477
478         return 0;
479 }
480
481 static int
482 mv_u3d_start_queue(struct mv_u3d_ep *ep)
483 {
484         struct mv_u3d *u3d = ep->u3d;
485         struct mv_u3d_req *req;
486         int ret;
487
488         if (!list_empty(&ep->req_list) && !ep->processing)
489                 req = list_entry(ep->req_list.next, struct mv_u3d_req, list);
490         else
491                 return 0;
492
493         ep->processing = 1;
494
495         /* set up dma mapping */
496         ret = usb_gadget_map_request(&u3d->gadget, &req->req,
497                                         mv_u3d_ep_dir(ep));
498         if (ret)
499                 return ret;
500
501         req->req.status = -EINPROGRESS;
502         req->req.actual = 0;
503         req->trb_count = 0;
504
505         /* build trbs and push them to device queue */
506         if (!mv_u3d_req_to_trb(req)) {
507                 ret = mv_u3d_queue_trb(ep, req);
508                 if (ret) {
509                         ep->processing = 0;
510                         return ret;
511                 }
512         } else {
513                 ep->processing = 0;
514                 dev_err(u3d->dev, "%s, mv_u3d_req_to_trb fail\n", __func__);
515                 return -ENOMEM;
516         }
517
518         /* irq handler advances the queue */
519         if (req)
520                 list_add_tail(&req->queue, &ep->queue);
521
522         return 0;
523 }
524
525 static int mv_u3d_ep_enable(struct usb_ep *_ep,
526                 const struct usb_endpoint_descriptor *desc)
527 {
528         struct mv_u3d *u3d;
529         struct mv_u3d_ep *ep;
530         struct mv_u3d_ep_context *ep_context;
531         u16 max = 0;
532         unsigned maxburst = 0;
533         u32 epxcr, direction;
534
535         if (!_ep || !desc || desc->bDescriptorType != USB_DT_ENDPOINT)
536                 return -EINVAL;
537
538         ep = container_of(_ep, struct mv_u3d_ep, ep);
539         u3d = ep->u3d;
540
541         if (!u3d->driver || u3d->gadget.speed == USB_SPEED_UNKNOWN)
542                 return -ESHUTDOWN;
543
544         direction = mv_u3d_ep_dir(ep);
545         max = le16_to_cpu(desc->wMaxPacketSize);
546
547         if (!_ep->maxburst)
548                 _ep->maxburst = 1;
549         maxburst = _ep->maxburst;
550
551         /* Get the endpoint context address */
552         ep_context = (struct mv_u3d_ep_context *)ep->ep_context;
553
554         /* Set the max burst size */
555         switch (desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK) {
556         case USB_ENDPOINT_XFER_BULK:
557                 if (maxburst > 16) {
558                         dev_dbg(u3d->dev,
559                                 "max burst should not be greater "
560                                 "than 16 on bulk ep\n");
561                         maxburst = 1;
562                         _ep->maxburst = maxburst;
563                 }
564                 dev_dbg(u3d->dev,
565                         "maxburst: %d on bulk %s\n", maxburst, ep->name);
566                 break;
567         case USB_ENDPOINT_XFER_CONTROL:
568                 /* control transfer only supports maxburst as one */
569                 maxburst = 1;
570                 _ep->maxburst = maxburst;
571                 break;
572         case USB_ENDPOINT_XFER_INT:
573                 if (maxburst != 1) {
574                         dev_dbg(u3d->dev,
575                                 "max burst should be 1 on int ep "
576                                 "if transfer size is not 1024\n");
577                         maxburst = 1;
578                         _ep->maxburst = maxburst;
579                 }
580                 break;
581         case USB_ENDPOINT_XFER_ISOC:
582                 if (maxburst != 1) {
583                         dev_dbg(u3d->dev,
584                                 "max burst should be 1 on isoc ep "
585                                 "if transfer size is not 1024\n");
586                         maxburst = 1;
587                         _ep->maxburst = maxburst;
588                 }
589                 break;
590         default:
591                 goto en_done;
592         }
593
594         ep->ep.maxpacket = max;
595         ep->ep.desc = desc;
596         ep->enabled = 1;
597
598         /* Enable the endpoint for Rx or Tx and set the endpoint type */
599         if (direction == MV_U3D_EP_DIR_OUT) {
600                 epxcr = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
601                 epxcr |= MV_U3D_EPXCR_EP_INIT;
602                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
603                 udelay(5);
604                 epxcr &= ~MV_U3D_EPXCR_EP_INIT;
605                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
606
607                 epxcr = ((max << MV_U3D_EPXCR_MAX_PACKET_SIZE_SHIFT)
608                       | ((maxburst - 1) << MV_U3D_EPXCR_MAX_BURST_SIZE_SHIFT)
609                       | (1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
610                       | (desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK));
611                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr1);
612         } else {
613                 epxcr = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
614                 epxcr |= MV_U3D_EPXCR_EP_INIT;
615                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
616                 udelay(5);
617                 epxcr &= ~MV_U3D_EPXCR_EP_INIT;
618                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
619
620                 epxcr = ((max << MV_U3D_EPXCR_MAX_PACKET_SIZE_SHIFT)
621                       | ((maxburst - 1) << MV_U3D_EPXCR_MAX_BURST_SIZE_SHIFT)
622                       | (1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
623                       | (desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK));
624                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxincr1);
625         }
626
627         return 0;
628 en_done:
629         return -EINVAL;
630 }
631
632 static int  mv_u3d_ep_disable(struct usb_ep *_ep)
633 {
634         struct mv_u3d *u3d;
635         struct mv_u3d_ep *ep;
636         struct mv_u3d_ep_context *ep_context;
637         u32 epxcr, direction;
638         unsigned long flags;
639
640         if (!_ep)
641                 return -EINVAL;
642
643         ep = container_of(_ep, struct mv_u3d_ep, ep);
644         if (!ep->ep.desc)
645                 return -EINVAL;
646
647         u3d = ep->u3d;
648
649         /* Get the endpoint context address */
650         ep_context = ep->ep_context;
651
652         direction = mv_u3d_ep_dir(ep);
653
654         /* nuke all pending requests (does flush) */
655         spin_lock_irqsave(&u3d->lock, flags);
656         mv_u3d_nuke(ep, -ESHUTDOWN);
657         spin_unlock_irqrestore(&u3d->lock, flags);
658
659         /* Disable the endpoint for Rx or Tx and reset the endpoint type */
660         if (direction == MV_U3D_EP_DIR_OUT) {
661                 epxcr = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr1);
662                 epxcr &= ~((1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
663                       | USB_ENDPOINT_XFERTYPE_MASK);
664                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr1);
665         } else {
666                 epxcr = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr1);
667                 epxcr &= ~((1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
668                       | USB_ENDPOINT_XFERTYPE_MASK);
669                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxincr1);
670         }
671
672         ep->enabled = 0;
673
674         ep->ep.desc = NULL;
675         return 0;
676 }
677
678 static struct usb_request *
679 mv_u3d_alloc_request(struct usb_ep *_ep, gfp_t gfp_flags)
680 {
681         struct mv_u3d_req *req = NULL;
682
683         req = kzalloc(sizeof *req, gfp_flags);
684         if (!req)
685                 return NULL;
686
687         INIT_LIST_HEAD(&req->queue);
688
689         return &req->req;
690 }
691
692 static void mv_u3d_free_request(struct usb_ep *_ep, struct usb_request *_req)
693 {
694         struct mv_u3d_req *req = container_of(_req, struct mv_u3d_req, req);
695
696         kfree(req);
697 }
698
/*
 * mv_u3d_ep_fifo_flush() - usb_ep_ops.fifo_flush: flush an endpoint FIFO.
 * @_ep: the endpoint to flush
 *
 * For ep0, sets EP_FLUSH, waits a fixed 10us and clears the bit manually.
 * For other endpoints, sets EP_FLUSH and polls (up to MV_U3D_FLUSH_TIMEOUT)
 * for the hardware to clear it, logging a debug message on timeout.
 * No-op if the endpoint is not enabled.
 */
static void mv_u3d_ep_fifo_flush(struct usb_ep *_ep)
{
	struct mv_u3d *u3d;
	u32 direction;
	struct mv_u3d_ep *ep = container_of(_ep, struct mv_u3d_ep, ep);
	unsigned int loops;
	u32 tmp;

	/* if endpoint is not enabled, cannot flush endpoint */
	if (!ep->enabled)
		return;

	u3d = ep->u3d;
	direction = mv_u3d_ep_dir(ep);

	/* ep0 need clear bit after flushing fifo. */
	if (!ep->ep_num) {
		if (direction == MV_U3D_EP_DIR_OUT) {
			tmp = ioread32(&u3d->vuc_regs->epcr[0].epxoutcr0);
			tmp |= MV_U3D_EPXCR_EP_FLUSH;
			iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxoutcr0);
			udelay(10);
			tmp &= ~MV_U3D_EPXCR_EP_FLUSH;
			iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxoutcr0);
		} else {
			tmp = ioread32(&u3d->vuc_regs->epcr[0].epxincr0);
			tmp |= MV_U3D_EPXCR_EP_FLUSH;
			iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxincr0);
			udelay(10);
			tmp &= ~MV_U3D_EPXCR_EP_FLUSH;
			iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxincr0);
		}
		return;
	}

	if (direction == MV_U3D_EP_DIR_OUT) {
		tmp = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
		tmp |= MV_U3D_EPXCR_EP_FLUSH;
		iowrite32(tmp, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);

		/* Wait until flushing completed */
		loops = LOOPS(MV_U3D_FLUSH_TIMEOUT);
		while (ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0) &
			MV_U3D_EPXCR_EP_FLUSH) {
			/*
			 * EP_FLUSH bit should be cleared to indicate this
			 * operation is complete
			 */
			if (loops == 0) {
				dev_dbg(u3d->dev,
				    "EP FLUSH TIMEOUT for ep%d%s\n", ep->ep_num,
				    direction ? "in" : "out");
				return;
			}
			loops--;
			udelay(LOOPS_USEC);
		}
	} else {	/* EP_DIR_IN */
		tmp = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
		tmp |= MV_U3D_EPXCR_EP_FLUSH;
		iowrite32(tmp, &u3d->vuc_regs->epcr[ep->ep_num].epxincr0);

		/* Wait until flushing completed */
		loops = LOOPS(MV_U3D_FLUSH_TIMEOUT);
		while (ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr0) &
			MV_U3D_EPXCR_EP_FLUSH) {
			/*
			* EP_FLUSH bit should be cleared to indicate this
			* operation is complete
			*/
			if (loops == 0) {
				dev_dbg(u3d->dev,
				    "EP FLUSH TIMEOUT for ep%d%s\n", ep->ep_num,
				    direction ? "in" : "out");
				return;
			}
			loops--;
			udelay(LOOPS_USEC);
		}
	}
}
780
/* queues (submits) an I/O request to an endpoint */
/*
 * mv_u3d_ep_queue() - usb_ep_ops.queue.
 * @_ep:       the endpoint
 * @_req:      the request to submit
 * @gfp_flags: unused here; allocations happen later with GFP_ATOMIC
 *
 * Validates the request, appends it to the endpoint's software req_list
 * under ep->req_lock, and — only when it is the first pending request —
 * kicks the hardware via mv_u3d_start_queue() under the u3d lock.
 * A zero-length request on ep0 during the status stage is consumed
 * without queuing.
 *
 * Return: 0 on success, -EINVAL/-EMSGSIZE/-ESHUTDOWN on bad input or
 * when no driver is bound.
 */
static int
mv_u3d_ep_queue(struct usb_ep *_ep, struct usb_request *_req, gfp_t gfp_flags)
{
	struct mv_u3d_ep *ep;
	struct mv_u3d_req *req;
	struct mv_u3d *u3d;
	unsigned long flags;
	int is_first_req = 0;

	if (unlikely(!_ep || !_req))
		return -EINVAL;

	ep = container_of(_ep, struct mv_u3d_ep, ep);
	u3d = ep->u3d;

	req = container_of(_req, struct mv_u3d_req, req);

	/* zero-length ep0 request in status stage: just advance the state */
	if (!ep->ep_num
		&& u3d->ep0_state == MV_U3D_STATUS_STAGE
		&& !_req->length) {
		dev_dbg(u3d->dev, "ep0 status stage\n");
		u3d->ep0_state = MV_U3D_WAIT_FOR_SETUP;
		return 0;
	}

	dev_dbg(u3d->dev, "%s: %s, req: 0x%p\n",
			__func__, _ep->name, req);

	/* catch various bogus parameters */
	if (!req->req.complete || !req->req.buf
			|| !list_empty(&req->queue)) {
		dev_err(u3d->dev,
			"%s, bad params, _req: 0x%p,"
			"req->req.complete: 0x%p, req->req.buf: 0x%p,"
			"list_empty: 0x%x\n",
			__func__, _req,
			req->req.complete, req->req.buf,
			list_empty(&req->queue));
		return -EINVAL;
	}
	if (unlikely(!ep->ep.desc)) {
		dev_err(u3d->dev, "%s, bad ep\n", __func__);
		return -EINVAL;
	}
	/* isoc transfers are limited to one max-packet per request here */
	if (ep->ep.desc->bmAttributes == USB_ENDPOINT_XFER_ISOC) {
		if (req->req.length > ep->ep.maxpacket)
			return -EMSGSIZE;
	}

	if (!u3d->driver || u3d->gadget.speed == USB_SPEED_UNKNOWN) {
		dev_err(u3d->dev,
			"bad params of driver/speed\n");
		return -ESHUTDOWN;
	}

	req->ep = ep;

	/* Software list handles usb request. */
	spin_lock_irqsave(&ep->req_lock, flags);
	is_first_req = list_empty(&ep->req_list);
	list_add_tail(&req->list, &ep->req_list);
	spin_unlock_irqrestore(&ep->req_lock, flags);
	if (!is_first_req) {
		/* hardware is busy; the completion path will pick this up */
		dev_dbg(u3d->dev, "list is not empty\n");
		return 0;
	}

	dev_dbg(u3d->dev, "call mv_u3d_start_queue from usb_ep_queue\n");
	spin_lock_irqsave(&u3d->lock, flags);
	mv_u3d_start_queue(ep);
	spin_unlock_irqrestore(&u3d->lock, flags);
	return 0;
}
855
856 /* dequeues (cancels, unlinks) an I/O request from an endpoint */
857 static int mv_u3d_ep_dequeue(struct usb_ep *_ep, struct usb_request *_req)
858 {
859         struct mv_u3d_ep *ep;
860         struct mv_u3d_req *req;
861         struct mv_u3d *u3d;
862         struct mv_u3d_ep_context *ep_context;
863         struct mv_u3d_req *next_req;
864
865         unsigned long flags;
866         int ret = 0;
867
868         if (!_ep || !_req)
869                 return -EINVAL;
870
871         ep = container_of(_ep, struct mv_u3d_ep, ep);
872         u3d = ep->u3d;
873
874         spin_lock_irqsave(&ep->u3d->lock, flags);
875
876         /* make sure it's actually queued on this endpoint */
877         list_for_each_entry(req, &ep->queue, queue) {
878                 if (&req->req == _req)
879                         break;
880         }
881         if (&req->req != _req) {
882                 ret = -EINVAL;
883                 goto out;
884         }
885
886         /* The request is in progress, or completed but not dequeued */
887         if (ep->queue.next == &req->queue) {
888                 _req->status = -ECONNRESET;
889                 mv_u3d_ep_fifo_flush(_ep);
890
891                 /* The request isn't the last request in this ep queue */
892                 if (req->queue.next != &ep->queue) {
893                         dev_dbg(u3d->dev,
894                                 "it is the last request in this ep queue\n");
895                         ep_context = ep->ep_context;
896                         next_req = list_entry(req->queue.next,
897                                         struct mv_u3d_req, queue);
898
899                         /* Point first TRB of next request to the EP context. */
900                         iowrite32((unsigned long) next_req->trb_head,
901                                         &ep_context->trb_addr_lo);
902                 } else {
903                         struct mv_u3d_ep_context *ep_context;
904                         ep_context = ep->ep_context;
905                         ep_context->trb_addr_lo = 0;
906                         ep_context->trb_addr_hi = 0;
907                 }
908
909         } else
910                 WARN_ON(1);
911
912         mv_u3d_done(ep, req, -ECONNRESET);
913
914         /* remove the req from the ep req list */
915         if (!list_empty(&ep->req_list)) {
916                 struct mv_u3d_req *curr_req;
917                 curr_req = list_entry(ep->req_list.next,
918                                         struct mv_u3d_req, list);
919                 if (curr_req == req) {
920                         list_del_init(&req->list);
921                         ep->processing = 0;
922                 }
923         }
924
925 out:
926         spin_unlock_irqrestore(&ep->u3d->lock, flags);
927         return ret;
928 }
929
930 static void
931 mv_u3d_ep_set_stall(struct mv_u3d *u3d, u8 ep_num, u8 direction, int stall)
932 {
933         u32 tmp;
934         struct mv_u3d_ep *ep = u3d->eps;
935
936         dev_dbg(u3d->dev, "%s\n", __func__);
937         if (direction == MV_U3D_EP_DIR_OUT) {
938                 tmp = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
939                 if (stall)
940                         tmp |= MV_U3D_EPXCR_EP_HALT;
941                 else
942                         tmp &= ~MV_U3D_EPXCR_EP_HALT;
943                 iowrite32(tmp, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
944         } else {
945                 tmp = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
946                 if (stall)
947                         tmp |= MV_U3D_EPXCR_EP_HALT;
948                 else
949                         tmp &= ~MV_U3D_EPXCR_EP_HALT;
950                 iowrite32(tmp, &u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
951         }
952 }
953
954 static int mv_u3d_ep_set_halt_wedge(struct usb_ep *_ep, int halt, int wedge)
955 {
956         struct mv_u3d_ep *ep;
957         unsigned long flags = 0;
958         int status = 0;
959         struct mv_u3d *u3d;
960
961         ep = container_of(_ep, struct mv_u3d_ep, ep);
962         u3d = ep->u3d;
963         if (!ep->ep.desc) {
964                 status = -EINVAL;
965                 goto out;
966         }
967
968         if (ep->ep.desc->bmAttributes == USB_ENDPOINT_XFER_ISOC) {
969                 status = -EOPNOTSUPP;
970                 goto out;
971         }
972
973         /*
974          * Attempt to halt IN ep will fail if any transfer requests
975          * are still queue
976          */
977         if (halt && (mv_u3d_ep_dir(ep) == MV_U3D_EP_DIR_IN)
978                         && !list_empty(&ep->queue)) {
979                 status = -EAGAIN;
980                 goto out;
981         }
982
983         spin_lock_irqsave(&ep->u3d->lock, flags);
984         mv_u3d_ep_set_stall(u3d, ep->ep_num, mv_u3d_ep_dir(ep), halt);
985         if (halt && wedge)
986                 ep->wedge = 1;
987         else if (!halt)
988                 ep->wedge = 0;
989         spin_unlock_irqrestore(&ep->u3d->lock, flags);
990
991         if (ep->ep_num == 0)
992                 u3d->ep0_dir = MV_U3D_EP_DIR_OUT;
993 out:
994         return status;
995 }
996
/* usb_ep_ops.set_halt: halt or un-halt without touching the wedge flag */
static int mv_u3d_ep_set_halt(struct usb_ep *_ep, int halt)
{
	return mv_u3d_ep_set_halt_wedge(_ep, halt, 0);
}
1001
/* usb_ep_ops.set_wedge: halt the endpoint and latch the wedge flag */
static int mv_u3d_ep_set_wedge(struct usb_ep *_ep)
{
	return mv_u3d_ep_set_halt_wedge(_ep, 1, 1);
}
1006
1007 static struct usb_ep_ops mv_u3d_ep_ops = {
1008         .enable         = mv_u3d_ep_enable,
1009         .disable        = mv_u3d_ep_disable,
1010
1011         .alloc_request  = mv_u3d_alloc_request,
1012         .free_request   = mv_u3d_free_request,
1013
1014         .queue          = mv_u3d_ep_queue,
1015         .dequeue        = mv_u3d_ep_dequeue,
1016
1017         .set_wedge      = mv_u3d_ep_set_wedge,
1018         .set_halt       = mv_u3d_ep_set_halt,
1019         .fifo_flush     = mv_u3d_ep_fifo_flush,
1020 };
1021
/* quiesce the controller: mask interrupts, clear status, drop the RUN bit */
static void mv_u3d_controller_stop(struct mv_u3d *u3d)
{
	u32 tmp;

	/* when clocks stay on and we must detect VBUS ourselves, keep
	 * only the VBUS-valid interrupt armed; otherwise mask everything
	 */
	if (!u3d->clock_gating && u3d->vbus_valid_detect)
		iowrite32(MV_U3D_INTR_ENABLE_VBUS_VALID,
				&u3d->vuc_regs->intrenable);
	else
		iowrite32(0, &u3d->vuc_regs->intrenable);
	/* clear pending status (these registers are written with the bits
	 * to acknowledge — the IRQ paths use the same write-back pattern)
	 */
	iowrite32(~0x0, &u3d->vuc_regs->endcomplete);
	iowrite32(~0x0, &u3d->vuc_regs->trbunderrun);
	iowrite32(~0x0, &u3d->vuc_regs->trbcomplete);
	iowrite32(~0x0, &u3d->vuc_regs->linkchange);
	iowrite32(0x1, &u3d->vuc_regs->setuplock);

	/* Reset the RUN bit in the command register to stop USB */
	tmp = ioread32(&u3d->op_regs->usbcmd);
	tmp &= ~MV_U3D_CMD_RUN_STOP;
	iowrite32(tmp, &u3d->op_regs->usbcmd);
	dev_dbg(u3d->dev, "after u3d_stop, USBCMD 0x%x\n",
		ioread32(&u3d->op_regs->usbcmd));
}
1044
/* bring the controller up: enable LTSSM, unmask interrupts, set RUN */
static void mv_u3d_controller_start(struct mv_u3d *u3d)
{
	u32 usbintr;
	u32 temp;

	/* enable link LTSSM state machine */
	temp = ioread32(&u3d->vuc_regs->ltssm);
	temp |= MV_U3D_LTSSM_PHY_INIT_DONE;
	iowrite32(temp, &u3d->vuc_regs->ltssm);

	/* Enable interrupts; VBUS-valid only when we do our own detection */
	usbintr = MV_U3D_INTR_ENABLE_LINK_CHG | MV_U3D_INTR_ENABLE_TXDESC_ERR |
		MV_U3D_INTR_ENABLE_RXDESC_ERR | MV_U3D_INTR_ENABLE_TX_COMPLETE |
		MV_U3D_INTR_ENABLE_RX_COMPLETE | MV_U3D_INTR_ENABLE_SETUP |
		(u3d->vbus_valid_detect ? MV_U3D_INTR_ENABLE_VBUS_VALID : 0);
	iowrite32(usbintr, &u3d->vuc_regs->intrenable);

	/* Enable ctrl ep */
	iowrite32(0x1, &u3d->vuc_regs->ctrlepenable);

	/* Set the Run bit in the command register */
	iowrite32(MV_U3D_CMD_RUN_STOP, &u3d->op_regs->usbcmd);
	dev_dbg(u3d->dev, "after u3d_start, USBCMD 0x%x\n",
		ioread32(&u3d->op_regs->usbcmd));
}
1070
/*
 * Stop and soft-reset the controller, then reprogram the endpoint
 * context base address.  Returns 0 on success, -ETIMEDOUT if the
 * hardware never clears the reset bit.
 */
static int mv_u3d_controller_reset(struct mv_u3d *u3d)
{
	unsigned int loops;
	u32 tmp;

	/* Stop the controller */
	tmp = ioread32(&u3d->op_regs->usbcmd);
	tmp &= ~MV_U3D_CMD_RUN_STOP;
	iowrite32(tmp, &u3d->op_regs->usbcmd);

	/* Reset the controller to get default values */
	iowrite32(MV_U3D_CMD_CTRL_RESET, &u3d->op_regs->usbcmd);

	/* wait for reset to complete: hardware clears CTRL_RESET when done */
	loops = LOOPS(MV_U3D_RESET_TIMEOUT);
	while (ioread32(&u3d->op_regs->usbcmd) & MV_U3D_CMD_CTRL_RESET) {
		if (loops == 0) {
			dev_err(u3d->dev,
				"Wait for RESET completed TIMEOUT\n");
			return -ETIMEDOUT;
		}
		loops--;
		udelay(LOOPS_USEC);
	}

	/* Configure the Endpoint Context Address (high word unused: the
	 * context array is allocated with a 32-bit DMA address)
	 */
	iowrite32(u3d->ep_context_dma, &u3d->op_regs->dcbaapl);
	iowrite32(0, &u3d->op_regs->dcbaaph);

	return 0;
}
1102
/*
 * Power up the controller.  With clock gating the clock is enabled and
 * the platform PHY init hook is run; without it the hardware is assumed
 * to be already powered and only the bookkeeping flag is set.
 * Returns 0 on success or the PHY init error code.
 */
static int mv_u3d_enable(struct mv_u3d *u3d)
{
	struct mv_usb_platform_data *pdata = dev_get_platdata(u3d->dev);
	int retval;

	if (u3d->active)
		return 0;

	if (!u3d->clock_gating) {
		u3d->active = 1;
		return 0;
	}

	dev_dbg(u3d->dev, "enable u3d\n");
	clk_enable(u3d->clk);
	if (pdata->phy_init) {
		retval = pdata->phy_init(u3d->phy_regs);
		if (retval) {
			dev_err(u3d->dev,
				"init phy error %d\n", retval);
			/* undo the clock enable before bailing out */
			clk_disable(u3d->clk);
			return retval;
		}
	}
	u3d->active = 1;

	return 0;
}
1131
/* power the controller down: deinit the PHY and gate the clock.
 * No-op unless clock gating is in use and the controller is active.
 */
static void mv_u3d_disable(struct mv_u3d *u3d)
{
	struct mv_usb_platform_data *pdata = dev_get_platdata(u3d->dev);
	if (u3d->clock_gating && u3d->active) {
		dev_dbg(u3d->dev, "disable u3d\n");
		if (pdata->phy_deinit)
			pdata->phy_deinit(u3d->phy_regs);
		clk_disable(u3d->clk);
		u3d->active = 0;
	}
}
1143
/*
 * usb_gadget_ops.vbus_session: called when VBUS presence changes.
 * Powers the controller up (and reprograms it — register state is lost
 * while the clock is gated) when driver+softconnect+vbus all line up,
 * or tears activity down when VBUS goes away.
 */
static int mv_u3d_vbus_session(struct usb_gadget *gadget, int is_active)
{
	struct mv_u3d *u3d;
	unsigned long flags;
	int retval = 0;

	u3d = container_of(gadget, struct mv_u3d, gadget);

	spin_lock_irqsave(&u3d->lock, flags);

	u3d->vbus_active = (is_active != 0);
	dev_dbg(u3d->dev, "%s: softconnect %d, vbus_active %d\n",
		__func__, u3d->softconnect, u3d->vbus_active);
	/*
	 * 1. external VBUS detect: we can disable/enable clock on demand.
	 * 2. UDC VBUS detect: we have to enable clock all the time.
	 * 3. No VBUS detect: we have to enable clock all the time.
	 */
	if (u3d->driver && u3d->softconnect && u3d->vbus_active) {
		retval = mv_u3d_enable(u3d);
		if (retval == 0) {
			/*
			 * after clock is disabled, we lost all the register
			 *  context. We have to re-init registers
			 */
			mv_u3d_controller_reset(u3d);
			mv_u3d_ep0_reset(u3d);
			mv_u3d_controller_start(u3d);
		}
	} else if (u3d->driver && u3d->softconnect) {
		/* VBUS dropped: nothing to do if already powered down */
		if (!u3d->active)
			goto out;

		/* stop all the transfer in queue*/
		mv_u3d_stop_activity(u3d, u3d->driver);
		mv_u3d_controller_stop(u3d);
		mv_u3d_disable(u3d);
	}

out:
	spin_unlock_irqrestore(&u3d->lock, flags);
	return retval;
}
1187
/* constrain controller's VBUS power usage
 * This call is used by gadget drivers during SET_CONFIGURATION calls,
 * reporting how much power the device may consume.  For example, this
 * could affect how quickly batteries are recharged.
 *
 * Returns zero on success, else negative errno.
 */
static int mv_u3d_vbus_draw(struct usb_gadget *gadget, unsigned mA)
{
	struct mv_u3d *u3d = container_of(gadget, struct mv_u3d, gadget);

	/* only recorded here; no hardware is touched */
	u3d->power = mA;

	return 0;
}
1203
/*
 * usb_gadget_ops.pullup: software connect/disconnect.  Mirrors
 * mv_u3d_vbus_session() but is driven by the softconnect flag; the
 * controller is (re)initialized on connect because register context is
 * lost while the clock is gated.
 */
static int mv_u3d_pullup(struct usb_gadget *gadget, int is_on)
{
	struct mv_u3d *u3d = container_of(gadget, struct mv_u3d, gadget);
	unsigned long flags;
	int retval = 0;

	spin_lock_irqsave(&u3d->lock, flags);

	dev_dbg(u3d->dev, "%s: softconnect %d, vbus_active %d\n",
		__func__, u3d->softconnect, u3d->vbus_active);
	u3d->softconnect = (is_on != 0);
	if (u3d->driver && u3d->softconnect && u3d->vbus_active) {
		retval = mv_u3d_enable(u3d);
		if (retval == 0) {
			/*
			 * after clock is disabled, we lost all the register
			 *  context. We have to re-init registers
			 */
			mv_u3d_controller_reset(u3d);
			mv_u3d_ep0_reset(u3d);
			mv_u3d_controller_start(u3d);
		}
	} else if (u3d->driver && u3d->vbus_active) {
		/* stop all the transfer in queue*/
		mv_u3d_stop_activity(u3d, u3d->driver);
		mv_u3d_controller_stop(u3d);
		mv_u3d_disable(u3d);
	}

	spin_unlock_irqrestore(&u3d->lock, flags);

	return retval;
}
1237
/*
 * usb_gadget_ops.udc_start: bind a gadget driver to this UDC.
 * With clock gating the hardware stays off until VBUS appears; without
 * it the clock and PHY are brought up immediately.
 */
static int mv_u3d_start(struct usb_gadget *g,
		struct usb_gadget_driver *driver)
{
	struct mv_u3d *u3d = container_of(g, struct mv_u3d, gadget);
	struct mv_usb_platform_data *pdata = dev_get_platdata(u3d->dev);
	unsigned long flags;

	if (u3d->driver)
		return -EBUSY;

	spin_lock_irqsave(&u3d->lock, flags);

	if (!u3d->clock_gating) {
		/* NOTE(review): clk_enable/phy_init return values are
		 * ignored here, unlike in mv_u3d_enable() — confirm
		 * whether failures can occur on this path.
		 */
		clk_enable(u3d->clk);
		if (pdata->phy_init)
			pdata->phy_init(u3d->phy_regs);
	}

	/* hook up the driver ... */
	driver->driver.bus = NULL;
	u3d->driver = driver;

	u3d->ep0_dir = USB_DIR_OUT;

	spin_unlock_irqrestore(&u3d->lock, flags);

	/* start watching for VBUS so enumeration can begin */
	u3d->vbus_valid_detect = 1;

	return 0;
}
1268
/*
 * usb_gadget_ops.udc_stop: unbind the gadget driver.  The clock is
 * temporarily enabled so the controller registers can be accessed to
 * stop it, then everything is powered back down.
 */
static int mv_u3d_stop(struct usb_gadget *g)
{
	struct mv_u3d *u3d = container_of(g, struct mv_u3d, gadget);
	struct mv_usb_platform_data *pdata = dev_get_platdata(u3d->dev);
	unsigned long flags;

	u3d->vbus_valid_detect = 0;
	spin_lock_irqsave(&u3d->lock, flags);

	/* enable clock to access controller register */
	clk_enable(u3d->clk);
	if (pdata->phy_init)
		pdata->phy_init(u3d->phy_regs);

	mv_u3d_controller_stop(u3d);
	/* stop all usb activities */
	u3d->gadget.speed = USB_SPEED_UNKNOWN;
	mv_u3d_stop_activity(u3d, NULL);
	mv_u3d_disable(u3d);

	if (pdata->phy_deinit)
		pdata->phy_deinit(u3d->phy_regs);
	clk_disable(u3d->clk);

	spin_unlock_irqrestore(&u3d->lock, flags);

	u3d->driver = NULL;

	return 0;
}
1299
/* device controller usb_gadget_ops structure */
static const struct usb_gadget_ops mv_u3d_ops = {
	/* notify controller that VBUS is powered or not */
	.vbus_session	= mv_u3d_vbus_session,

	/* constrain controller's VBUS power usage */
	.vbus_draw	= mv_u3d_vbus_draw,

	/* soft connect/disconnect and gadget driver (un)binding */
	.pullup		= mv_u3d_pullup,
	.udc_start	= mv_u3d_start,
	.udc_stop	= mv_u3d_stop,
};
1312
1313 static int mv_u3d_eps_init(struct mv_u3d *u3d)
1314 {
1315         struct mv_u3d_ep        *ep;
1316         char name[14];
1317         int i;
1318
1319         /* initialize ep0, ep0 in/out use eps[1] */
1320         ep = &u3d->eps[1];
1321         ep->u3d = u3d;
1322         strncpy(ep->name, "ep0", sizeof(ep->name));
1323         ep->ep.name = ep->name;
1324         ep->ep.ops = &mv_u3d_ep_ops;
1325         ep->wedge = 0;
1326         usb_ep_set_maxpacket_limit(&ep->ep, MV_U3D_EP0_MAX_PKT_SIZE);
1327         ep->ep.caps.type_control = true;
1328         ep->ep.caps.dir_in = true;
1329         ep->ep.caps.dir_out = true;
1330         ep->ep_num = 0;
1331         ep->ep.desc = &mv_u3d_ep0_desc;
1332         INIT_LIST_HEAD(&ep->queue);
1333         INIT_LIST_HEAD(&ep->req_list);
1334         ep->ep_type = USB_ENDPOINT_XFER_CONTROL;
1335
1336         /* add ep0 ep_context */
1337         ep->ep_context = &u3d->ep_context[1];
1338
1339         /* initialize other endpoints */
1340         for (i = 2; i < u3d->max_eps * 2; i++) {
1341                 ep = &u3d->eps[i];
1342                 if (i & 1) {
1343                         snprintf(name, sizeof(name), "ep%din", i >> 1);
1344                         ep->direction = MV_U3D_EP_DIR_IN;
1345                         ep->ep.caps.dir_in = true;
1346                 } else {
1347                         snprintf(name, sizeof(name), "ep%dout", i >> 1);
1348                         ep->direction = MV_U3D_EP_DIR_OUT;
1349                         ep->ep.caps.dir_out = true;
1350                 }
1351                 ep->u3d = u3d;
1352                 strncpy(ep->name, name, sizeof(ep->name));
1353                 ep->ep.name = ep->name;
1354
1355                 ep->ep.caps.type_iso = true;
1356                 ep->ep.caps.type_bulk = true;
1357                 ep->ep.caps.type_int = true;
1358
1359                 ep->ep.ops = &mv_u3d_ep_ops;
1360                 usb_ep_set_maxpacket_limit(&ep->ep, (unsigned short) ~0);
1361                 ep->ep_num = i / 2;
1362
1363                 INIT_LIST_HEAD(&ep->queue);
1364                 list_add_tail(&ep->ep.ep_list, &u3d->gadget.ep_list);
1365
1366                 INIT_LIST_HEAD(&ep->req_list);
1367                 spin_lock_init(&ep->req_lock);
1368                 ep->ep_context = &u3d->ep_context[i];
1369         }
1370
1371         return 0;
1372 }
1373
1374 /* delete all endpoint requests, called with spinlock held */
1375 static void mv_u3d_nuke(struct mv_u3d_ep *ep, int status)
1376 {
1377         /* endpoint fifo flush */
1378         mv_u3d_ep_fifo_flush(&ep->ep);
1379
1380         while (!list_empty(&ep->queue)) {
1381                 struct mv_u3d_req *req = NULL;
1382                 req = list_entry(ep->queue.next, struct mv_u3d_req, queue);
1383                 mv_u3d_done(ep, req, status);
1384         }
1385 }
1386
/* stop all USB activities */
static
void mv_u3d_stop_activity(struct mv_u3d *u3d, struct usb_gadget_driver *driver)
{
	struct mv_u3d_ep	*ep;

	/* ep0 lives in eps[1] and is not on gadget.ep_list */
	mv_u3d_nuke(&u3d->eps[1], -ESHUTDOWN);

	list_for_each_entry(ep, &u3d->gadget.ep_list, ep.ep_list) {
		mv_u3d_nuke(ep, -ESHUTDOWN);
	}

	/* report disconnect; the driver is already quiesced.
	 * u3d->lock is dropped around the callback because the gadget
	 * driver may call back into this UDC.
	 */
	if (driver) {
		spin_unlock(&u3d->lock);
		driver->disconnect(&u3d->gadget);
		spin_lock(&u3d->lock);
	}
}
1406
/* TX/RX descriptor error interrupt: just count and log it */
static void mv_u3d_irq_process_error(struct mv_u3d *u3d)
{
	/* Increment the error count */
	u3d->errors++;
	dev_err(u3d->dev, "%s\n", __func__);
}
1413
/*
 * Link-change interrupt: read and acknowledge the event mask, then
 * update the USB device state for every event bit that is set.
 */
static void mv_u3d_irq_process_link_change(struct mv_u3d *u3d)
{
	u32 linkchange;

	/* write back the value we read to acknowledge the events */
	linkchange = ioread32(&u3d->vuc_regs->linkchange);
	iowrite32(linkchange, &u3d->vuc_regs->linkchange);

	dev_dbg(u3d->dev, "linkchange: 0x%x\n", linkchange);

	if (linkchange & MV_U3D_LINK_CHANGE_LINK_UP) {
		dev_dbg(u3d->dev, "link up: ltssm state: 0x%x\n",
			ioread32(&u3d->vuc_regs->ltssmstate));

		u3d->usb_state = USB_STATE_DEFAULT;
		u3d->ep0_dir = MV_U3D_EP_DIR_OUT;
		u3d->ep0_state = MV_U3D_WAIT_FOR_SETUP;

		/* set speed: this is a USB3-only controller */
		u3d->gadget.speed = USB_SPEED_SUPER;
	}

	if (linkchange & MV_U3D_LINK_CHANGE_SUSPEND) {
		dev_dbg(u3d->dev, "link suspend\n");
		/* remember where to return to on resume */
		u3d->resume_state = u3d->usb_state;
		u3d->usb_state = USB_STATE_SUSPENDED;
	}

	if (linkchange & MV_U3D_LINK_CHANGE_RESUME) {
		dev_dbg(u3d->dev, "link resume\n");
		u3d->usb_state = u3d->resume_state;
		u3d->resume_state = 0;
	}

	if (linkchange & MV_U3D_LINK_CHANGE_WRESET) {
		dev_dbg(u3d->dev, "warm reset\n");
		u3d->usb_state = USB_STATE_POWERED;
	}

	if (linkchange & MV_U3D_LINK_CHANGE_HRESET) {
		dev_dbg(u3d->dev, "hot reset\n");
		u3d->usb_state = USB_STATE_DEFAULT;
	}

	if (linkchange & MV_U3D_LINK_CHANGE_INACT)
		dev_dbg(u3d->dev, "inactive\n");

	if (linkchange & MV_U3D_LINK_CHANGE_DISABLE_AFTER_U0)
		dev_dbg(u3d->dev, "ss.disabled\n");

	if (linkchange & MV_U3D_LINK_CHANGE_VBUS_INVALID) {
		dev_dbg(u3d->dev, "vbus invalid\n");
		u3d->usb_state = USB_STATE_ATTACHED;
		/* re-arm detection so a new session can be noticed */
		u3d->vbus_valid_detect = 1;
		/* if external vbus detect is not supported,
		 * we handle it here.
		 */
		if (!u3d->vbus) {
			/* drop the IRQ-held lock across vbus_session,
			 * which takes u3d->lock itself
			 */
			spin_unlock(&u3d->lock);
			mv_u3d_vbus_session(&u3d->gadget, 0);
			spin_lock(&u3d->lock);
		}
	}
}
1477
/*
 * Handle the chapter-9 SET_ADDRESS request in the driver: validate the
 * state and address, then program it into the device address register.
 * Stalls ep0 on any error.
 */
static void mv_u3d_ch9setaddress(struct mv_u3d *u3d,
				struct usb_ctrlrequest *setup)
{
	u32 tmp;

	/* SET_ADDRESS is only legal in the Default state */
	if (u3d->usb_state != USB_STATE_DEFAULT) {
		dev_err(u3d->dev,
			"%s, cannot setaddr in this state (%d)\n",
			__func__, u3d->usb_state);
		goto err;
	}

	u3d->dev_addr = (u8)setup->wValue;

	dev_dbg(u3d->dev, "%s: 0x%x\n", __func__, u3d->dev_addr);

	/* USB device addresses are 7 bits (1..127) */
	if (u3d->dev_addr > 127) {
		dev_err(u3d->dev,
			"%s, u3d address is wrong (out of range)\n", __func__);
		u3d->dev_addr = 0;
		goto err;
	}

	/* update usb state */
	u3d->usb_state = USB_STATE_ADDRESS;

	/* set the new address in the low 7 bits of the register */
	tmp = ioread32(&u3d->vuc_regs->devaddrtiebrkr);
	tmp &= ~0x7F;
	tmp |= (u32)u3d->dev_addr;
	iowrite32(tmp, &u3d->vuc_regs->devaddrtiebrkr);

	return;
err:
	mv_u3d_ep0_stall(u3d);
}
1514
1515 static int mv_u3d_is_set_configuration(struct usb_ctrlrequest *setup)
1516 {
1517         if ((setup->bRequestType & USB_TYPE_MASK) == USB_TYPE_STANDARD)
1518                 if (setup->bRequest == USB_REQ_SET_CONFIGURATION)
1519                         return 1;
1520
1521         return 0;
1522 }
1523
1524 static void mv_u3d_handle_setup_packet(struct mv_u3d *u3d, u8 ep_num,
1525         struct usb_ctrlrequest *setup)
1526         __releases(&u3c->lock)
1527         __acquires(&u3c->lock)
1528 {
1529         bool delegate = false;
1530
1531         mv_u3d_nuke(&u3d->eps[ep_num * 2 + MV_U3D_EP_DIR_IN], -ESHUTDOWN);
1532
1533         dev_dbg(u3d->dev, "SETUP %02x.%02x v%04x i%04x l%04x\n",
1534                         setup->bRequestType, setup->bRequest,
1535                         setup->wValue, setup->wIndex, setup->wLength);
1536
1537         /* We process some stardard setup requests here */
1538         if ((setup->bRequestType & USB_TYPE_MASK) == USB_TYPE_STANDARD) {
1539                 switch (setup->bRequest) {
1540                 case USB_REQ_GET_STATUS:
1541                         delegate = true;
1542                         break;
1543
1544                 case USB_REQ_SET_ADDRESS:
1545                         mv_u3d_ch9setaddress(u3d, setup);
1546                         break;
1547
1548                 case USB_REQ_CLEAR_FEATURE:
1549                         delegate = true;
1550                         break;
1551
1552                 case USB_REQ_SET_FEATURE:
1553                         delegate = true;
1554                         break;
1555
1556                 default:
1557                         delegate = true;
1558                 }
1559         } else
1560                 delegate = true;
1561
1562         /* delegate USB standard requests to the gadget driver */
1563         if (delegate == true) {
1564                 /* USB requests handled by gadget */
1565                 if (setup->wLength) {
1566                         /* DATA phase from gadget, STATUS phase from u3d */
1567                         u3d->ep0_dir = (setup->bRequestType & USB_DIR_IN)
1568                                         ? MV_U3D_EP_DIR_IN : MV_U3D_EP_DIR_OUT;
1569                         spin_unlock(&u3d->lock);
1570                         if (u3d->driver->setup(&u3d->gadget,
1571                                 &u3d->local_setup_buff) < 0) {
1572                                 dev_err(u3d->dev, "setup error!\n");
1573                                 mv_u3d_ep0_stall(u3d);
1574                         }
1575                         spin_lock(&u3d->lock);
1576                 } else {
1577                         /* no DATA phase, STATUS phase from gadget */
1578                         u3d->ep0_dir = MV_U3D_EP_DIR_IN;
1579                         u3d->ep0_state = MV_U3D_STATUS_STAGE;
1580                         spin_unlock(&u3d->lock);
1581                         if (u3d->driver->setup(&u3d->gadget,
1582                                 &u3d->local_setup_buff) < 0)
1583                                 mv_u3d_ep0_stall(u3d);
1584                         spin_lock(&u3d->lock);
1585                 }
1586
1587                 if (mv_u3d_is_set_configuration(setup)) {
1588                         dev_dbg(u3d->dev, "u3d configured\n");
1589                         u3d->usb_state = USB_STATE_CONFIGURED;
1590                 }
1591         }
1592 }
1593
/* copy the 8-byte SETUP packet for @ep_num out of its endpoint context */
static void mv_u3d_get_setup_data(struct mv_u3d *u3d, u8 ep_num, u8 *buffer_ptr)
{
	struct mv_u3d_ep_context *epcontext;

	/* the controller stores SETUP data in the IN-direction context */
	epcontext = &u3d->ep_context[ep_num * 2 + MV_U3D_EP_DIR_IN];

	/* Copy the setup packet to local buffer */
	memcpy(buffer_ptr, (u8 *) &epcontext->setup_buffer, 8);
}
1603
/*
 * SETUP interrupt: each set bit in the setuplock register marks an
 * endpoint with a pending SETUP packet.  Copy and handle each one,
 * then write the bits back to release the lock.
 */
static void mv_u3d_irq_process_setup(struct mv_u3d *u3d)
{
	u32 tmp, i;
	/* Process all Setup packet received interrupts */
	tmp = ioread32(&u3d->vuc_regs->setuplock);
	if (tmp) {
		for (i = 0; i < u3d->max_eps; i++) {
			if (tmp & (1 << i)) {
				mv_u3d_get_setup_data(u3d, i,
					(u8 *)(&u3d->local_setup_buff));
				mv_u3d_handle_setup_packet(u3d, i,
					&u3d->local_setup_buff);
			}
		}
	}

	/* acknowledge the endpoints we just serviced */
	iowrite32(tmp, &u3d->vuc_regs->setuplock);
}
1622
/*
 * Transfer-complete interrupt: for every endpoint flagged in the
 * endcomplete register (OUT eps in the low 16 bits, IN eps in the
 * high 16), retire finished requests and kick off the next queued one.
 */
static void mv_u3d_irq_process_tr_complete(struct mv_u3d *u3d)
{
	u32 tmp, bit_pos;
	int i, ep_num = 0, direction = 0;
	struct mv_u3d_ep	*curr_ep;
	struct mv_u3d_req *curr_req, *temp_req;
	int status;

	tmp = ioread32(&u3d->vuc_regs->endcomplete);

	dev_dbg(u3d->dev, "tr_complete: ep: 0x%x\n", tmp);
	if (!tmp)
		return;
	/* acknowledge all reported completions at once */
	iowrite32(tmp, &u3d->vuc_regs->endcomplete);

	for (i = 0; i < u3d->max_eps * 2; i++) {
		ep_num = i >> 1;
		direction = i % 2;

		/* OUT endpoints occupy bits 0..15, IN endpoints 16..31 */
		bit_pos = 1 << (ep_num + 16 * direction);

		if (!(bit_pos & tmp))
			continue;

		/* ep0 (i == 0) shares eps[1] with its IN counterpart */
		if (i == 0)
			curr_ep = &u3d->eps[1];
		else
			curr_ep = &u3d->eps[i];

		/* remove req out of ep request list after completion */
		dev_dbg(u3d->dev, "tr comp: check req_list\n");
		spin_lock(&curr_ep->req_lock);
		if (!list_empty(&curr_ep->req_list)) {
			struct mv_u3d_req *req;
			req = list_entry(curr_ep->req_list.next,
						struct mv_u3d_req, list);
			list_del_init(&req->list);
			curr_ep->processing = 0;
		}
		spin_unlock(&curr_ep->req_lock);

		/* process the req queue until an uncomplete request */
		list_for_each_entry_safe(curr_req, temp_req,
			&curr_ep->queue, queue) {
			status = mv_u3d_process_ep_req(u3d, i, curr_req);
			if (status)
				break;
			/* write back status to req (status is 0 here —
			 * non-zero values break out of the loop above)
			 */
			curr_req->req.status = status;

			/* ep0 request completion: only retire one request
			 * per interrupt
			 */
			if (ep_num == 0) {
				mv_u3d_done(curr_ep, curr_req, 0);
				break;
			} else {
				mv_u3d_done(curr_ep, curr_req, status);
			}
		}

		dev_dbg(u3d->dev, "call mv_u3d_start_queue from ep complete\n");
		mv_u3d_start_queue(curr_ep);
	}
}
1686
/*
 * Top-level interrupt handler for the u3d controller.
 *
 * Reads the raw cause register, masks it with the enabled interrupts,
 * and dispatches to the per-event helpers. Runs with u3d->lock held
 * except for the brief window around mv_u3d_vbus_session(), which may
 * sleep/re-enter and therefore must be called unlocked.
 */
static irqreturn_t mv_u3d_irq(int irq, void *dev)
{
	struct mv_u3d *u3d = (struct mv_u3d *)dev;
	u32 status, intr;
	u32 bridgesetting;
	u32 trbunderrun;

	spin_lock(&u3d->lock);

	/* only consider causes that are actually enabled */
	status = ioread32(&u3d->vuc_regs->intrcause);
	intr = ioread32(&u3d->vuc_regs->intrenable);
	status &= intr;

	if (status == 0) {
		/* shared IRQ line: nothing for us to handle */
		spin_unlock(&u3d->lock);
		dev_err(u3d->dev, "irq error!\n");
		return IRQ_NONE;
	}

	if (status & MV_U3D_USBINT_VBUS_VALID) {
		bridgesetting = ioread32(&u3d->vuc_regs->bridgesetting);
		if (bridgesetting & MV_U3D_BRIDGE_SETTING_VBUS_VALID) {
			/* write vbus valid bit of bridge setting to clear */
			bridgesetting = MV_U3D_BRIDGE_SETTING_VBUS_VALID;
			iowrite32(bridgesetting, &u3d->vuc_regs->bridgesetting);
			dev_dbg(u3d->dev, "vbus valid\n");

			u3d->usb_state = USB_STATE_POWERED;
			u3d->vbus_valid_detect = 0;
			/* if external vbus detect is not supported,
			 * we handle it here.
			 */
			if (!u3d->vbus) {
				/* drop the lock: vbus_session takes it itself */
				spin_unlock(&u3d->lock);
				mv_u3d_vbus_session(&u3d->gadget, 1);
				spin_lock(&u3d->lock);
			}
		} else
			dev_err(u3d->dev, "vbus bit is not set\n");
	}

	/* RX data is already in the 16KB FIFO.*/
	if (status & MV_U3D_USBINT_UNDER_RUN) {
		trbunderrun = ioread32(&u3d->vuc_regs->trbunderrun);
		dev_err(u3d->dev, "under run, ep%d\n", trbunderrun);
		/* write one to clear the under-run bits */
		iowrite32(trbunderrun, &u3d->vuc_regs->trbunderrun);
		mv_u3d_irq_process_error(u3d);
	}

	if (status & (MV_U3D_USBINT_RXDESC_ERR | MV_U3D_USBINT_TXDESC_ERR)) {
		/* write one to clear */
		iowrite32(status & (MV_U3D_USBINT_RXDESC_ERR
			| MV_U3D_USBINT_TXDESC_ERR),
			&u3d->vuc_regs->intrcause);
		dev_err(u3d->dev, "desc err 0x%x\n", status);
		mv_u3d_irq_process_error(u3d);
	}

	if (status & MV_U3D_USBINT_LINK_CHG)
		mv_u3d_irq_process_link_change(u3d);

	/* TX and RX completions share the same handler; it scans all eps */
	if (status & MV_U3D_USBINT_TX_COMPLETE)
		mv_u3d_irq_process_tr_complete(u3d);

	if (status & MV_U3D_USBINT_RX_COMPLETE)
		mv_u3d_irq_process_tr_complete(u3d);

	if (status & MV_U3D_USBINT_SETUP)
		mv_u3d_irq_process_setup(u3d);

	spin_unlock(&u3d->lock);
	return IRQ_HANDLED;
}
1760
/*
 * Platform driver remove callback: unregister the gadget first so no new
 * activity can start, then release everything probe allocated, in reverse
 * order of acquisition.
 */
static int mv_u3d_remove(struct platform_device *dev)
{
	struct mv_u3d *u3d = platform_get_drvdata(dev);

	BUG_ON(u3d == NULL);

	/* detach from the gadget framework before tearing down resources */
	usb_del_gadget_udc(&u3d->gadget);

	/* free memory allocated in probe */
	dma_pool_destroy(u3d->trb_pool);

	if (u3d->ep_context)
		dma_free_coherent(&dev->dev, u3d->ep_context_size,
			u3d->ep_context, u3d->ep_context_dma);

	kfree(u3d->eps);

	/* u3d->irq is 0 only if request_irq never succeeded */
	if (u3d->irq)
		free_irq(u3d->irq, u3d);

	if (u3d->cap_regs)
		iounmap(u3d->cap_regs);
	u3d->cap_regs = NULL;

	kfree(u3d->status_req);

	clk_put(u3d->clk);

	kfree(u3d);

	return 0;
}
1793
1794 static int mv_u3d_probe(struct platform_device *dev)
1795 {
1796         struct mv_u3d *u3d = NULL;
1797         struct mv_usb_platform_data *pdata = dev_get_platdata(&dev->dev);
1798         int retval = 0;
1799         struct resource *r;
1800         size_t size;
1801
1802         if (!dev_get_platdata(&dev->dev)) {
1803                 dev_err(&dev->dev, "missing platform_data\n");
1804                 retval = -ENODEV;
1805                 goto err_pdata;
1806         }
1807
1808         u3d = kzalloc(sizeof(*u3d), GFP_KERNEL);
1809         if (!u3d) {
1810                 retval = -ENOMEM;
1811                 goto err_alloc_private;
1812         }
1813
1814         spin_lock_init(&u3d->lock);
1815
1816         platform_set_drvdata(dev, u3d);
1817
1818         u3d->dev = &dev->dev;
1819         u3d->vbus = pdata->vbus;
1820
1821         u3d->clk = clk_get(&dev->dev, NULL);
1822         if (IS_ERR(u3d->clk)) {
1823                 retval = PTR_ERR(u3d->clk);
1824                 goto err_get_clk;
1825         }
1826
1827         r = platform_get_resource_byname(dev, IORESOURCE_MEM, "capregs");
1828         if (!r) {
1829                 dev_err(&dev->dev, "no I/O memory resource defined\n");
1830                 retval = -ENODEV;
1831                 goto err_get_cap_regs;
1832         }
1833
1834         u3d->cap_regs = (struct mv_u3d_cap_regs __iomem *)
1835                 ioremap(r->start, resource_size(r));
1836         if (!u3d->cap_regs) {
1837                 dev_err(&dev->dev, "failed to map I/O memory\n");
1838                 retval = -EBUSY;
1839                 goto err_map_cap_regs;
1840         } else {
1841                 dev_dbg(&dev->dev, "cap_regs address: 0x%lx/0x%lx\n",
1842                         (unsigned long) r->start,
1843                         (unsigned long) u3d->cap_regs);
1844         }
1845
1846         /* we will access controller register, so enable the u3d controller */
1847         clk_enable(u3d->clk);
1848
1849         if (pdata->phy_init) {
1850                 retval = pdata->phy_init(u3d->phy_regs);
1851                 if (retval) {
1852                         dev_err(&dev->dev, "init phy error %d\n", retval);
1853                         goto err_u3d_enable;
1854                 }
1855         }
1856
1857         u3d->op_regs = (struct mv_u3d_op_regs __iomem *)(u3d->cap_regs
1858                 + MV_U3D_USB3_OP_REGS_OFFSET);
1859
1860         u3d->vuc_regs = (struct mv_u3d_vuc_regs __iomem *)(u3d->cap_regs
1861                 + ioread32(&u3d->cap_regs->vuoff));
1862
1863         u3d->max_eps = 16;
1864
1865         /*
1866          * some platform will use usb to download image, it may not disconnect
1867          * usb gadget before loading kernel. So first stop u3d here.
1868          */
1869         mv_u3d_controller_stop(u3d);
1870         iowrite32(0xFFFFFFFF, &u3d->vuc_regs->intrcause);
1871
1872         if (pdata->phy_deinit)
1873                 pdata->phy_deinit(u3d->phy_regs);
1874         clk_disable(u3d->clk);
1875
1876         size = u3d->max_eps * sizeof(struct mv_u3d_ep_context) * 2;
1877         size = (size + MV_U3D_EP_CONTEXT_ALIGNMENT - 1)
1878                 & ~(MV_U3D_EP_CONTEXT_ALIGNMENT - 1);
1879         u3d->ep_context = dma_alloc_coherent(&dev->dev, size,
1880                                         &u3d->ep_context_dma, GFP_KERNEL);
1881         if (!u3d->ep_context) {
1882                 dev_err(&dev->dev, "allocate ep context memory failed\n");
1883                 retval = -ENOMEM;
1884                 goto err_alloc_ep_context;
1885         }
1886         u3d->ep_context_size = size;
1887
1888         /* create TRB dma_pool resource */
1889         u3d->trb_pool = dma_pool_create("u3d_trb",
1890                         &dev->dev,
1891                         sizeof(struct mv_u3d_trb_hw),
1892                         MV_U3D_TRB_ALIGNMENT,
1893                         MV_U3D_DMA_BOUNDARY);
1894
1895         if (!u3d->trb_pool) {
1896                 retval = -ENOMEM;
1897                 goto err_alloc_trb_pool;
1898         }
1899
1900         size = u3d->max_eps * sizeof(struct mv_u3d_ep) * 2;
1901         u3d->eps = kzalloc(size, GFP_KERNEL);
1902         if (!u3d->eps) {
1903                 retval = -ENOMEM;
1904                 goto err_alloc_eps;
1905         }
1906
1907         /* initialize ep0 status request structure */
1908         u3d->status_req = kzalloc(sizeof(struct mv_u3d_req) + 8, GFP_KERNEL);
1909         if (!u3d->status_req) {
1910                 retval = -ENOMEM;
1911                 goto err_alloc_status_req;
1912         }
1913         INIT_LIST_HEAD(&u3d->status_req->queue);
1914
1915         /* allocate a small amount of memory to get valid address */
1916         u3d->status_req->req.buf = (char *)u3d->status_req
1917                                         + sizeof(struct mv_u3d_req);
1918         u3d->status_req->req.dma = virt_to_phys(u3d->status_req->req.buf);
1919
1920         u3d->resume_state = USB_STATE_NOTATTACHED;
1921         u3d->usb_state = USB_STATE_ATTACHED;
1922         u3d->ep0_dir = MV_U3D_EP_DIR_OUT;
1923         u3d->remote_wakeup = 0;
1924
1925         r = platform_get_resource(dev, IORESOURCE_IRQ, 0);
1926         if (!r) {
1927                 dev_err(&dev->dev, "no IRQ resource defined\n");
1928                 retval = -ENODEV;
1929                 goto err_get_irq;
1930         }
1931         u3d->irq = r->start;
1932         if (request_irq(u3d->irq, mv_u3d_irq,
1933                 IRQF_SHARED, driver_name, u3d)) {
1934                 u3d->irq = 0;
1935                 dev_err(&dev->dev, "Request irq %d for u3d failed\n",
1936                         u3d->irq);
1937                 retval = -ENODEV;
1938                 goto err_request_irq;
1939         }
1940
1941         /* initialize gadget structure */
1942         u3d->gadget.ops = &mv_u3d_ops;  /* usb_gadget_ops */
1943         u3d->gadget.ep0 = &u3d->eps[1].ep;      /* gadget ep0 */
1944         INIT_LIST_HEAD(&u3d->gadget.ep_list);   /* ep_list */
1945         u3d->gadget.speed = USB_SPEED_UNKNOWN;  /* speed */
1946
1947         /* the "gadget" abstracts/virtualizes the controller */
1948         u3d->gadget.name = driver_name;         /* gadget name */
1949
1950         mv_u3d_eps_init(u3d);
1951
1952         /* external vbus detection */
1953         if (u3d->vbus) {
1954                 u3d->clock_gating = 1;
1955                 dev_err(&dev->dev, "external vbus detection\n");
1956         }
1957
1958         if (!u3d->clock_gating)
1959                 u3d->vbus_active = 1;
1960
1961         /* enable usb3 controller vbus detection */
1962         u3d->vbus_valid_detect = 1;
1963
1964         retval = usb_add_gadget_udc(&dev->dev, &u3d->gadget);
1965         if (retval)
1966                 goto err_unregister;
1967
1968         dev_dbg(&dev->dev, "successful probe usb3 device %s clock gating.\n",
1969                 u3d->clock_gating ? "with" : "without");
1970
1971         return 0;
1972
1973 err_unregister:
1974         free_irq(u3d->irq, u3d);
1975 err_request_irq:
1976 err_get_irq:
1977         kfree(u3d->status_req);
1978 err_alloc_status_req:
1979         kfree(u3d->eps);
1980 err_alloc_eps:
1981         dma_pool_destroy(u3d->trb_pool);
1982 err_alloc_trb_pool:
1983         dma_free_coherent(&dev->dev, u3d->ep_context_size,
1984                 u3d->ep_context, u3d->ep_context_dma);
1985 err_alloc_ep_context:
1986         if (pdata->phy_deinit)
1987                 pdata->phy_deinit(u3d->phy_regs);
1988         clk_disable(u3d->clk);
1989 err_u3d_enable:
1990         iounmap(u3d->cap_regs);
1991 err_map_cap_regs:
1992 err_get_cap_regs:
1993 err_get_clk:
1994         clk_put(u3d->clk);
1995         kfree(u3d);
1996 err_alloc_private:
1997 err_pdata:
1998         return retval;
1999 }
2000
2001 #ifdef CONFIG_PM_SLEEP
2002 static int mv_u3d_suspend(struct device *dev)
2003 {
2004         struct mv_u3d *u3d = dev_get_drvdata(dev);
2005
2006         /*
2007          * only cable is unplugged, usb can suspend.
2008          * So do not care about clock_gating == 1, it is handled by
2009          * vbus session.
2010          */
2011         if (!u3d->clock_gating) {
2012                 mv_u3d_controller_stop(u3d);
2013
2014                 spin_lock_irq(&u3d->lock);
2015                 /* stop all usb activities */
2016                 mv_u3d_stop_activity(u3d, u3d->driver);
2017                 spin_unlock_irq(&u3d->lock);
2018
2019                 mv_u3d_disable(u3d);
2020         }
2021
2022         return 0;
2023 }
2024
2025 static int mv_u3d_resume(struct device *dev)
2026 {
2027         struct mv_u3d *u3d = dev_get_drvdata(dev);
2028         int retval;
2029
2030         if (!u3d->clock_gating) {
2031                 retval = mv_u3d_enable(u3d);
2032                 if (retval)
2033                         return retval;
2034
2035                 if (u3d->driver && u3d->softconnect) {
2036                         mv_u3d_controller_reset(u3d);
2037                         mv_u3d_ep0_reset(u3d);
2038                         mv_u3d_controller_start(u3d);
2039                 }
2040         }
2041
2042         return 0;
2043 }
2044 #endif
2045
/* Wire suspend/resume into dev_pm_ops; expands to empty ops without CONFIG_PM_SLEEP */
static SIMPLE_DEV_PM_OPS(mv_u3d_pm_ops, mv_u3d_suspend, mv_u3d_resume);
2047
/*
 * Platform shutdown hook: clear the run/stop bit in usbcmd so the
 * controller stops processing before the system powers off or reboots.
 */
static void mv_u3d_shutdown(struct platform_device *dev)
{
	struct mv_u3d *u3d = platform_get_drvdata(dev);
	u32 tmp;

	/* read-modify-write: preserve the other usbcmd bits */
	tmp = ioread32(&u3d->op_regs->usbcmd);
	tmp &= ~MV_U3D_CMD_RUN_STOP;
	iowrite32(tmp, &u3d->op_regs->usbcmd);
}
2057
/* Platform driver glue: binds to devices named "mv-u3d" */
static struct platform_driver mv_u3d_driver = {
	.probe		= mv_u3d_probe,
	.remove		= mv_u3d_remove,
	.shutdown	= mv_u3d_shutdown,
	.driver		= {
		.name	= "mv-u3d",
		.pm	= &mv_u3d_pm_ops,
	},
};
2067
/* Generates module init/exit that register/unregister the driver */
module_platform_driver(mv_u3d_driver);
MODULE_ALIAS("platform:mv-u3d");
MODULE_DESCRIPTION(DRIVER_DESC);
MODULE_AUTHOR("Yu Xu <yuxu@marvell.com>");
MODULE_LICENSE("GPL");