/*
 * Copyright (c) 2010 Atheros Communications Inc.
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */
19 /* identify firmware images */
20 #define FIRMWARE_AR7010 "ar7010.fw"
21 #define FIRMWARE_AR7010_1_1 "ar7010_1_1.fw"
22 #define FIRMWARE_AR9271 "ar9271.fw"
24 MODULE_FIRMWARE(FIRMWARE_AR7010
);
25 MODULE_FIRMWARE(FIRMWARE_AR7010_1_1
);
26 MODULE_FIRMWARE(FIRMWARE_AR9271
);
28 static struct usb_device_id ath9k_hif_usb_ids
[] = {
29 { USB_DEVICE(0x0cf3, 0x9271) }, /* Atheros */
30 { USB_DEVICE(0x0cf3, 0x1006) }, /* Atheros */
31 { USB_DEVICE(0x0cf3, 0x7010) }, /* Atheros */
32 { USB_DEVICE(0x0cf3, 0x7015) }, /* Atheros */
33 { USB_DEVICE(0x0846, 0x9030) }, /* Netgear N150 */
34 { USB_DEVICE(0x0846, 0x9018) }, /* Netgear WNDA3200 */
35 { USB_DEVICE(0x07D1, 0x3A10) }, /* Dlink Wireless 150 */
36 { USB_DEVICE(0x13D3, 0x3327) }, /* Azurewave */
37 { USB_DEVICE(0x13D3, 0x3328) }, /* Azurewave */
38 { USB_DEVICE(0x13D3, 0x3346) }, /* IMC Networks */
39 { USB_DEVICE(0x04CA, 0x4605) }, /* Liteon */
40 { USB_DEVICE(0x083A, 0xA704) }, /* SMC Networks */
44 MODULE_DEVICE_TABLE(usb
, ath9k_hif_usb_ids
);
/* Forward declaration: the TX completion callback re-enters the transmit
 * routine to flush SKBs that were queued while a URB was in flight.
 */
static int __hif_usb_tx(struct hif_device_usb *hif_dev);
/*
 * Completion callback for register-write (REG_OUT) URBs: recover the
 * cmd_buf from urb->context, inspect urb->status and hand the finished
 * skb back to HTC via ath9k_htc_txcompletion_cb().
 * NOTE(review): this extract is fragmented — the switch arms and the
 * cmd_buf/urb cleanup are not visible here; verify against the full
 * source before relying on this block.
 */
48 static void hif_usb_regout_cb(struct urb
*urb
)
50 struct cmd_buf
*cmd
= (struct cmd_buf
*)urb
->context
;
52 switch (urb
->status
) {
/* On completion, notify the HTC layer about the finished command skb */
65 ath9k_htc_txcompletion_cb(cmd
->hif_dev
->htc_handle
,
/*
 * Send one register-write skb over the REG_OUT bulk pipe: allocate a
 * URB and a cmd_buf wrapper, fill the bulk URB with hif_usb_regout_cb
 * as completion, anchor it on regout_submitted and submit it.
 * NOTE(review): fragmented extract — allocation-failure paths and the
 * final return are not visible; confirm against the full source.
 */
76 static int hif_usb_send_regout(struct hif_device_usb
*hif_dev
,
83 urb
= usb_alloc_urb(0, GFP_KERNEL
);
87 cmd
= kzalloc(sizeof(*cmd
), GFP_KERNEL
);
94 cmd
->hif_dev
= hif_dev
;
96 usb_fill_bulk_urb(urb
, hif_dev
->udev
,
97 usb_sndbulkpipe(hif_dev
->udev
, USB_REG_OUT_PIPE
),
99 hif_usb_regout_cb
, cmd
);
/* Anchor before submit so teardown can cancel the URB */
101 usb_anchor_urb(urb
, &hif_dev
->regout_submitted
);
102 ret
= usb_submit_urb(urb
, GFP_KERNEL
);
/* Submission failed: detach the URB from the anchor again */
104 usb_unanchor_urb(urb
);
112 static inline void ath9k_skb_queue_purge(struct hif_device_usb
*hif_dev
,
113 struct sk_buff_head
*list
)
117 while ((skb
= __skb_dequeue(list
)) != NULL
) {
118 dev_kfree_skb_any(skb
);
119 TX_STAT_INC(skb_dropped
);
/*
 * Completion callback for WLAN TX bulk URBs. Completes all SKBs that
 * were aggregated into the tx_buf, resets the buffer, returns it to the
 * free list under tx_lock, and kicks __hif_usb_tx() to drain anything
 * queued meanwhile (unless TX has been stopped).
 * NOTE(review): fragmented extract — the switch arms, returns, and some
 * declarations (e.g. struct sk_buff *skb) are not visible here; verify
 * against the full source.
 */
123 static void hif_usb_tx_cb(struct urb
*urb
)
125 struct tx_buf
*tx_buf
= (struct tx_buf
*) urb
->context
;
126 struct hif_device_usb
*hif_dev
;
/* Defensive: bail out if the context was never set up */
129 if (!tx_buf
|| !tx_buf
->hif_dev
)
132 hif_dev
= tx_buf
->hif_dev
;
134 switch (urb
->status
) {
142 * The URB has been killed, free the SKBs
145 ath9k_skb_queue_purge(hif_dev
, &tx_buf
->skb_queue
);
151 /* Check if TX has been stopped */
152 spin_lock(&hif_dev
->tx
.tx_lock
);
153 if (hif_dev
->tx
.flags
& HIF_USB_TX_STOP
) {
154 spin_unlock(&hif_dev
->tx
.tx_lock
);
155 ath9k_skb_queue_purge(hif_dev
, &tx_buf
->skb_queue
);
158 spin_unlock(&hif_dev
->tx
.tx_lock
);
160 /* Complete the queued SKBs. */
161 while ((skb
= __skb_dequeue(&tx_buf
->skb_queue
)) != NULL
) {
162 ath9k_htc_txcompletion_cb(hif_dev
->htc_handle
,
164 TX_STAT_INC(skb_completed
);
168 /* Re-initialize the SKB queue */
169 tx_buf
->len
= tx_buf
->offset
= 0;
170 __skb_queue_head_init(&tx_buf
->skb_queue
);
172 /* Add this TX buffer to the free list */
173 spin_lock(&hif_dev
->tx
.tx_lock
);
174 list_move_tail(&tx_buf
->list
, &hif_dev
->tx
.tx_buf
);
175 hif_dev
->tx
.tx_buf_cnt
++;
176 if (!(hif_dev
->tx
.flags
& HIF_USB_TX_STOP
))
177 __hif_usb_tx(hif_dev
); /* Check for pending SKBs */
178 TX_STAT_INC(buf_completed
);
179 spin_unlock(&hif_dev
->tx
.tx_lock
);
/*
 * Core transmit routine (caller must hold tx.tx_lock). Pulls up to
 * MAX_TX_AGGR_NUM SKBs off tx_skb_queue, packs each one into a free
 * tx_buf with a 4-byte stream-mode header (length + tag), pads every
 * entry but the last to a 4-byte boundary, then submits the aggregate
 * as one bulk URB to USB_WLAN_TX_PIPE.
 * NOTE(review): fragmented extract — early returns, the `buf`/`i`/`ret`
 * declarations and the header length store are not visible; verify
 * against the full source.
 */
182 /* TX lock has to be taken */
183 static int __hif_usb_tx(struct hif_device_usb
*hif_dev
)
185 struct tx_buf
*tx_buf
= NULL
;
186 struct sk_buff
*nskb
= NULL
;
188 u16
*hdr
, tx_skb_cnt
= 0;
/* Nothing queued: nothing to do */
191 if (hif_dev
->tx
.tx_skb_cnt
== 0)
194 /* Check if a free TX buffer is available */
195 if (list_empty(&hif_dev
->tx
.tx_buf
))
198 tx_buf
= list_first_entry(&hif_dev
->tx
.tx_buf
, struct tx_buf
, list
);
199 list_move_tail(&tx_buf
->list
, &hif_dev
->tx
.tx_pending
);
200 hif_dev
->tx
.tx_buf_cnt
--;
202 tx_skb_cnt
= min_t(u16
, hif_dev
->tx
.tx_skb_cnt
, MAX_TX_AGGR_NUM
);
204 for (i
= 0; i
< tx_skb_cnt
; i
++) {
205 nskb
= __skb_dequeue(&hif_dev
->tx
.tx_skb_queue
);
207 /* Should never be NULL */
210 hif_dev
->tx
.tx_skb_cnt
--;
213 buf
+= tx_buf
->offset
;
/* 4-byte stream-mode header precedes each aggregated frame */
216 *hdr
++ = ATH_USB_TX_STREAM_MODE_TAG
;
218 memcpy(buf
, nskb
->data
, nskb
->len
);
219 tx_buf
->len
= nskb
->len
+ 4;
/* Pad every entry except the last to a 4-byte boundary */
221 if (i
< (tx_skb_cnt
- 1))
222 tx_buf
->offset
+= (((tx_buf
->len
- 1) / 4) + 1) * 4;
224 if (i
== (tx_skb_cnt
- 1))
225 tx_buf
->len
+= tx_buf
->offset
;
227 __skb_queue_tail(&tx_buf
->skb_queue
, nskb
);
228 TX_STAT_INC(skb_queued
);
231 usb_fill_bulk_urb(tx_buf
->urb
, hif_dev
->udev
,
232 usb_sndbulkpipe(hif_dev
->udev
, USB_WLAN_TX_PIPE
),
233 tx_buf
->buf
, tx_buf
->len
,
234 hif_usb_tx_cb
, tx_buf
);
236 ret
= usb_submit_urb(tx_buf
->urb
, GFP_ATOMIC
);
/* Submit failed: reset the buffer and return it to the free list */
238 tx_buf
->len
= tx_buf
->offset
= 0;
239 ath9k_skb_queue_purge(hif_dev
, &tx_buf
->skb_queue
);
240 __skb_queue_head_init(&tx_buf
->skb_queue
);
241 list_move_tail(&tx_buf
->list
, &hif_dev
->tx
.tx_buf
);
242 hif_dev
->tx
.tx_buf_cnt
++;
246 TX_STAT_INC(buf_queued
);
/*
 * Queue one WLAN frame for transmission. Under tx_lock: reject if TX
 * is stopped or the queue limit is exceeded; otherwise enqueue the skb
 * and kick __hif_usb_tx() immediately for normal frames, or for AMPDUs
 * only when all URBs are free and almost nothing is queued.
 * NOTE(review): fragmented extract — the return statements inside the
 * early-exit branches are not visible; verify against the full source.
 */
251 static int hif_usb_send_tx(struct hif_device_usb
*hif_dev
, struct sk_buff
*skb
,
252 struct ath9k_htc_tx_ctl
*tx_ctl
)
256 spin_lock_irqsave(&hif_dev
->tx
.tx_lock
, flags
);
/* TX path disabled (stop in progress): refuse the frame */
258 if (hif_dev
->tx
.flags
& HIF_USB_TX_STOP
) {
259 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
263 /* Check if the max queue count has been reached */
264 if (hif_dev
->tx
.tx_skb_cnt
> MAX_TX_BUF_NUM
) {
265 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
269 __skb_queue_tail(&hif_dev
->tx
.tx_skb_queue
, skb
);
270 hif_dev
->tx
.tx_skb_cnt
++;
272 /* Send normal frames immediately */
273 if (!tx_ctl
|| (tx_ctl
&& (tx_ctl
->type
== ATH9K_HTC_NORMAL
)))
274 __hif_usb_tx(hif_dev
);
276 /* Check if AMPDUs have to be sent immediately */
277 if (tx_ctl
&& (tx_ctl
->type
== ATH9K_HTC_AMPDU
) &&
278 (hif_dev
->tx
.tx_buf_cnt
== MAX_TX_URB_NUM
) &&
279 (hif_dev
->tx
.tx_skb_cnt
< 2)) {
280 __hif_usb_tx(hif_dev
);
283 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
288 static void hif_usb_start(void *hif_handle
, u8 pipe_id
)
290 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)hif_handle
;
293 hif_dev
->flags
|= HIF_USB_START
;
295 spin_lock_irqsave(&hif_dev
->tx
.tx_lock
, flags
);
296 hif_dev
->tx
.flags
&= ~HIF_USB_TX_STOP
;
297 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
300 static void hif_usb_stop(void *hif_handle
, u8 pipe_id
)
302 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)hif_handle
;
305 spin_lock_irqsave(&hif_dev
->tx
.tx_lock
, flags
);
306 ath9k_skb_queue_purge(hif_dev
, &hif_dev
->tx
.tx_skb_queue
);
307 hif_dev
->tx
.tx_skb_cnt
= 0;
308 hif_dev
->tx
.flags
|= HIF_USB_TX_STOP
;
309 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
/*
 * HIF .send callback: dispatch an outgoing skb to the right pipe —
 * data frames to the WLAN TX path, command/register writes to the
 * REG_OUT path; anything else is an error.
 * NOTE(review): fragmented extract — the switch statement, break/return
 * and the `ret` declaration are not visible; verify against the full
 * source.
 */
312 static int hif_usb_send(void *hif_handle
, u8 pipe_id
, struct sk_buff
*skb
,
313 struct ath9k_htc_tx_ctl
*tx_ctl
)
315 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)hif_handle
;
319 case USB_WLAN_TX_PIPE
:
320 ret
= hif_usb_send_tx(hif_dev
, skb
, tx_ctl
);
322 case USB_REG_OUT_PIPE
:
323 ret
= hif_usb_send_regout(hif_dev
, skb
);
/* Unknown pipe: log and fall through to the error return */
326 dev_err(&hif_dev
->udev
->dev
,
327 "ath9k_htc: Invalid TX pipe: %d\n", pipe_id
);
335 static struct ath9k_htc_hif hif_usb
= {
336 .transport
= ATH9K_HIF_USB
,
337 .name
= "ath9k_hif_usb",
339 .control_ul_pipe
= USB_REG_OUT_PIPE
,
340 .control_dl_pipe
= USB_REG_IN_PIPE
,
342 .start
= hif_usb_start
,
343 .stop
= hif_usb_stop
,
344 .send
= hif_usb_send
,
/*
 * De-aggregate one RX bulk transfer. The firmware streams packets as
 * [len(2) | tag(2) | payload | pad-to-4]; this walks the buffer,
 * validates each tag against ATH_USB_RX_STREAM_MODE_TAG, copies each
 * payload into a freshly allocated skb and collects them in skb_pool.
 * A packet that straddles the end of the transfer is stashed in
 * hif_dev->remain_skb (with rx_remain_len/rx_transfer_len/rx_pad_len
 * recorded under rx_lock) and completed by the next transfer's prologue.
 * Finally all pooled skbs are handed to ath9k_htc_rx_msg().
 * NOTE(review): fragmented extract — the `ptr`/`pad_len` declarations,
 * several braces, error gotos and the skb_put of the carried-over
 * length are not visible; verify against the full source.
 */
347 static void ath9k_hif_usb_rx_stream(struct hif_device_usb
*hif_dev
,
350 struct sk_buff
*nskb
, *skb_pool
[MAX_PKT_NUM_IN_TRANSFER
];
351 int index
= 0, i
= 0, chk_idx
, len
= skb
->len
;
352 int rx_remain_len
= 0, rx_pkt_len
= 0;
353 u16 pkt_len
, pkt_tag
, pool_index
= 0;
356 spin_lock(&hif_dev
->rx_lock
);
358 rx_remain_len
= hif_dev
->rx_remain_len
;
359 rx_pkt_len
= hif_dev
->rx_transfer_len
;
/* Finish a packet left incomplete by the previous transfer */
361 if (rx_remain_len
!= 0) {
362 struct sk_buff
*remain_skb
= hif_dev
->remain_skb
;
365 ptr
= (u8
*) remain_skb
->data
;
367 index
= rx_remain_len
;
368 rx_remain_len
-= hif_dev
->rx_pad_len
;
371 memcpy(ptr
, skb
->data
, rx_remain_len
);
373 rx_pkt_len
+= rx_remain_len
;
374 hif_dev
->rx_remain_len
= 0;
375 skb_put(remain_skb
, rx_pkt_len
);
377 skb_pool
[pool_index
++] = remain_skb
;
380 index
= rx_remain_len
;
384 spin_unlock(&hif_dev
->rx_lock
);
/* Walk the stream: each record is 4-byte header + payload + padding */
386 while (index
< len
) {
387 ptr
= (u8
*) skb
->data
;
389 pkt_len
= ptr
[index
] + (ptr
[index
+1] << 8);
390 pkt_tag
= ptr
[index
+2] + (ptr
[index
+3] << 8);
392 if (pkt_tag
== ATH_USB_RX_STREAM_MODE_TAG
) {
395 pad_len
= 4 - (pkt_len
& 0x3);
400 index
= index
+ 4 + pkt_len
+ pad_len
;
/* Packet spills past this transfer: save partial-packet state */
402 if (index
> MAX_RX_BUF_SIZE
) {
403 spin_lock(&hif_dev
->rx_lock
);
404 hif_dev
->rx_remain_len
= index
- MAX_RX_BUF_SIZE
;
405 hif_dev
->rx_transfer_len
=
406 MAX_RX_BUF_SIZE
- chk_idx
- 4;
407 hif_dev
->rx_pad_len
= pad_len
;
409 nskb
= __dev_alloc_skb(pkt_len
+ 32,
412 dev_err(&hif_dev
->udev
->dev
,
413 "ath9k_htc: RX memory allocation"
415 spin_unlock(&hif_dev
->rx_lock
);
418 skb_reserve(nskb
, 32);
419 RX_STAT_INC(skb_allocated
);
421 memcpy(nskb
->data
, &(skb
->data
[chk_idx
+4]),
422 hif_dev
->rx_transfer_len
);
424 /* Record the buffer pointer */
425 hif_dev
->remain_skb
= nskb
;
426 spin_unlock(&hif_dev
->rx_lock
);
/* Whole packet fits: copy it out into its own skb */
428 nskb
= __dev_alloc_skb(pkt_len
+ 32, GFP_ATOMIC
);
430 dev_err(&hif_dev
->udev
->dev
,
431 "ath9k_htc: RX memory allocation"
435 skb_reserve(nskb
, 32);
436 RX_STAT_INC(skb_allocated
);
438 memcpy(nskb
->data
, &(skb
->data
[chk_idx
+4]), pkt_len
);
439 skb_put(nskb
, pkt_len
);
440 skb_pool
[pool_index
++] = nskb
;
/* Bad tag: the record is discarded */
443 RX_STAT_INC(skb_dropped
);
/* Deliver every de-aggregated packet to the HTC layer */
449 for (i
= 0; i
< pool_index
; i
++) {
450 ath9k_htc_rx_msg(hif_dev
->htc_handle
, skb_pool
[i
],
451 skb_pool
[i
]->len
, USB_WLAN_RX_PIPE
);
452 RX_STAT_INC(skb_completed
);
/*
 * Completion callback for WLAN RX bulk URBs: push received bytes into
 * the stream de-aggregator, then reset the skb and resubmit the URB so
 * RX keeps flowing.
 * NOTE(review): fragmented extract — the status switch arms and the
 * error/free paths are not visible; verify against the full source.
 */
456 static void ath9k_hif_usb_rx_cb(struct urb
*urb
)
458 struct sk_buff
*skb
= (struct sk_buff
*) urb
->context
;
459 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)
460 usb_get_intfdata(usb_ifnum_to_if(urb
->dev
, 0));
469 switch (urb
->status
) {
481 if (likely(urb
->actual_length
!= 0)) {
482 skb_put(skb
, urb
->actual_length
);
483 ath9k_hif_usb_rx_stream(hif_dev
, skb
);
/* Rewind the skb so the same buffer can be reused for the next URB */
487 skb_reset_tail_pointer(skb
);
490 usb_anchor_urb(urb
, &hif_dev
->rx_submitted
);
491 ret
= usb_submit_urb(urb
, GFP_ATOMIC
);
/* Resubmission failed: detach from the anchor again */
493 usb_unanchor_urb(urb
);
/*
 * Completion callback for the REG_IN bulk URB: deliver the received
 * command/register data to ath9k_htc_rx_msg(), allocate a fresh skb as
 * the next receive buffer and resubmit the URB with it; if allocation
 * fails (or nothing was received) the old skb is reset and reused.
 * NOTE(review): fragmented extract — the status switch arms, returns
 * and the free-on-failure paths are not visible; verify against the
 * full source.
 */
502 static void ath9k_hif_usb_reg_in_cb(struct urb
*urb
)
504 struct sk_buff
*skb
= (struct sk_buff
*) urb
->context
;
505 struct sk_buff
*nskb
;
506 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)
507 usb_get_intfdata(usb_ifnum_to_if(urb
->dev
, 0));
516 switch (urb
->status
) {
528 if (likely(urb
->actual_length
!= 0)) {
529 skb_put(skb
, urb
->actual_length
);
531 /* Process the command first */
532 ath9k_htc_rx_msg(hif_dev
->htc_handle
, skb
,
533 skb
->len
, USB_REG_IN_PIPE
);
/* Hand a fresh skb to the URB for the next command */
536 nskb
= alloc_skb(MAX_REG_IN_BUF_SIZE
, GFP_ATOMIC
);
538 dev_err(&hif_dev
->udev
->dev
,
539 "ath9k_htc: REG_IN memory allocation failure\n");
544 usb_fill_bulk_urb(urb
, hif_dev
->udev
,
545 usb_rcvbulkpipe(hif_dev
->udev
,
547 nskb
->data
, MAX_REG_IN_BUF_SIZE
,
548 ath9k_hif_usb_reg_in_cb
, nskb
);
550 ret
= usb_submit_urb(urb
, GFP_ATOMIC
);
/* Nothing received: reuse the existing skb for the resubmit */
560 skb_reset_tail_pointer(skb
);
563 ret
= usb_submit_urb(urb
, GFP_ATOMIC
);
573 static void ath9k_hif_usb_dealloc_tx_urbs(struct hif_device_usb
*hif_dev
)
575 struct tx_buf
*tx_buf
= NULL
, *tx_buf_tmp
= NULL
;
577 list_for_each_entry_safe(tx_buf
, tx_buf_tmp
,
578 &hif_dev
->tx
.tx_buf
, list
) {
579 usb_kill_urb(tx_buf
->urb
);
580 list_del(&tx_buf
->list
);
581 usb_free_urb(tx_buf
->urb
);
586 list_for_each_entry_safe(tx_buf
, tx_buf_tmp
,
587 &hif_dev
->tx
.tx_pending
, list
) {
588 usb_kill_urb(tx_buf
->urb
);
589 list_del(&tx_buf
->list
);
590 usb_free_urb(tx_buf
->urb
);
/*
 * Allocate MAX_TX_URB_NUM tx_buf containers (struct + staging buffer +
 * URB each), initialise the TX lists/lock/queue and park every tx_buf
 * on the free list.
 * NOTE(review): fragmented extract — the allocation-failure gotos, the
 * `i` declaration and the return statements are not visible; on any
 * failure the visible tail calls ath9k_hif_usb_dealloc_tx_urbs() to
 * unwind. Verify against the full source.
 */
596 static int ath9k_hif_usb_alloc_tx_urbs(struct hif_device_usb
*hif_dev
)
598 struct tx_buf
*tx_buf
;
601 INIT_LIST_HEAD(&hif_dev
->tx
.tx_buf
);
602 INIT_LIST_HEAD(&hif_dev
->tx
.tx_pending
);
603 spin_lock_init(&hif_dev
->tx
.tx_lock
);
604 __skb_queue_head_init(&hif_dev
->tx
.tx_skb_queue
);
606 for (i
= 0; i
< MAX_TX_URB_NUM
; i
++) {
607 tx_buf
= kzalloc(sizeof(struct tx_buf
), GFP_KERNEL
);
611 tx_buf
->buf
= kzalloc(MAX_TX_BUF_SIZE
, GFP_KERNEL
);
615 tx_buf
->urb
= usb_alloc_urb(0, GFP_KERNEL
);
619 tx_buf
->hif_dev
= hif_dev
;
620 __skb_queue_head_init(&tx_buf
->skb_queue
);
622 list_add_tail(&tx_buf
->list
, &hif_dev
->tx
.tx_buf
);
625 hif_dev
->tx
.tx_buf_cnt
= MAX_TX_URB_NUM
;
/* Error unwind: free whatever was allocated so far */
633 ath9k_hif_usb_dealloc_tx_urbs(hif_dev
);
637 static void ath9k_hif_usb_dealloc_rx_urbs(struct hif_device_usb
*hif_dev
)
639 usb_kill_anchored_urbs(&hif_dev
->rx_submitted
);
/*
 * Allocate and submit MAX_RX_URB_NUM WLAN RX bulk URBs, each backed by
 * a MAX_RX_BUF_SIZE skb that doubles as the URB context. Each URB is
 * anchored on rx_submitted and its local reference dropped so the
 * anchor kill in the dealloc path frees it.
 * NOTE(review): fragmented extract — the `i`/`ret` declarations,
 * failure gotos, the usb_free_urb/kfree_skb unwind and the returns are
 * not visible; verify against the full source.
 */
642 static int ath9k_hif_usb_alloc_rx_urbs(struct hif_device_usb
*hif_dev
)
644 struct urb
*urb
= NULL
;
645 struct sk_buff
*skb
= NULL
;
648 init_usb_anchor(&hif_dev
->rx_submitted
);
649 spin_lock_init(&hif_dev
->rx_lock
);
651 for (i
= 0; i
< MAX_RX_URB_NUM
; i
++) {
654 urb
= usb_alloc_urb(0, GFP_KERNEL
);
660 /* Allocate buffer */
661 skb
= alloc_skb(MAX_RX_BUF_SIZE
, GFP_KERNEL
);
667 usb_fill_bulk_urb(urb
, hif_dev
->udev
,
668 usb_rcvbulkpipe(hif_dev
->udev
,
670 skb
->data
, MAX_RX_BUF_SIZE
,
671 ath9k_hif_usb_rx_cb
, skb
);
674 usb_anchor_urb(urb
, &hif_dev
->rx_submitted
);
677 ret
= usb_submit_urb(urb
, GFP_KERNEL
);
/* Submit failed: detach from the anchor before cleanup */
679 usb_unanchor_urb(urb
);
684 * Drop reference count.
685 * This ensures that the URB is freed when killing them.
/* Error unwind: kill everything submitted so far */
697 ath9k_hif_usb_dealloc_rx_urbs(hif_dev
);
701 static void ath9k_hif_usb_dealloc_reg_in_urb(struct hif_device_usb
*hif_dev
)
703 if (hif_dev
->reg_in_urb
) {
704 usb_kill_urb(hif_dev
->reg_in_urb
);
705 if (hif_dev
->reg_in_urb
->context
)
706 kfree_skb((void *)hif_dev
->reg_in_urb
->context
);
707 usb_free_urb(hif_dev
->reg_in_urb
);
708 hif_dev
->reg_in_urb
= NULL
;
/*
 * Allocate the single REG_IN URB plus its skb receive buffer, fill it
 * as a bulk-in transfer with ath9k_hif_usb_reg_in_cb as completion and
 * submit it.
 * NOTE(review): fragmented extract — the `skb` declaration, the
 * NULL-check on alloc_skb, the returns and the error labels are not
 * visible; the visible tail unwinds via
 * ath9k_hif_usb_dealloc_reg_in_urb(). Verify against the full source.
 */
712 static int ath9k_hif_usb_alloc_reg_in_urb(struct hif_device_usb
*hif_dev
)
716 hif_dev
->reg_in_urb
= usb_alloc_urb(0, GFP_KERNEL
);
717 if (hif_dev
->reg_in_urb
== NULL
)
720 skb
= alloc_skb(MAX_REG_IN_BUF_SIZE
, GFP_KERNEL
);
724 usb_fill_bulk_urb(hif_dev
->reg_in_urb
, hif_dev
->udev
,
725 usb_rcvbulkpipe(hif_dev
->udev
,
727 skb
->data
, MAX_REG_IN_BUF_SIZE
,
728 ath9k_hif_usb_reg_in_cb
, skb
);
730 if (usb_submit_urb(hif_dev
->reg_in_urb
, GFP_KERNEL
) != 0)
/* Error unwind */
736 ath9k_hif_usb_dealloc_reg_in_urb(hif_dev
);
740 static int ath9k_hif_usb_alloc_urbs(struct hif_device_usb
*hif_dev
)
743 init_usb_anchor(&hif_dev
->regout_submitted
);
746 if (ath9k_hif_usb_alloc_tx_urbs(hif_dev
) < 0)
750 if (ath9k_hif_usb_alloc_rx_urbs(hif_dev
) < 0)
754 if (ath9k_hif_usb_alloc_reg_in_urb(hif_dev
) < 0)
759 ath9k_hif_usb_dealloc_rx_urbs(hif_dev
);
761 ath9k_hif_usb_dealloc_tx_urbs(hif_dev
);
766 static void ath9k_hif_usb_dealloc_urbs(struct hif_device_usb
*hif_dev
)
768 usb_kill_anchored_urbs(&hif_dev
->regout_submitted
);
769 ath9k_hif_usb_dealloc_reg_in_urb(hif_dev
);
770 ath9k_hif_usb_dealloc_tx_urbs(hif_dev
);
771 ath9k_hif_usb_dealloc_rx_urbs(hif_dev
);
/*
 * Push the requested firmware image to the target in 4 KiB chunks via
 * vendor control transfers (FIRMWARE_DOWNLOAD), then issue
 * FIRMWARE_DOWNLOAD_COMP at the chip-specific text offset to start it.
 * NOTE(review): fragmented extract — the kzalloc NULL-check, the
 * transfer loop structure, case labels, error returns and the final
 * kfree(buf) are not visible; verify against the full source.
 */
774 static int ath9k_hif_usb_download_fw(struct hif_device_usb
*hif_dev
)
777 const void *data
= hif_dev
->firmware
->data
;
778 size_t len
= hif_dev
->firmware
->size
;
779 u32 addr
= AR9271_FIRMWARE
;
780 u8
*buf
= kzalloc(4096, GFP_KERNEL
);
/* Copy the next chunk into the DMA-safe bounce buffer */
787 transfer
= min_t(int, len
, 4096);
788 memcpy(buf
, data
, transfer
);
790 err
= usb_control_msg(hif_dev
->udev
,
791 usb_sndctrlpipe(hif_dev
->udev
, 0),
792 FIRMWARE_DOWNLOAD
, 0x40 | USB_DIR_OUT
,
793 addr
>> 8, 0, buf
, transfer
, HZ
);
/* Pick the firmware text offset for the detected chip */
805 switch (hif_dev
->device_id
) {
809 firm_offset
= AR7010_FIRMWARE_TEXT
;
812 firm_offset
= AR9271_FIRMWARE_TEXT
;
817 * Issue FW download complete command to firmware.
819 err
= usb_control_msg(hif_dev
->udev
, usb_sndctrlpipe(hif_dev
->udev
, 0),
820 FIRMWARE_DOWNLOAD_COMP
,
822 firm_offset
>> 8, 0, NULL
, 0, HZ
);
826 dev_info(&hif_dev
->udev
->dev
, "ath9k_htc: Transferred FW: %s, size: %ld\n",
827 hif_dev
->fw_name
, (unsigned long) hif_dev
->firmware
->size
);
/*
 * Device bring-up: request and download the firmware, patch any
 * interrupt endpoint descriptors to bulk (lowers CPU usage during
 * scans), then allocate all URBs. Errors unwind via goto labels that
 * free URBs and release the firmware.
 * NOTE(review): fragmented extract — the `ret`/`idx` declarations, the
 * if (ret) guards, returns and label names are mostly not visible;
 * verify against the full source.
 */
832 static int ath9k_hif_usb_dev_init(struct hif_device_usb
*hif_dev
)
835 struct usb_host_interface
*alt
= &hif_dev
->interface
->altsetting
[0];
836 struct usb_endpoint_descriptor
*endp
;
838 /* Request firmware */
839 ret
= request_firmware(&hif_dev
->firmware
, hif_dev
->fw_name
,
840 &hif_dev
->udev
->dev
);
842 dev_err(&hif_dev
->udev
->dev
,
843 "ath9k_htc: Firmware - %s not found\n", hif_dev
->fw_name
);
847 /* Download firmware */
848 ret
= ath9k_hif_usb_download_fw(hif_dev
);
850 dev_err(&hif_dev
->udev
->dev
,
851 "ath9k_htc: Firmware - %s download failed\n",
853 goto err_fw_download
;
856 /* On downloading the firmware to the target, the USB descriptor of EP4
857 * is 'patched' to change the type of the endpoint to Bulk. This will
858 * bring down CPU usage during the scan period.
860 for (idx
= 0; idx
< alt
->desc
.bNumEndpoints
; idx
++) {
861 endp
= &alt
->endpoint
[idx
].desc
;
862 if ((endp
->bmAttributes
& USB_ENDPOINT_XFERTYPE_MASK
)
863 == USB_ENDPOINT_XFER_INT
) {
864 endp
->bmAttributes
&= ~USB_ENDPOINT_XFERTYPE_MASK
;
865 endp
->bmAttributes
|= USB_ENDPOINT_XFER_BULK
;
/* Allocate URBs */
871 ret
= ath9k_hif_usb_alloc_urbs(hif_dev
);
873 dev_err(&hif_dev
->udev
->dev
,
874 "ath9k_htc: Unable to allocate URBs\n");
/* Error unwind: free URBs, then drop the firmware reference */
881 ath9k_hif_usb_dealloc_urbs(hif_dev
);
883 release_firmware(hif_dev
->firmware
);
885 hif_dev
->firmware
= NULL
;
889 static void ath9k_hif_usb_dev_deinit(struct hif_device_usb
*hif_dev
)
891 ath9k_hif_usb_dealloc_urbs(hif_dev
);
892 if (hif_dev
->firmware
)
893 release_firmware(hif_dev
->firmware
);
/*
 * USB probe: allocate the per-device state, register it as interface
 * data, create the HTC handle, pick the firmware image from the
 * product ID (and bcdDevice revision for AR7010), initialise the
 * device and start HTC. Errors unwind in reverse order via gotos.
 * NOTE(review): fragmented extract — the kzalloc NULL-check, switch
 * case labels, `ret` declaration, returns and kfree(hif_dev) are not
 * visible; verify against the full source.
 */
896 static int ath9k_hif_usb_probe(struct usb_interface
*interface
,
897 const struct usb_device_id
*id
)
899 struct usb_device
*udev
= interface_to_usbdev(interface
);
900 struct hif_device_usb
*hif_dev
;
903 hif_dev
= kzalloc(sizeof(struct hif_device_usb
), GFP_KERNEL
);
910 hif_dev
->udev
= udev
;
911 hif_dev
->interface
= interface
;
912 hif_dev
->device_id
= id
->idProduct
;
/* Firmware is re-downloaded on resume, so a reset-resume is fine */
914 udev
->reset_resume
= 1;
916 usb_set_intfdata(interface
, hif_dev
);
918 hif_dev
->htc_handle
= ath9k_htc_hw_alloc(hif_dev
, &hif_usb
,
919 &hif_dev
->udev
->dev
);
920 if (hif_dev
->htc_handle
== NULL
) {
922 goto err_htc_hw_alloc
;
925 /* Find out which firmware to load */
927 switch(hif_dev
->device_id
) {
/* AR7010 rev 1.1 ships a different image than earlier revisions */
931 if (le16_to_cpu(udev
->descriptor
.bcdDevice
) == 0x0202)
932 hif_dev
->fw_name
= FIRMWARE_AR7010_1_1
;
934 hif_dev
->fw_name
= FIRMWARE_AR7010
;
937 hif_dev
->fw_name
= FIRMWARE_AR9271
;
941 ret
= ath9k_hif_usb_dev_init(hif_dev
);
944 goto err_hif_init_usb
;
947 ret
= ath9k_htc_hw_init(hif_dev
->htc_handle
,
948 &hif_dev
->udev
->dev
, hif_dev
->device_id
,
949 hif_dev
->udev
->product
);
952 goto err_htc_hw_init
;
955 dev_info(&hif_dev
->udev
->dev
, "ath9k_htc: USB layer initialized\n");
/* Error unwind in reverse order of setup */
960 ath9k_hif_usb_dev_deinit(hif_dev
);
962 ath9k_htc_hw_free(hif_dev
->htc_handle
);
964 usb_set_intfdata(interface
, NULL
);
/*
 * Send the magic 0xffffffff reboot command to the target over the
 * REG_OUT bulk pipe so the device restarts its firmware on unplug.
 * NOTE(review): fragmented extract — the `buf`/`ret` declarations, the
 * kmemdup NULL-check, the usb_bulk_msg length/timeout arguments and
 * the kfree(buf) are not visible; verify against the full source.
 */
971 static void ath9k_hif_usb_reboot(struct usb_device
*udev
)
973 u32 reboot_cmd
= 0xffffffff;
/* DMA-safe copy of the command word */
977 buf
= kmemdup(&reboot_cmd
, 4, GFP_KERNEL
);
981 ret
= usb_bulk_msg(udev
, usb_sndbulkpipe(udev
, USB_REG_OUT_PIPE
),
984 dev_err(&udev
->dev
, "ath9k_htc: USB reboot failed\n");
/*
 * USB disconnect: tear down HTC (telling it whether the device is
 * physically gone), free the device state, and — if the device had
 * been started — send the reboot command so the dongle's firmware
 * restarts.
 * NOTE(review): fragmented extract — hif_dev appears to be
 * dereferenced (flags check) after usb_set_intfdata(interface, NULL);
 * the guarding if (hif_dev) braces and the kfree(hif_dev) are not
 * visible here. Confirm the ordering against the full source.
 */
989 static void ath9k_hif_usb_disconnect(struct usb_interface
*interface
)
991 struct usb_device
*udev
= interface_to_usbdev(interface
);
992 struct hif_device_usb
*hif_dev
=
993 (struct hif_device_usb
*) usb_get_intfdata(interface
);
996 ath9k_htc_hw_deinit(hif_dev
->htc_handle
,
997 (udev
->state
== USB_STATE_NOTATTACHED
) ? true : false);
998 ath9k_htc_hw_free(hif_dev
->htc_handle
);
999 ath9k_hif_usb_dev_deinit(hif_dev
);
1000 usb_set_intfdata(interface
, NULL
);
/* Only reboot a device that actually made it through start */
1003 if (hif_dev
->flags
& HIF_USB_START
)
1004 ath9k_hif_usb_reboot(udev
);
1007 dev_info(&udev
->dev
, "ath9k_htc: USB layer deinitialized\n");
1012 static int ath9k_hif_usb_suspend(struct usb_interface
*interface
,
1013 pm_message_t message
)
1015 struct hif_device_usb
*hif_dev
=
1016 (struct hif_device_usb
*) usb_get_intfdata(interface
);
1018 ath9k_hif_usb_dealloc_urbs(hif_dev
);
/*
 * USB resume hook: re-allocate all URBs, re-download the firmware if a
 * firmware image is still held (the device loses it across suspend),
 * then resume the HTC layer. On failure, the URBs are freed again.
 * NOTE(review): fragmented extract — the `ret` declaration, if (ret)
 * guards, returns and label structure are not visible; verify against
 * the full source.
 */
1023 static int ath9k_hif_usb_resume(struct usb_interface
*interface
)
1025 struct hif_device_usb
*hif_dev
=
1026 (struct hif_device_usb
*) usb_get_intfdata(interface
);
1029 ret
= ath9k_hif_usb_alloc_urbs(hif_dev
);
/* Device RAM was lost across suspend: push the firmware again */
1033 if (hif_dev
->firmware
) {
1034 ret
= ath9k_hif_usb_download_fw(hif_dev
);
1038 ath9k_hif_usb_dealloc_urbs(hif_dev
);
1044 ret
= ath9k_htc_resume(hif_dev
->htc_handle
);
/* Error unwind */
1052 ath9k_hif_usb_dealloc_urbs(hif_dev
);
1058 static struct usb_driver ath9k_hif_usb_driver
= {
1059 .name
= "ath9k_hif_usb",
1060 .probe
= ath9k_hif_usb_probe
,
1061 .disconnect
= ath9k_hif_usb_disconnect
,
1063 .suspend
= ath9k_hif_usb_suspend
,
1064 .resume
= ath9k_hif_usb_resume
,
1065 .reset_resume
= ath9k_hif_usb_resume
,
1067 .id_table
= ath9k_hif_usb_ids
,
1071 int ath9k_hif_usb_init(void)
1073 return usb_register(&ath9k_hif_usb_driver
);
1076 void ath9k_hif_usb_exit(void)
1078 usb_deregister(&ath9k_hif_usb_driver
);