2 * Copyright (c) 2010 Atheros Communications Inc.
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
19 /* identify firmware images */
20 #define FIRMWARE_AR7010 "ar7010.fw"
21 #define FIRMWARE_AR7010_1_1 "ar7010_1_1.fw"
22 #define FIRMWARE_AR9271 "ar9271.fw"
24 MODULE_FIRMWARE(FIRMWARE_AR7010
);
25 MODULE_FIRMWARE(FIRMWARE_AR7010_1_1
);
26 MODULE_FIRMWARE(FIRMWARE_AR9271
);
28 static struct usb_device_id ath9k_hif_usb_ids
[] = {
29 { USB_DEVICE(0x0cf3, 0x9271) }, /* Atheros */
30 { USB_DEVICE(0x0cf3, 0x1006) }, /* Atheros */
31 { USB_DEVICE(0x0cf3, 0x7010) }, /* Atheros */
32 { USB_DEVICE(0x0cf3, 0x7015) }, /* Atheros */
33 { USB_DEVICE(0x0846, 0x9030) }, /* Netgear N150 */
34 { USB_DEVICE(0x0846, 0x9018) }, /* Netgear WNDA3200 */
35 { USB_DEVICE(0x07D1, 0x3A10) }, /* Dlink Wireless 150 */
36 { USB_DEVICE(0x13D3, 0x3327) }, /* Azurewave */
37 { USB_DEVICE(0x13D3, 0x3328) }, /* Azurewave */
38 { USB_DEVICE(0x13D3, 0x3346) }, /* IMC Networks */
39 { USB_DEVICE(0x13D3, 0x3348) }, /* Azurewave */
40 { USB_DEVICE(0x13D3, 0x3349) }, /* Azurewave */
41 { USB_DEVICE(0x13D3, 0x3350) }, /* Azurewave */
42 { USB_DEVICE(0x04CA, 0x4605) }, /* Liteon */
43 { USB_DEVICE(0x083A, 0xA704) }, /* SMC Networks */
44 { USB_DEVICE(0x040D, 0x3801) }, /* VIA */
45 { USB_DEVICE(0x1668, 0x1200) }, /* Verizon */
49 MODULE_DEVICE_TABLE(usb
, ath9k_hif_usb_ids
);
/* Forward declaration: TX kick function, defined below (TX lock held). */
static int __hif_usb_tx(struct hif_device_usb *hif_dev);
53 static void hif_usb_regout_cb(struct urb
*urb
)
55 struct cmd_buf
*cmd
= (struct cmd_buf
*)urb
->context
;
57 switch (urb
->status
) {
70 ath9k_htc_txcompletion_cb(cmd
->hif_dev
->htc_handle
,
81 static int hif_usb_send_regout(struct hif_device_usb
*hif_dev
,
88 urb
= usb_alloc_urb(0, GFP_KERNEL
);
92 cmd
= kzalloc(sizeof(*cmd
), GFP_KERNEL
);
99 cmd
->hif_dev
= hif_dev
;
101 usb_fill_bulk_urb(urb
, hif_dev
->udev
,
102 usb_sndbulkpipe(hif_dev
->udev
, USB_REG_OUT_PIPE
),
104 hif_usb_regout_cb
, cmd
);
106 usb_anchor_urb(urb
, &hif_dev
->regout_submitted
);
107 ret
= usb_submit_urb(urb
, GFP_KERNEL
);
109 usb_unanchor_urb(urb
);
117 static inline void ath9k_skb_queue_purge(struct hif_device_usb
*hif_dev
,
118 struct sk_buff_head
*list
)
122 while ((skb
= __skb_dequeue(list
)) != NULL
) {
123 dev_kfree_skb_any(skb
);
124 TX_STAT_INC(skb_dropped
);
128 static void hif_usb_tx_cb(struct urb
*urb
)
130 struct tx_buf
*tx_buf
= (struct tx_buf
*) urb
->context
;
131 struct hif_device_usb
*hif_dev
;
134 if (!tx_buf
|| !tx_buf
->hif_dev
)
137 hif_dev
= tx_buf
->hif_dev
;
139 switch (urb
->status
) {
147 * The URB has been killed, free the SKBs
150 ath9k_skb_queue_purge(hif_dev
, &tx_buf
->skb_queue
);
156 /* Check if TX has been stopped */
157 spin_lock(&hif_dev
->tx
.tx_lock
);
158 if (hif_dev
->tx
.flags
& HIF_USB_TX_STOP
) {
159 spin_unlock(&hif_dev
->tx
.tx_lock
);
160 ath9k_skb_queue_purge(hif_dev
, &tx_buf
->skb_queue
);
163 spin_unlock(&hif_dev
->tx
.tx_lock
);
165 /* Complete the queued SKBs. */
166 while ((skb
= __skb_dequeue(&tx_buf
->skb_queue
)) != NULL
) {
167 ath9k_htc_txcompletion_cb(hif_dev
->htc_handle
,
169 TX_STAT_INC(skb_completed
);
173 /* Re-initialize the SKB queue */
174 tx_buf
->len
= tx_buf
->offset
= 0;
175 __skb_queue_head_init(&tx_buf
->skb_queue
);
177 /* Add this TX buffer to the free list */
178 spin_lock(&hif_dev
->tx
.tx_lock
);
179 list_move_tail(&tx_buf
->list
, &hif_dev
->tx
.tx_buf
);
180 hif_dev
->tx
.tx_buf_cnt
++;
181 if (!(hif_dev
->tx
.flags
& HIF_USB_TX_STOP
))
182 __hif_usb_tx(hif_dev
); /* Check for pending SKBs */
183 TX_STAT_INC(buf_completed
);
184 spin_unlock(&hif_dev
->tx
.tx_lock
);
187 /* TX lock has to be taken */
188 static int __hif_usb_tx(struct hif_device_usb
*hif_dev
)
190 struct tx_buf
*tx_buf
= NULL
;
191 struct sk_buff
*nskb
= NULL
;
193 u16
*hdr
, tx_skb_cnt
= 0;
196 if (hif_dev
->tx
.tx_skb_cnt
== 0)
199 /* Check if a free TX buffer is available */
200 if (list_empty(&hif_dev
->tx
.tx_buf
))
203 tx_buf
= list_first_entry(&hif_dev
->tx
.tx_buf
, struct tx_buf
, list
);
204 list_move_tail(&tx_buf
->list
, &hif_dev
->tx
.tx_pending
);
205 hif_dev
->tx
.tx_buf_cnt
--;
207 tx_skb_cnt
= min_t(u16
, hif_dev
->tx
.tx_skb_cnt
, MAX_TX_AGGR_NUM
);
209 for (i
= 0; i
< tx_skb_cnt
; i
++) {
210 nskb
= __skb_dequeue(&hif_dev
->tx
.tx_skb_queue
);
212 /* Should never be NULL */
215 hif_dev
->tx
.tx_skb_cnt
--;
218 buf
+= tx_buf
->offset
;
221 *hdr
++ = ATH_USB_TX_STREAM_MODE_TAG
;
223 memcpy(buf
, nskb
->data
, nskb
->len
);
224 tx_buf
->len
= nskb
->len
+ 4;
226 if (i
< (tx_skb_cnt
- 1))
227 tx_buf
->offset
+= (((tx_buf
->len
- 1) / 4) + 1) * 4;
229 if (i
== (tx_skb_cnt
- 1))
230 tx_buf
->len
+= tx_buf
->offset
;
232 __skb_queue_tail(&tx_buf
->skb_queue
, nskb
);
233 TX_STAT_INC(skb_queued
);
236 usb_fill_bulk_urb(tx_buf
->urb
, hif_dev
->udev
,
237 usb_sndbulkpipe(hif_dev
->udev
, USB_WLAN_TX_PIPE
),
238 tx_buf
->buf
, tx_buf
->len
,
239 hif_usb_tx_cb
, tx_buf
);
241 ret
= usb_submit_urb(tx_buf
->urb
, GFP_ATOMIC
);
243 tx_buf
->len
= tx_buf
->offset
= 0;
244 ath9k_skb_queue_purge(hif_dev
, &tx_buf
->skb_queue
);
245 __skb_queue_head_init(&tx_buf
->skb_queue
);
246 list_move_tail(&tx_buf
->list
, &hif_dev
->tx
.tx_buf
);
247 hif_dev
->tx
.tx_buf_cnt
++;
251 TX_STAT_INC(buf_queued
);
256 static int hif_usb_send_tx(struct hif_device_usb
*hif_dev
, struct sk_buff
*skb
,
257 struct ath9k_htc_tx_ctl
*tx_ctl
)
261 spin_lock_irqsave(&hif_dev
->tx
.tx_lock
, flags
);
263 if (hif_dev
->tx
.flags
& HIF_USB_TX_STOP
) {
264 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
268 /* Check if the max queue count has been reached */
269 if (hif_dev
->tx
.tx_skb_cnt
> MAX_TX_BUF_NUM
) {
270 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
274 __skb_queue_tail(&hif_dev
->tx
.tx_skb_queue
, skb
);
275 hif_dev
->tx
.tx_skb_cnt
++;
277 /* Send normal frames immediately */
278 if (!tx_ctl
|| (tx_ctl
&& (tx_ctl
->type
== ATH9K_HTC_NORMAL
)))
279 __hif_usb_tx(hif_dev
);
281 /* Check if AMPDUs have to be sent immediately */
282 if (tx_ctl
&& (tx_ctl
->type
== ATH9K_HTC_AMPDU
) &&
283 (hif_dev
->tx
.tx_buf_cnt
== MAX_TX_URB_NUM
) &&
284 (hif_dev
->tx
.tx_skb_cnt
< 2)) {
285 __hif_usb_tx(hif_dev
);
288 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
293 static void hif_usb_start(void *hif_handle
, u8 pipe_id
)
295 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)hif_handle
;
298 hif_dev
->flags
|= HIF_USB_START
;
300 spin_lock_irqsave(&hif_dev
->tx
.tx_lock
, flags
);
301 hif_dev
->tx
.flags
&= ~HIF_USB_TX_STOP
;
302 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
305 static void hif_usb_stop(void *hif_handle
, u8 pipe_id
)
307 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)hif_handle
;
310 spin_lock_irqsave(&hif_dev
->tx
.tx_lock
, flags
);
311 ath9k_skb_queue_purge(hif_dev
, &hif_dev
->tx
.tx_skb_queue
);
312 hif_dev
->tx
.tx_skb_cnt
= 0;
313 hif_dev
->tx
.flags
|= HIF_USB_TX_STOP
;
314 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
317 static int hif_usb_send(void *hif_handle
, u8 pipe_id
, struct sk_buff
*skb
,
318 struct ath9k_htc_tx_ctl
*tx_ctl
)
320 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)hif_handle
;
324 case USB_WLAN_TX_PIPE
:
325 ret
= hif_usb_send_tx(hif_dev
, skb
, tx_ctl
);
327 case USB_REG_OUT_PIPE
:
328 ret
= hif_usb_send_regout(hif_dev
, skb
);
331 dev_err(&hif_dev
->udev
->dev
,
332 "ath9k_htc: Invalid TX pipe: %d\n", pipe_id
);
340 static struct ath9k_htc_hif hif_usb
= {
341 .transport
= ATH9K_HIF_USB
,
342 .name
= "ath9k_hif_usb",
344 .control_ul_pipe
= USB_REG_OUT_PIPE
,
345 .control_dl_pipe
= USB_REG_IN_PIPE
,
347 .start
= hif_usb_start
,
348 .stop
= hif_usb_stop
,
349 .send
= hif_usb_send
,
352 static void ath9k_hif_usb_rx_stream(struct hif_device_usb
*hif_dev
,
355 struct sk_buff
*nskb
, *skb_pool
[MAX_PKT_NUM_IN_TRANSFER
];
356 int index
= 0, i
= 0, chk_idx
, len
= skb
->len
;
357 int rx_remain_len
= 0, rx_pkt_len
= 0;
358 u16 pkt_len
, pkt_tag
, pool_index
= 0;
361 spin_lock(&hif_dev
->rx_lock
);
363 rx_remain_len
= hif_dev
->rx_remain_len
;
364 rx_pkt_len
= hif_dev
->rx_transfer_len
;
366 if (rx_remain_len
!= 0) {
367 struct sk_buff
*remain_skb
= hif_dev
->remain_skb
;
370 ptr
= (u8
*) remain_skb
->data
;
372 index
= rx_remain_len
;
373 rx_remain_len
-= hif_dev
->rx_pad_len
;
376 memcpy(ptr
, skb
->data
, rx_remain_len
);
378 rx_pkt_len
+= rx_remain_len
;
379 hif_dev
->rx_remain_len
= 0;
380 skb_put(remain_skb
, rx_pkt_len
);
382 skb_pool
[pool_index
++] = remain_skb
;
385 index
= rx_remain_len
;
389 spin_unlock(&hif_dev
->rx_lock
);
391 while (index
< len
) {
392 ptr
= (u8
*) skb
->data
;
394 pkt_len
= ptr
[index
] + (ptr
[index
+1] << 8);
395 pkt_tag
= ptr
[index
+2] + (ptr
[index
+3] << 8);
397 if (pkt_tag
== ATH_USB_RX_STREAM_MODE_TAG
) {
400 pad_len
= 4 - (pkt_len
& 0x3);
405 index
= index
+ 4 + pkt_len
+ pad_len
;
407 if (index
> MAX_RX_BUF_SIZE
) {
408 spin_lock(&hif_dev
->rx_lock
);
409 hif_dev
->rx_remain_len
= index
- MAX_RX_BUF_SIZE
;
410 hif_dev
->rx_transfer_len
=
411 MAX_RX_BUF_SIZE
- chk_idx
- 4;
412 hif_dev
->rx_pad_len
= pad_len
;
414 nskb
= __dev_alloc_skb(pkt_len
+ 32,
417 dev_err(&hif_dev
->udev
->dev
,
418 "ath9k_htc: RX memory allocation"
420 spin_unlock(&hif_dev
->rx_lock
);
423 skb_reserve(nskb
, 32);
424 RX_STAT_INC(skb_allocated
);
426 memcpy(nskb
->data
, &(skb
->data
[chk_idx
+4]),
427 hif_dev
->rx_transfer_len
);
429 /* Record the buffer pointer */
430 hif_dev
->remain_skb
= nskb
;
431 spin_unlock(&hif_dev
->rx_lock
);
433 nskb
= __dev_alloc_skb(pkt_len
+ 32, GFP_ATOMIC
);
435 dev_err(&hif_dev
->udev
->dev
,
436 "ath9k_htc: RX memory allocation"
440 skb_reserve(nskb
, 32);
441 RX_STAT_INC(skb_allocated
);
443 memcpy(nskb
->data
, &(skb
->data
[chk_idx
+4]), pkt_len
);
444 skb_put(nskb
, pkt_len
);
445 skb_pool
[pool_index
++] = nskb
;
448 RX_STAT_INC(skb_dropped
);
454 for (i
= 0; i
< pool_index
; i
++) {
455 ath9k_htc_rx_msg(hif_dev
->htc_handle
, skb_pool
[i
],
456 skb_pool
[i
]->len
, USB_WLAN_RX_PIPE
);
457 RX_STAT_INC(skb_completed
);
461 static void ath9k_hif_usb_rx_cb(struct urb
*urb
)
463 struct sk_buff
*skb
= (struct sk_buff
*) urb
->context
;
464 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)
465 usb_get_intfdata(usb_ifnum_to_if(urb
->dev
, 0));
474 switch (urb
->status
) {
486 if (likely(urb
->actual_length
!= 0)) {
487 skb_put(skb
, urb
->actual_length
);
488 ath9k_hif_usb_rx_stream(hif_dev
, skb
);
492 skb_reset_tail_pointer(skb
);
495 usb_anchor_urb(urb
, &hif_dev
->rx_submitted
);
496 ret
= usb_submit_urb(urb
, GFP_ATOMIC
);
498 usb_unanchor_urb(urb
);
507 static void ath9k_hif_usb_reg_in_cb(struct urb
*urb
)
509 struct sk_buff
*skb
= (struct sk_buff
*) urb
->context
;
510 struct sk_buff
*nskb
;
511 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)
512 usb_get_intfdata(usb_ifnum_to_if(urb
->dev
, 0));
521 switch (urb
->status
) {
533 if (likely(urb
->actual_length
!= 0)) {
534 skb_put(skb
, urb
->actual_length
);
536 /* Process the command first */
537 ath9k_htc_rx_msg(hif_dev
->htc_handle
, skb
,
538 skb
->len
, USB_REG_IN_PIPE
);
541 nskb
= alloc_skb(MAX_REG_IN_BUF_SIZE
, GFP_ATOMIC
);
543 dev_err(&hif_dev
->udev
->dev
,
544 "ath9k_htc: REG_IN memory allocation failure\n");
549 usb_fill_bulk_urb(urb
, hif_dev
->udev
,
550 usb_rcvbulkpipe(hif_dev
->udev
,
552 nskb
->data
, MAX_REG_IN_BUF_SIZE
,
553 ath9k_hif_usb_reg_in_cb
, nskb
);
555 ret
= usb_submit_urb(urb
, GFP_ATOMIC
);
565 skb_reset_tail_pointer(skb
);
568 ret
= usb_submit_urb(urb
, GFP_ATOMIC
);
578 static void ath9k_hif_usb_dealloc_tx_urbs(struct hif_device_usb
*hif_dev
)
580 struct tx_buf
*tx_buf
= NULL
, *tx_buf_tmp
= NULL
;
582 list_for_each_entry_safe(tx_buf
, tx_buf_tmp
,
583 &hif_dev
->tx
.tx_buf
, list
) {
584 usb_kill_urb(tx_buf
->urb
);
585 list_del(&tx_buf
->list
);
586 usb_free_urb(tx_buf
->urb
);
591 list_for_each_entry_safe(tx_buf
, tx_buf_tmp
,
592 &hif_dev
->tx
.tx_pending
, list
) {
593 usb_kill_urb(tx_buf
->urb
);
594 list_del(&tx_buf
->list
);
595 usb_free_urb(tx_buf
->urb
);
601 static int ath9k_hif_usb_alloc_tx_urbs(struct hif_device_usb
*hif_dev
)
603 struct tx_buf
*tx_buf
;
606 INIT_LIST_HEAD(&hif_dev
->tx
.tx_buf
);
607 INIT_LIST_HEAD(&hif_dev
->tx
.tx_pending
);
608 spin_lock_init(&hif_dev
->tx
.tx_lock
);
609 __skb_queue_head_init(&hif_dev
->tx
.tx_skb_queue
);
611 for (i
= 0; i
< MAX_TX_URB_NUM
; i
++) {
612 tx_buf
= kzalloc(sizeof(struct tx_buf
), GFP_KERNEL
);
616 tx_buf
->buf
= kzalloc(MAX_TX_BUF_SIZE
, GFP_KERNEL
);
620 tx_buf
->urb
= usb_alloc_urb(0, GFP_KERNEL
);
624 tx_buf
->hif_dev
= hif_dev
;
625 __skb_queue_head_init(&tx_buf
->skb_queue
);
627 list_add_tail(&tx_buf
->list
, &hif_dev
->tx
.tx_buf
);
630 hif_dev
->tx
.tx_buf_cnt
= MAX_TX_URB_NUM
;
638 ath9k_hif_usb_dealloc_tx_urbs(hif_dev
);
642 static void ath9k_hif_usb_dealloc_rx_urbs(struct hif_device_usb
*hif_dev
)
644 usb_kill_anchored_urbs(&hif_dev
->rx_submitted
);
647 static int ath9k_hif_usb_alloc_rx_urbs(struct hif_device_usb
*hif_dev
)
649 struct urb
*urb
= NULL
;
650 struct sk_buff
*skb
= NULL
;
653 init_usb_anchor(&hif_dev
->rx_submitted
);
654 spin_lock_init(&hif_dev
->rx_lock
);
656 for (i
= 0; i
< MAX_RX_URB_NUM
; i
++) {
659 urb
= usb_alloc_urb(0, GFP_KERNEL
);
665 /* Allocate buffer */
666 skb
= alloc_skb(MAX_RX_BUF_SIZE
, GFP_KERNEL
);
672 usb_fill_bulk_urb(urb
, hif_dev
->udev
,
673 usb_rcvbulkpipe(hif_dev
->udev
,
675 skb
->data
, MAX_RX_BUF_SIZE
,
676 ath9k_hif_usb_rx_cb
, skb
);
679 usb_anchor_urb(urb
, &hif_dev
->rx_submitted
);
682 ret
= usb_submit_urb(urb
, GFP_KERNEL
);
684 usb_unanchor_urb(urb
);
689 * Drop reference count.
690 * This ensures that the URB is freed when killing them.
702 ath9k_hif_usb_dealloc_rx_urbs(hif_dev
);
706 static void ath9k_hif_usb_dealloc_reg_in_urb(struct hif_device_usb
*hif_dev
)
708 if (hif_dev
->reg_in_urb
) {
709 usb_kill_urb(hif_dev
->reg_in_urb
);
710 if (hif_dev
->reg_in_urb
->context
)
711 kfree_skb((void *)hif_dev
->reg_in_urb
->context
);
712 usb_free_urb(hif_dev
->reg_in_urb
);
713 hif_dev
->reg_in_urb
= NULL
;
717 static int ath9k_hif_usb_alloc_reg_in_urb(struct hif_device_usb
*hif_dev
)
721 hif_dev
->reg_in_urb
= usb_alloc_urb(0, GFP_KERNEL
);
722 if (hif_dev
->reg_in_urb
== NULL
)
725 skb
= alloc_skb(MAX_REG_IN_BUF_SIZE
, GFP_KERNEL
);
729 usb_fill_bulk_urb(hif_dev
->reg_in_urb
, hif_dev
->udev
,
730 usb_rcvbulkpipe(hif_dev
->udev
,
732 skb
->data
, MAX_REG_IN_BUF_SIZE
,
733 ath9k_hif_usb_reg_in_cb
, skb
);
735 if (usb_submit_urb(hif_dev
->reg_in_urb
, GFP_KERNEL
) != 0)
741 ath9k_hif_usb_dealloc_reg_in_urb(hif_dev
);
745 static int ath9k_hif_usb_alloc_urbs(struct hif_device_usb
*hif_dev
)
748 init_usb_anchor(&hif_dev
->regout_submitted
);
751 if (ath9k_hif_usb_alloc_tx_urbs(hif_dev
) < 0)
755 if (ath9k_hif_usb_alloc_rx_urbs(hif_dev
) < 0)
759 if (ath9k_hif_usb_alloc_reg_in_urb(hif_dev
) < 0)
764 ath9k_hif_usb_dealloc_rx_urbs(hif_dev
);
766 ath9k_hif_usb_dealloc_tx_urbs(hif_dev
);
771 static void ath9k_hif_usb_dealloc_urbs(struct hif_device_usb
*hif_dev
)
773 usb_kill_anchored_urbs(&hif_dev
->regout_submitted
);
774 ath9k_hif_usb_dealloc_reg_in_urb(hif_dev
);
775 ath9k_hif_usb_dealloc_tx_urbs(hif_dev
);
776 ath9k_hif_usb_dealloc_rx_urbs(hif_dev
);
779 static int ath9k_hif_usb_download_fw(struct hif_device_usb
*hif_dev
)
782 const void *data
= hif_dev
->firmware
->data
;
783 size_t len
= hif_dev
->firmware
->size
;
784 u32 addr
= AR9271_FIRMWARE
;
785 u8
*buf
= kzalloc(4096, GFP_KERNEL
);
792 transfer
= min_t(int, len
, 4096);
793 memcpy(buf
, data
, transfer
);
795 err
= usb_control_msg(hif_dev
->udev
,
796 usb_sndctrlpipe(hif_dev
->udev
, 0),
797 FIRMWARE_DOWNLOAD
, 0x40 | USB_DIR_OUT
,
798 addr
>> 8, 0, buf
, transfer
, HZ
);
810 switch (hif_dev
->device_id
) {
816 firm_offset
= AR7010_FIRMWARE_TEXT
;
819 firm_offset
= AR9271_FIRMWARE_TEXT
;
824 * Issue FW download complete command to firmware.
826 err
= usb_control_msg(hif_dev
->udev
, usb_sndctrlpipe(hif_dev
->udev
, 0),
827 FIRMWARE_DOWNLOAD_COMP
,
829 firm_offset
>> 8, 0, NULL
, 0, HZ
);
833 dev_info(&hif_dev
->udev
->dev
, "ath9k_htc: Transferred FW: %s, size: %ld\n",
834 hif_dev
->fw_name
, (unsigned long) hif_dev
->firmware
->size
);
839 static int ath9k_hif_usb_dev_init(struct hif_device_usb
*hif_dev
)
842 struct usb_host_interface
*alt
= &hif_dev
->interface
->altsetting
[0];
843 struct usb_endpoint_descriptor
*endp
;
845 /* Request firmware */
846 ret
= request_firmware(&hif_dev
->firmware
, hif_dev
->fw_name
,
847 &hif_dev
->udev
->dev
);
849 dev_err(&hif_dev
->udev
->dev
,
850 "ath9k_htc: Firmware - %s not found\n", hif_dev
->fw_name
);
854 /* Download firmware */
855 ret
= ath9k_hif_usb_download_fw(hif_dev
);
857 dev_err(&hif_dev
->udev
->dev
,
858 "ath9k_htc: Firmware - %s download failed\n",
860 goto err_fw_download
;
863 /* On downloading the firmware to the target, the USB descriptor of EP4
864 * is 'patched' to change the type of the endpoint to Bulk. This will
865 * bring down CPU usage during the scan period.
867 for (idx
= 0; idx
< alt
->desc
.bNumEndpoints
; idx
++) {
868 endp
= &alt
->endpoint
[idx
].desc
;
869 if ((endp
->bmAttributes
& USB_ENDPOINT_XFERTYPE_MASK
)
870 == USB_ENDPOINT_XFER_INT
) {
871 endp
->bmAttributes
&= ~USB_ENDPOINT_XFERTYPE_MASK
;
872 endp
->bmAttributes
|= USB_ENDPOINT_XFER_BULK
;
878 ret
= ath9k_hif_usb_alloc_urbs(hif_dev
);
880 dev_err(&hif_dev
->udev
->dev
,
881 "ath9k_htc: Unable to allocate URBs\n");
888 ath9k_hif_usb_dealloc_urbs(hif_dev
);
890 release_firmware(hif_dev
->firmware
);
892 hif_dev
->firmware
= NULL
;
896 static void ath9k_hif_usb_dev_deinit(struct hif_device_usb
*hif_dev
)
898 ath9k_hif_usb_dealloc_urbs(hif_dev
);
899 if (hif_dev
->firmware
)
900 release_firmware(hif_dev
->firmware
);
903 static int ath9k_hif_usb_probe(struct usb_interface
*interface
,
904 const struct usb_device_id
*id
)
906 struct usb_device
*udev
= interface_to_usbdev(interface
);
907 struct hif_device_usb
*hif_dev
;
910 hif_dev
= kzalloc(sizeof(struct hif_device_usb
), GFP_KERNEL
);
917 hif_dev
->udev
= udev
;
918 hif_dev
->interface
= interface
;
919 hif_dev
->device_id
= id
->idProduct
;
921 udev
->reset_resume
= 1;
923 usb_set_intfdata(interface
, hif_dev
);
925 hif_dev
->htc_handle
= ath9k_htc_hw_alloc(hif_dev
, &hif_usb
,
926 &hif_dev
->udev
->dev
);
927 if (hif_dev
->htc_handle
== NULL
) {
929 goto err_htc_hw_alloc
;
932 /* Find out which firmware to load */
934 switch(hif_dev
->device_id
) {
940 if (le16_to_cpu(udev
->descriptor
.bcdDevice
) == 0x0202)
941 hif_dev
->fw_name
= FIRMWARE_AR7010_1_1
;
943 hif_dev
->fw_name
= FIRMWARE_AR7010
;
946 hif_dev
->fw_name
= FIRMWARE_AR9271
;
950 ret
= ath9k_hif_usb_dev_init(hif_dev
);
953 goto err_hif_init_usb
;
956 ret
= ath9k_htc_hw_init(hif_dev
->htc_handle
,
957 &hif_dev
->udev
->dev
, hif_dev
->device_id
,
958 hif_dev
->udev
->product
);
961 goto err_htc_hw_init
;
964 dev_info(&hif_dev
->udev
->dev
, "ath9k_htc: USB layer initialized\n");
969 ath9k_hif_usb_dev_deinit(hif_dev
);
971 ath9k_htc_hw_free(hif_dev
->htc_handle
);
973 usb_set_intfdata(interface
, NULL
);
980 static void ath9k_hif_usb_reboot(struct usb_device
*udev
)
982 u32 reboot_cmd
= 0xffffffff;
986 buf
= kmemdup(&reboot_cmd
, 4, GFP_KERNEL
);
990 ret
= usb_bulk_msg(udev
, usb_sndbulkpipe(udev
, USB_REG_OUT_PIPE
),
993 dev_err(&udev
->dev
, "ath9k_htc: USB reboot failed\n");
998 static void ath9k_hif_usb_disconnect(struct usb_interface
*interface
)
1000 struct usb_device
*udev
= interface_to_usbdev(interface
);
1001 struct hif_device_usb
*hif_dev
=
1002 (struct hif_device_usb
*) usb_get_intfdata(interface
);
1005 ath9k_htc_hw_deinit(hif_dev
->htc_handle
,
1006 (udev
->state
== USB_STATE_NOTATTACHED
) ? true : false);
1007 ath9k_htc_hw_free(hif_dev
->htc_handle
);
1008 ath9k_hif_usb_dev_deinit(hif_dev
);
1009 usb_set_intfdata(interface
, NULL
);
1012 if (hif_dev
->flags
& HIF_USB_START
)
1013 ath9k_hif_usb_reboot(udev
);
1016 dev_info(&udev
->dev
, "ath9k_htc: USB layer deinitialized\n");
1021 static int ath9k_hif_usb_suspend(struct usb_interface
*interface
,
1022 pm_message_t message
)
1024 struct hif_device_usb
*hif_dev
=
1025 (struct hif_device_usb
*) usb_get_intfdata(interface
);
1027 ath9k_hif_usb_dealloc_urbs(hif_dev
);
1032 static int ath9k_hif_usb_resume(struct usb_interface
*interface
)
1034 struct hif_device_usb
*hif_dev
=
1035 (struct hif_device_usb
*) usb_get_intfdata(interface
);
1038 ret
= ath9k_hif_usb_alloc_urbs(hif_dev
);
1042 if (hif_dev
->firmware
) {
1043 ret
= ath9k_hif_usb_download_fw(hif_dev
);
1047 ath9k_hif_usb_dealloc_urbs(hif_dev
);
1053 ret
= ath9k_htc_resume(hif_dev
->htc_handle
);
1061 ath9k_hif_usb_dealloc_urbs(hif_dev
);
1067 static struct usb_driver ath9k_hif_usb_driver
= {
1068 .name
= "ath9k_hif_usb",
1069 .probe
= ath9k_hif_usb_probe
,
1070 .disconnect
= ath9k_hif_usb_disconnect
,
1072 .suspend
= ath9k_hif_usb_suspend
,
1073 .resume
= ath9k_hif_usb_resume
,
1074 .reset_resume
= ath9k_hif_usb_resume
,
1076 .id_table
= ath9k_hif_usb_ids
,
1080 int ath9k_hif_usb_init(void)
1082 return usb_register(&ath9k_hif_usb_driver
);
1085 void ath9k_hif_usb_exit(void)
1087 usb_deregister(&ath9k_hif_usb_driver
);