usb: dwc2: Fix usage of bool params

Check these parameters only for true or false. There is no need to compare
them against 0 or 1.
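
The pattern applied throughout the patch, shown here with the host_dma
parameter as it appears in dwc2_gahbcfg_init():

	/* before: relational test on a bool */
	if (hsotg->params.host_dma > 0)
		ahbcfg |= GAHBCFG_DMA_EN;

	/* after: plain boolean test */
	if (hsotg->params.host_dma)
		ahbcfg |= GAHBCFG_DMA_EN;

Assignments to these members are likewise switched from 0/1 to false/true,
e.g. hsotg->params.dma_desc_enable = false.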

Signed-off-by: John Youn <johnyoun@synopsys.com>
Signed-off-by: Felipe Balbi <felipe.balbi@linux.intel.com>
Authored by John Youn on 2017-01-23 14:57:26 -08:00; committed by Felipe Balbi
parent 7de1debcd2
commit 95832c00bc
9 changed files with 93 additions and 100 deletions


@ -541,7 +541,7 @@ void dwc2_dump_host_registers(struct dwc2_hsotg *hsotg)
addr = hsotg->regs + HAINTMSK;
dev_dbg(hsotg->dev, "HAINTMSK @0x%08lX : 0x%08X\n",
(unsigned long)addr, dwc2_readl(addr));
if (hsotg->params.dma_desc_enable > 0) {
if (hsotg->params.dma_desc_enable) {
addr = hsotg->regs + HFLBADDR;
dev_dbg(hsotg->dev, "HFLBADDR @0x%08lX : 0x%08X\n",
(unsigned long)addr, dwc2_readl(addr));
@ -571,7 +571,7 @@ void dwc2_dump_host_registers(struct dwc2_hsotg *hsotg)
addr = hsotg->regs + HCDMA(i);
dev_dbg(hsotg->dev, "HCDMA @0x%08lX : 0x%08X\n",
(unsigned long)addr, dwc2_readl(addr));
if (hsotg->params.dma_desc_enable > 0) {
if (hsotg->params.dma_desc_enable) {
addr = hsotg->regs + HCDMAB(i);
dev_dbg(hsotg->dev, "HCDMAB @0x%08lX : 0x%08X\n",
(unsigned long)addr, dwc2_readl(addr));


@ -470,9 +470,6 @@ struct dwc2_core_params {
u8 phy_utmi_width;
bool phy_ulpi_ddr;
bool phy_ulpi_ext_vbus;
#define DWC2_PHY_ULPI_INTERNAL_VBUS 0
#define DWC2_PHY_ULPI_EXTERNAL_VBUS 1
bool enable_dynamic_fifo;
bool en_multiple_tx_fifo;
bool i2c_enable;
@ -492,8 +489,6 @@ struct dwc2_core_params {
bool dma_desc_fs_enable;
bool host_support_fs_ls_low_power;
bool host_ls_low_power_phy_clk;
#define DWC2_HOST_LS_LOW_POWER_PHY_CLK_PARAM_48MHZ 0
#define DWC2_HOST_LS_LOW_POWER_PHY_CLK_PARAM_6MHZ 1
u8 host_channels;
u16 host_rx_fifo_size;


@ -160,7 +160,7 @@ static void dwc2_handle_otg_intr(struct dwc2_hsotg *hsotg)
gotgctl = dwc2_readl(hsotg->regs + GOTGCTL);
if (gotgctl & GOTGCTL_SESREQSCS) {
if (hsotg->params.phy_type == DWC2_PHY_TYPE_PARAM_FS &&
hsotg->params.i2c_enable > 0) {
hsotg->params.i2c_enable) {
hsotg->srp_success = 1;
} else {
/* Clear Session Request */


@ -3227,7 +3227,7 @@ void dwc2_hsotg_core_init_disconnected(struct dwc2_hsotg *hsotg,
if (!using_desc_dma(hsotg))
intmsk |= GINTSTS_INCOMPL_SOIN | GINTSTS_INCOMPL_SOOUT;
if (hsotg->params.external_id_pin_ctl <= 0)
if (!hsotg->params.external_id_pin_ctl)
intmsk |= GINTSTS_CONIDSTSCHNG;
dwc2_writel(intmsk, hsotg->regs + GINTMSK);


@ -79,9 +79,9 @@ static void dwc2_enable_common_interrupts(struct dwc2_hsotg *hsotg)
/* Enable the interrupts in the GINTMSK */
intmsk = GINTSTS_MODEMIS | GINTSTS_OTGINT;
if (hsotg->params.host_dma <= 0)
if (!hsotg->params.host_dma)
intmsk |= GINTSTS_RXFLVL;
if (hsotg->params.external_id_pin_ctl <= 0)
if (!hsotg->params.external_id_pin_ctl)
intmsk |= GINTSTS_CONIDSTSCHNG;
intmsk |= GINTSTS_WKUPINT | GINTSTS_USBSUSP |
@ -100,7 +100,7 @@ static void dwc2_init_fs_ls_pclk_sel(struct dwc2_hsotg *hsotg)
if ((hsotg->hw_params.hs_phy_type == GHWCFG2_HS_PHY_TYPE_ULPI &&
hsotg->hw_params.fs_phy_type == GHWCFG2_FS_PHY_TYPE_DEDICATED &&
hsotg->params.ulpi_fs_ls > 0) ||
hsotg->params.ulpi_fs_ls) ||
hsotg->params.phy_type == DWC2_PHY_TYPE_PARAM_FS) {
/* Full speed PHY */
val = HCFG_FSLSPCLKSEL_48_MHZ;
@ -152,7 +152,7 @@ static int dwc2_fs_phy_init(struct dwc2_hsotg *hsotg, bool select_phy)
if (dwc2_is_host_mode(hsotg))
dwc2_init_fs_ls_pclk_sel(hsotg);
if (hsotg->params.i2c_enable > 0) {
if (hsotg->params.i2c_enable) {
dev_dbg(hsotg->dev, "FS PHY enabling I2C\n");
/* Program GUSBCFG.OtgUtmiFsSel to I2C */
@ -195,7 +195,7 @@ static int dwc2_hs_phy_init(struct dwc2_hsotg *hsotg, bool select_phy)
dev_dbg(hsotg->dev, "HS ULPI PHY selected\n");
usbcfg |= GUSBCFG_ULPI_UTMI_SEL;
usbcfg &= ~(GUSBCFG_PHYIF16 | GUSBCFG_DDRSEL);
if (hsotg->params.phy_ulpi_ddr > 0)
if (hsotg->params.phy_ulpi_ddr)
usbcfg |= GUSBCFG_DDRSEL;
break;
case DWC2_PHY_TYPE_PARAM_UTMI:
@ -246,7 +246,7 @@ static int dwc2_phy_init(struct dwc2_hsotg *hsotg, bool select_phy)
if (hsotg->hw_params.hs_phy_type == GHWCFG2_HS_PHY_TYPE_ULPI &&
hsotg->hw_params.fs_phy_type == GHWCFG2_FS_PHY_TYPE_DEDICATED &&
hsotg->params.ulpi_fs_ls > 0) {
hsotg->params.ulpi_fs_ls) {
dev_dbg(hsotg->dev, "Setting ULPI FSLS\n");
usbcfg = dwc2_readl(hsotg->regs + GUSBCFG);
usbcfg |= GUSBCFG_ULPI_FS_LS;
@ -290,17 +290,17 @@ static int dwc2_gahbcfg_init(struct dwc2_hsotg *hsotg)
hsotg->params.host_dma,
hsotg->params.dma_desc_enable);
if (hsotg->params.host_dma > 0) {
if (hsotg->params.dma_desc_enable > 0)
if (hsotg->params.host_dma) {
if (hsotg->params.dma_desc_enable)
dev_dbg(hsotg->dev, "Using Descriptor DMA mode\n");
else
dev_dbg(hsotg->dev, "Using Buffer DMA mode\n");
} else {
dev_dbg(hsotg->dev, "Using Slave mode\n");
hsotg->params.dma_desc_enable = 0;
hsotg->params.dma_desc_enable = false;
}
if (hsotg->params.host_dma > 0)
if (hsotg->params.host_dma)
ahbcfg |= GAHBCFG_DMA_EN;
dwc2_writel(ahbcfg, hsotg->regs + GAHBCFG);
@ -491,7 +491,7 @@ static void dwc2_config_fifos(struct dwc2_hsotg *hsotg)
dev_dbg(hsotg->dev, "new hptxfsiz=%08x\n",
dwc2_readl(hsotg->regs + HPTXFSIZ));
if (hsotg->params.en_multiple_tx_fifo > 0 &&
if (hsotg->params.en_multiple_tx_fifo &&
hsotg->hw_params.snpsid <= DWC2_CORE_REV_2_94a) {
/*
* Global DFIFOCFG calculation for Host mode -
@ -771,7 +771,7 @@ static void dwc2_hc_enable_dma_ints(struct dwc2_hsotg *hsotg,
* For Descriptor DMA mode core halts the channel on AHB error.
* Interrupt is not required.
*/
if (hsotg->params.dma_desc_enable <= 0) {
if (!hsotg->params.dma_desc_enable) {
if (dbg_hc(chan))
dev_vdbg(hsotg->dev, "desc DMA disabled\n");
hcintmsk |= HCINTMSK_AHBERR;
@ -804,7 +804,7 @@ static void dwc2_hc_enable_ints(struct dwc2_hsotg *hsotg,
{
u32 intmsk;
if (hsotg->params.host_dma > 0) {
if (hsotg->params.host_dma) {
if (dbg_hc(chan))
dev_vdbg(hsotg->dev, "DMA enabled\n");
dwc2_hc_enable_dma_ints(hsotg, chan);
@ -1024,7 +1024,7 @@ void dwc2_hc_halt(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan,
/* No need to set the bit in DDMA for disabling the channel */
/* TODO check it everywhere channel is disabled */
if (hsotg->params.dma_desc_enable <= 0) {
if (!hsotg->params.dma_desc_enable) {
if (dbg_hc(chan))
dev_vdbg(hsotg->dev, "desc DMA disabled\n");
hcchar |= HCCHAR_CHENA;
@ -1034,7 +1034,7 @@ void dwc2_hc_halt(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan,
}
hcchar |= HCCHAR_CHDIS;
if (hsotg->params.host_dma <= 0) {
if (!hsotg->params.host_dma) {
if (dbg_hc(chan))
dev_vdbg(hsotg->dev, "DMA not enabled\n");
hcchar |= HCCHAR_CHENA;
@ -1380,7 +1380,7 @@ static void dwc2_hc_start_transfer(struct dwc2_hsotg *hsotg,
dev_vdbg(hsotg->dev, "%s()\n", __func__);
if (chan->do_ping) {
if (hsotg->params.host_dma <= 0) {
if (!hsotg->params.host_dma) {
if (dbg_hc(chan))
dev_vdbg(hsotg->dev, "ping, no DMA\n");
dwc2_hc_do_ping(hsotg, chan);
@ -1508,7 +1508,7 @@ static void dwc2_hc_start_transfer(struct dwc2_hsotg *hsotg,
TSIZ_SC_MC_PID_SHIFT);
}
if (hsotg->params.host_dma > 0) {
if (hsotg->params.host_dma) {
dwc2_writel((u32)chan->xfer_dma,
hsotg->regs + HCDMA(chan->hc_num));
if (dbg_hc(chan))
@ -1551,7 +1551,7 @@ static void dwc2_hc_start_transfer(struct dwc2_hsotg *hsotg,
chan->xfer_started = 1;
chan->requests++;
if (hsotg->params.host_dma <= 0 &&
if (!hsotg->params.host_dma &&
!chan->ep_is_in && chan->xfer_len > 0)
/* Load OUT packet into the appropriate Tx FIFO */
dwc2_hc_write_packet(hsotg, chan);
@ -1834,7 +1834,7 @@ static void dwc2_hcd_cleanup_channels(struct dwc2_hsotg *hsotg)
u32 hcchar;
int i;
if (hsotg->params.host_dma <= 0) {
if (!hsotg->params.host_dma) {
/* Flush out any channel requests in slave mode */
for (i = 0; i < num_channels; i++) {
channel = hsotg->hc_ptr_array[i];
@ -1870,7 +1870,7 @@ static void dwc2_hcd_cleanup_channels(struct dwc2_hsotg *hsotg)
channel->qh = NULL;
}
/* All channels have been freed, mark them available */
if (hsotg->params.uframe_sched > 0) {
if (hsotg->params.uframe_sched) {
hsotg->available_host_channels =
hsotg->params.host_channels;
} else {
@ -2107,7 +2107,7 @@ static int dwc2_hcd_urb_dequeue(struct dwc2_hsotg *hsotg,
* Free the QTD and clean up the associated QH. Leave the QH in the
* schedule if it has any remaining QTDs.
*/
if (hsotg->params.dma_desc_enable <= 0) {
if (!hsotg->params.dma_desc_enable) {
u8 in_process = urb_qtd->in_process;
dwc2_hcd_qtd_unlink_and_free(hsotg, urb_qtd, qh);
@ -2215,13 +2215,12 @@ static int dwc2_core_init(struct dwc2_hsotg *hsotg, bool initial_setup)
/* Set ULPI External VBUS bit if needed */
usbcfg &= ~GUSBCFG_ULPI_EXT_VBUS_DRV;
if (hsotg->params.phy_ulpi_ext_vbus ==
DWC2_PHY_ULPI_EXTERNAL_VBUS)
if (hsotg->params.phy_ulpi_ext_vbus)
usbcfg |= GUSBCFG_ULPI_EXT_VBUS_DRV;
/* Set external TS Dline pulsing bit if needed */
usbcfg &= ~GUSBCFG_TERMSELDLPULSE;
if (hsotg->params.ts_dline > 0)
if (hsotg->params.ts_dline)
usbcfg |= GUSBCFG_TERMSELDLPULSE;
dwc2_writel(usbcfg, hsotg->regs + GUSBCFG);
@ -2316,13 +2315,13 @@ static void dwc2_core_host_init(struct dwc2_hsotg *hsotg)
* runtime. This bit needs to be programmed during initial configuration
* and its value must not be changed during runtime.
*/
if (hsotg->params.reload_ctl > 0) {
if (hsotg->params.reload_ctl) {
hfir = dwc2_readl(hsotg->regs + HFIR);
hfir |= HFIR_RLDCTRL;
dwc2_writel(hfir, hsotg->regs + HFIR);
}
if (hsotg->params.dma_desc_enable > 0) {
if (hsotg->params.dma_desc_enable) {
u32 op_mode = hsotg->hw_params.op_mode;
if (hsotg->hw_params.snpsid < DWC2_CORE_REV_2_90a ||
@ -2334,7 +2333,7 @@ static void dwc2_core_host_init(struct dwc2_hsotg *hsotg)
"Hardware does not support descriptor DMA mode -\n");
dev_err(hsotg->dev,
"falling back to buffer DMA mode.\n");
hsotg->params.dma_desc_enable = 0;
hsotg->params.dma_desc_enable = false;
} else {
hcfg = dwc2_readl(hsotg->regs + HCFG);
hcfg |= HCFG_DESCDMA;
@ -2360,7 +2359,7 @@ static void dwc2_core_host_init(struct dwc2_hsotg *hsotg)
otgctl &= ~GOTGCTL_HSTSETHNPEN;
dwc2_writel(otgctl, hsotg->regs + GOTGCTL);
if (hsotg->params.dma_desc_enable <= 0) {
if (!hsotg->params.dma_desc_enable) {
int num_channels, i;
u32 hcchar;
@ -2427,7 +2426,7 @@ static void dwc2_hcd_reinit(struct dwc2_hsotg *hsotg)
hsotg->flags.d32 = 0;
hsotg->non_periodic_qh_ptr = &hsotg->non_periodic_sched_active;
if (hsotg->params.uframe_sched > 0) {
if (hsotg->params.uframe_sched) {
hsotg->available_host_channels =
hsotg->params.host_channels;
} else {
@ -2485,7 +2484,7 @@ static void dwc2_hc_init_xfer(struct dwc2_hsotg *hsotg,
chan->do_ping = 0;
chan->ep_is_in = 0;
chan->data_pid_start = DWC2_HC_PID_SETUP;
if (hsotg->params.host_dma > 0)
if (hsotg->params.host_dma)
chan->xfer_dma = urb->setup_dma;
else
chan->xfer_buf = urb->setup_packet;
@ -2512,7 +2511,7 @@ static void dwc2_hc_init_xfer(struct dwc2_hsotg *hsotg,
chan->do_ping = 0;
chan->data_pid_start = DWC2_HC_PID_DATA1;
chan->xfer_len = 0;
if (hsotg->params.host_dma > 0)
if (hsotg->params.host_dma)
chan->xfer_dma = hsotg->status_buf_dma;
else
chan->xfer_buf = hsotg->status_buf;
@ -2530,13 +2529,13 @@ static void dwc2_hc_init_xfer(struct dwc2_hsotg *hsotg,
case USB_ENDPOINT_XFER_ISOC:
chan->ep_type = USB_ENDPOINT_XFER_ISOC;
if (hsotg->params.dma_desc_enable > 0)
if (hsotg->params.dma_desc_enable)
break;
frame_desc = &urb->iso_descs[qtd->isoc_frame_index];
frame_desc->status = 0;
if (hsotg->params.host_dma > 0) {
if (hsotg->params.host_dma) {
chan->xfer_dma = urb->dma;
chan->xfer_dma += frame_desc->offset +
qtd->isoc_split_offset;
@ -2718,7 +2717,7 @@ static int dwc2_assign_and_init_hc(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh)
!dwc2_hcd_is_pipe_in(&urb->pipe_info))
urb->actual_length = urb->length;
if (hsotg->params.host_dma > 0)
if (hsotg->params.host_dma)
chan->xfer_dma = urb->dma + urb->actual_length;
else
chan->xfer_buf = (u8 *)urb->buf + urb->actual_length;
@ -2743,7 +2742,7 @@ static int dwc2_assign_and_init_hc(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh)
*/
chan->multi_count = dwc2_hb_mult(qh->maxp);
if (hsotg->params.dma_desc_enable > 0) {
if (hsotg->params.dma_desc_enable) {
chan->desc_list_addr = qh->desc_list_dma;
chan->desc_list_sz = qh->desc_list_sz;
}
@ -2780,7 +2779,7 @@ enum dwc2_transaction_type dwc2_hcd_select_transactions(
while (qh_ptr != &hsotg->periodic_sched_ready) {
if (list_empty(&hsotg->free_hc_list))
break;
if (hsotg->params.uframe_sched > 0) {
if (hsotg->params.uframe_sched) {
if (hsotg->available_host_channels <= 1)
break;
hsotg->available_host_channels--;
@ -2807,14 +2806,14 @@ enum dwc2_transaction_type dwc2_hcd_select_transactions(
num_channels = hsotg->params.host_channels;
qh_ptr = hsotg->non_periodic_sched_inactive.next;
while (qh_ptr != &hsotg->non_periodic_sched_inactive) {
if (hsotg->params.uframe_sched <= 0 &&
if (!hsotg->params.uframe_sched &&
hsotg->non_periodic_channels >= num_channels -
hsotg->periodic_channels)
break;
if (list_empty(&hsotg->free_hc_list))
break;
qh = list_entry(qh_ptr, struct dwc2_qh, qh_list_entry);
if (hsotg->params.uframe_sched > 0) {
if (hsotg->params.uframe_sched) {
if (hsotg->available_host_channels < 1)
break;
hsotg->available_host_channels--;
@ -2836,7 +2835,7 @@ enum dwc2_transaction_type dwc2_hcd_select_transactions(
else
ret_val = DWC2_TRANSACTION_ALL;
if (hsotg->params.uframe_sched <= 0)
if (!hsotg->params.uframe_sched)
hsotg->non_periodic_channels++;
}
@ -2875,8 +2874,8 @@ static int dwc2_queue_transaction(struct dwc2_hsotg *hsotg,
list_move_tail(&chan->split_order_list_entry,
&hsotg->split_order);
if (hsotg->params.host_dma > 0) {
if (hsotg->params.dma_desc_enable > 0) {
if (hsotg->params.host_dma) {
if (hsotg->params.dma_desc_enable) {
if (!chan->xfer_started ||
chan->ep_type == USB_ENDPOINT_XFER_ISOC) {
dwc2_hcd_start_xfer_ddma(hsotg, chan->qh);
@ -2985,7 +2984,7 @@ static void dwc2_process_periodic_channels(struct dwc2_hsotg *hsotg)
* The flag prevents any halts to get into the request queue in
* the middle of multiple high-bandwidth packets getting queued.
*/
if (hsotg->params.host_dma <= 0 &&
if (!hsotg->params.host_dma &&
qh->channel->multi_count > 1)
hsotg->queuing_high_bandwidth = 1;
@ -3004,7 +3003,7 @@ static void dwc2_process_periodic_channels(struct dwc2_hsotg *hsotg)
* controller automatically handles multiple packets for
* high-bandwidth transfers.
*/
if (hsotg->params.host_dma > 0 || status == 0 ||
if (hsotg->params.host_dma || status == 0 ||
qh->channel->requests == qh->channel->multi_count) {
qh_ptr = qh_ptr->next;
/*
@ -3021,7 +3020,7 @@ static void dwc2_process_periodic_channels(struct dwc2_hsotg *hsotg)
exit:
if (no_queue_space || no_fifo_space ||
(hsotg->params.host_dma <= 0 &&
(!hsotg->params.host_dma &&
!list_empty(&hsotg->periodic_sched_assigned))) {
/*
* May need to queue more transactions as the request
@ -3101,7 +3100,7 @@ static void dwc2_process_non_periodic_channels(struct dwc2_hsotg *hsotg)
tx_status = dwc2_readl(hsotg->regs + GNPTXSTS);
qspcavail = (tx_status & TXSTS_QSPCAVAIL_MASK) >>
TXSTS_QSPCAVAIL_SHIFT;
if (hsotg->params.host_dma <= 0 && qspcavail == 0) {
if (!hsotg->params.host_dma && qspcavail == 0) {
no_queue_space = 1;
break;
}
@ -3134,7 +3133,7 @@ next:
hsotg->non_periodic_qh_ptr->next;
} while (hsotg->non_periodic_qh_ptr != orig_qh_ptr);
if (hsotg->params.host_dma <= 0) {
if (!hsotg->params.host_dma) {
tx_status = dwc2_readl(hsotg->regs + GNPTXSTS);
qspcavail = (tx_status & TXSTS_QSPCAVAIL_MASK) >>
TXSTS_QSPCAVAIL_SHIFT;
@ -3611,7 +3610,7 @@ static int dwc2_hcd_hub_control(struct dwc2_hsotg *hsotg, u16 typereq,
u32 hcfg;
dev_info(hsotg->dev, "Enabling descriptor DMA mode\n");
hsotg->params.dma_desc_enable = 1;
hsotg->params.dma_desc_enable = true;
hcfg = dwc2_readl(hsotg->regs + HCFG);
hcfg |= HCFG_DESCDMA;
dwc2_writel(hcfg, hsotg->regs + HCFG);
@ -4912,7 +4911,7 @@ static void dwc2_hcd_free(struct dwc2_hsotg *hsotg)
}
}
if (hsotg->params.host_dma > 0) {
if (hsotg->params.host_dma) {
if (hsotg->status_buf) {
dma_free_coherent(hsotg->dev, DWC2_HCD_STATUS_BUF_SIZE,
hsotg->status_buf,
@ -4992,16 +4991,16 @@ int dwc2_hcd_init(struct dwc2_hsotg *hsotg, int irq)
hsotg->last_frame_num = HFNUM_MAX_FRNUM;
/* Check if the bus driver or platform code has setup a dma_mask */
if (hsotg->params.host_dma > 0 &&
if (hsotg->params.host_dma &&
!hsotg->dev->dma_mask) {
dev_warn(hsotg->dev,
"dma_mask not set, disabling DMA\n");
hsotg->params.host_dma = false;
hsotg->params.dma_desc_enable = 0;
hsotg->params.dma_desc_enable = false;
}
/* Set device flags indicating whether the HCD supports DMA */
if (hsotg->params.host_dma > 0) {
if (hsotg->params.host_dma) {
if (dma_set_mask(hsotg->dev, DMA_BIT_MASK(32)) < 0)
dev_warn(hsotg->dev, "can't set DMA mask\n");
if (dma_set_coherent_mask(hsotg->dev, DMA_BIT_MASK(32)) < 0)
@ -5012,7 +5011,7 @@ int dwc2_hcd_init(struct dwc2_hsotg *hsotg, int irq)
if (!hcd)
goto error1;
if (hsotg->params.host_dma <= 0)
if (!hsotg->params.host_dma)
hcd->self.uses_dma = 0;
hcd->has_tt = 1;
@ -5084,7 +5083,7 @@ int dwc2_hcd_init(struct dwc2_hsotg *hsotg, int irq)
* done after usb_add_hcd since that function allocates the DMA buffer
* pool.
*/
if (hsotg->params.host_dma > 0)
if (hsotg->params.host_dma)
hsotg->status_buf = dma_alloc_coherent(hsotg->dev,
DWC2_HCD_STATUS_BUF_SIZE,
&hsotg->status_buf_dma, GFP_KERNEL);
@ -5114,8 +5113,8 @@ int dwc2_hcd_init(struct dwc2_hsotg *hsotg, int irq)
* Disable descriptor dma mode since it will not be
* usable.
*/
hsotg->params.dma_desc_enable = 0;
hsotg->params.dma_desc_fs_enable = 0;
hsotg->params.dma_desc_enable = false;
hsotg->params.dma_desc_fs_enable = false;
}
hsotg->desc_hsisoc_cache = kmem_cache_create("dwc2-hsisoc-desc",
@ -5131,8 +5130,8 @@ int dwc2_hcd_init(struct dwc2_hsotg *hsotg, int irq)
* Disable descriptor dma mode since it will not be
* usable.
*/
hsotg->params.dma_desc_enable = 0;
hsotg->params.dma_desc_fs_enable = 0;
hsotg->params.dma_desc_enable = false;
hsotg->params.dma_desc_fs_enable = false;
}
}


@ -296,7 +296,7 @@ static void dwc2_release_channel_ddma(struct dwc2_hsotg *hsotg,
struct dwc2_host_chan *chan = qh->channel;
if (dwc2_qh_is_non_per(qh)) {
if (hsotg->params.uframe_sched > 0)
if (hsotg->params.uframe_sched)
hsotg->available_host_channels++;
else
hsotg->non_periodic_channels--;
@ -403,7 +403,7 @@ void dwc2_hcd_qh_free_ddma(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh)
if ((qh->ep_type == USB_ENDPOINT_XFER_ISOC ||
qh->ep_type == USB_ENDPOINT_XFER_INT) &&
(hsotg->params.uframe_sched > 0 ||
(hsotg->params.uframe_sched ||
!hsotg->periodic_channels) && hsotg->frame_list) {
dwc2_per_sched_disable(hsotg);
dwc2_frame_list_free(hsotg);


@ -297,8 +297,7 @@ static void dwc2_hprt0_enable(struct dwc2_hsotg *hsotg, u32 hprt0,
HCFG_FSLSPCLKSEL_SHIFT;
if (prtspd == HPRT0_SPD_LOW_SPEED &&
params->host_ls_low_power_phy_clk ==
DWC2_HOST_LS_LOW_POWER_PHY_CLK_PARAM_6MHZ) {
params->host_ls_low_power_phy_clk) {
/* 6 MHZ */
dev_vdbg(hsotg->dev,
"FS_PHY programming HCFG to 6 MHz\n");
@ -398,7 +397,7 @@ static void dwc2_port_intr(struct dwc2_hsotg *hsotg)
if (hsotg->params.dma_desc_fs_enable) {
u32 hcfg;
hsotg->params.dma_desc_enable = 0;
hsotg->params.dma_desc_enable = false;
hsotg->new_connection = false;
hcfg = dwc2_readl(hsotg->regs + HCFG);
hcfg &= ~HCFG_DESCDMA;
@ -604,7 +603,7 @@ static enum dwc2_halt_status dwc2_update_isoc_urb_state(
/* Skip whole frame */
if (chan->qh->do_split &&
chan->ep_type == USB_ENDPOINT_XFER_ISOC && chan->ep_is_in &&
hsotg->params.host_dma > 0) {
hsotg->params.host_dma) {
qtd->complete_split = 0;
qtd->isoc_split_offset = 0;
}
@ -743,7 +742,7 @@ cleanup:
dwc2_hc_cleanup(hsotg, chan);
list_add_tail(&chan->hc_list_entry, &hsotg->free_hc_list);
if (hsotg->params.uframe_sched > 0) {
if (hsotg->params.uframe_sched) {
hsotg->available_host_channels++;
} else {
switch (chan->ep_type) {
@ -789,7 +788,7 @@ static void dwc2_halt_channel(struct dwc2_hsotg *hsotg,
if (dbg_hc(chan))
dev_vdbg(hsotg->dev, "%s()\n", __func__);
if (hsotg->params.host_dma > 0) {
if (hsotg->params.host_dma) {
if (dbg_hc(chan))
dev_vdbg(hsotg->dev, "DMA enabled\n");
dwc2_release_channel(hsotg, chan, qtd, halt_status);
@ -979,7 +978,7 @@ static void dwc2_hc_xfercomp_intr(struct dwc2_hsotg *hsotg,
pipe_type = dwc2_hcd_get_pipe_type(&urb->pipe_info);
if (hsotg->params.dma_desc_enable > 0) {
if (hsotg->params.dma_desc_enable) {
dwc2_hcd_complete_xfer_ddma(hsotg, chan, chnum, halt_status);
if (pipe_type == USB_ENDPOINT_XFER_ISOC)
/* Do not disable the interrupt, just clear it */
@ -990,7 +989,7 @@ static void dwc2_hc_xfercomp_intr(struct dwc2_hsotg *hsotg,
/* Handle xfer complete on CSPLIT */
if (chan->qh->do_split) {
if (chan->ep_type == USB_ENDPOINT_XFER_ISOC && chan->ep_is_in &&
hsotg->params.host_dma > 0) {
hsotg->params.host_dma) {
if (qtd->complete_split &&
dwc2_xfercomp_isoc_split_in(hsotg, chan, chnum,
qtd))
@ -1103,7 +1102,7 @@ static void dwc2_hc_stall_intr(struct dwc2_hsotg *hsotg,
dev_dbg(hsotg->dev, "--Host Channel %d Interrupt: STALL Received--\n",
chnum);
if (hsotg->params.dma_desc_enable > 0) {
if (hsotg->params.dma_desc_enable) {
dwc2_hcd_complete_xfer_ddma(hsotg, chan, chnum,
DWC2_HC_XFER_STALL);
goto handle_stall_done;
@ -1213,7 +1212,7 @@ static void dwc2_hc_nak_intr(struct dwc2_hsotg *hsotg,
switch (dwc2_hcd_get_pipe_type(&qtd->urb->pipe_info)) {
case USB_ENDPOINT_XFER_CONTROL:
case USB_ENDPOINT_XFER_BULK:
if (hsotg->params.host_dma > 0 && chan->ep_is_in) {
if (hsotg->params.host_dma && chan->ep_is_in) {
/*
* NAK interrupts are enabled on bulk/control IN
* transfers in DMA mode for the sole purpose of
@ -1359,7 +1358,7 @@ static void dwc2_hc_nyet_intr(struct dwc2_hsotg *hsotg,
*/
if (chan->do_split && chan->complete_split) {
if (chan->ep_is_in && chan->ep_type == USB_ENDPOINT_XFER_ISOC &&
hsotg->params.host_dma > 0) {
hsotg->params.host_dma) {
qtd->complete_split = 0;
qtd->isoc_split_offset = 0;
qtd->isoc_frame_index++;
@ -1380,7 +1379,7 @@ static void dwc2_hc_nyet_intr(struct dwc2_hsotg *hsotg,
struct dwc2_qh *qh = chan->qh;
bool past_end;
if (hsotg->params.uframe_sched <= 0) {
if (!hsotg->params.uframe_sched) {
int frnum = dwc2_hcd_get_frame_number(hsotg);
/* Don't have num_hs_transfers; simple logic */
@ -1478,7 +1477,7 @@ static void dwc2_hc_babble_intr(struct dwc2_hsotg *hsotg,
dwc2_hc_handle_tt_clear(hsotg, chan, qtd);
if (hsotg->params.dma_desc_enable > 0) {
if (hsotg->params.dma_desc_enable) {
dwc2_hcd_complete_xfer_ddma(hsotg, chan, chnum,
DWC2_HC_XFER_BABBLE_ERR);
goto disable_int;
@ -1583,7 +1582,7 @@ static void dwc2_hc_ahberr_intr(struct dwc2_hsotg *hsotg,
dev_err(hsotg->dev, " Interval: %d\n", urb->interval);
/* Core halts the channel for Descriptor DMA mode */
if (hsotg->params.dma_desc_enable > 0) {
if (hsotg->params.dma_desc_enable) {
dwc2_hcd_complete_xfer_ddma(hsotg, chan, chnum,
DWC2_HC_XFER_AHB_ERR);
goto handle_ahberr_done;
@ -1615,7 +1614,7 @@ static void dwc2_hc_xacterr_intr(struct dwc2_hsotg *hsotg,
dwc2_hc_handle_tt_clear(hsotg, chan, qtd);
if (hsotg->params.dma_desc_enable > 0) {
if (hsotg->params.dma_desc_enable) {
dwc2_hcd_complete_xfer_ddma(hsotg, chan, chnum,
DWC2_HC_XFER_XACT_ERR);
goto handle_xacterr_done;
@ -1808,8 +1807,8 @@ static void dwc2_hc_chhltd_intr_dma(struct dwc2_hsotg *hsotg,
if (chan->halt_status == DWC2_HC_XFER_URB_DEQUEUE ||
(chan->halt_status == DWC2_HC_XFER_AHB_ERR &&
hsotg->params.dma_desc_enable <= 0)) {
if (hsotg->params.dma_desc_enable > 0)
!hsotg->params.dma_desc_enable)) {
if (hsotg->params.dma_desc_enable)
dwc2_hcd_complete_xfer_ddma(hsotg, chan, chnum,
chan->halt_status);
else
@ -1840,7 +1839,7 @@ static void dwc2_hc_chhltd_intr_dma(struct dwc2_hsotg *hsotg,
} else if (chan->hcint & HCINTMSK_STALL) {
dwc2_hc_stall_intr(hsotg, chan, chnum, qtd);
} else if ((chan->hcint & HCINTMSK_XACTERR) &&
hsotg->params.dma_desc_enable <= 0) {
!hsotg->params.dma_desc_enable) {
if (out_nak_enh) {
if (chan->hcint &
(HCINTMSK_NYET | HCINTMSK_NAK | HCINTMSK_ACK)) {
@ -1860,10 +1859,10 @@ static void dwc2_hc_chhltd_intr_dma(struct dwc2_hsotg *hsotg,
*/
dwc2_hc_xacterr_intr(hsotg, chan, chnum, qtd);
} else if ((chan->hcint & HCINTMSK_XCS_XACT) &&
hsotg->params.dma_desc_enable > 0) {
hsotg->params.dma_desc_enable) {
dwc2_hc_xacterr_intr(hsotg, chan, chnum, qtd);
} else if ((chan->hcint & HCINTMSK_AHBERR) &&
hsotg->params.dma_desc_enable > 0) {
hsotg->params.dma_desc_enable) {
dwc2_hc_ahberr_intr(hsotg, chan, chnum, qtd);
} else if (chan->hcint & HCINTMSK_BBLERR) {
dwc2_hc_babble_intr(hsotg, chan, chnum, qtd);
@ -1956,7 +1955,7 @@ static void dwc2_hc_chhltd_intr(struct dwc2_hsotg *hsotg,
dev_vdbg(hsotg->dev, "--Host Channel %d Interrupt: Channel Halted--\n",
chnum);
if (hsotg->params.host_dma > 0) {
if (hsotg->params.host_dma) {
dwc2_hc_chhltd_intr_dma(hsotg, chan, chnum, qtd);
} else {
if (!dwc2_halt_status_ok(hsotg, chan, chnum, qtd))
@ -2033,7 +2032,7 @@ static void dwc2_hc_n_intr(struct dwc2_hsotg *hsotg, int chnum)
* interrupt unmasked
*/
WARN_ON(hcint != HCINTMSK_CHHLTD);
if (hsotg->params.dma_desc_enable > 0)
if (hsotg->params.dma_desc_enable)
dwc2_hcd_complete_xfer_ddma(hsotg, chan, chnum,
chan->halt_status);
else
@ -2061,7 +2060,7 @@ static void dwc2_hc_n_intr(struct dwc2_hsotg *hsotg, int chnum)
qtd = list_first_entry(&chan->qh->qtd_list, struct dwc2_qtd,
qtd_list_entry);
if (hsotg->params.host_dma <= 0) {
if (!hsotg->params.host_dma) {
if ((hcint & HCINTMSK_CHHLTD) && hcint != HCINTMSK_CHHLTD)
hcint &= ~HCINTMSK_CHHLTD;
}


@ -1104,7 +1104,7 @@ static void dwc2_pick_first_frame(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh)
next_active_frame = earliest_frame;
/* Get the "no microframe schduler" out of the way... */
if (hsotg->params.uframe_sched <= 0) {
if (!hsotg->params.uframe_sched) {
if (qh->do_split)
/* Splits are active at microframe 0 minus 1 */
next_active_frame |= 0x7;
@ -1197,7 +1197,7 @@ static int dwc2_do_reserve(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh)
{
int status;
if (hsotg->params.uframe_sched > 0) {
if (hsotg->params.uframe_sched) {
status = dwc2_uframe_schedule(hsotg, qh);
} else {
status = dwc2_periodic_channel_available(hsotg);
@ -1218,7 +1218,7 @@ static int dwc2_do_reserve(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh)
return status;
}
if (hsotg->params.uframe_sched <= 0)
if (!hsotg->params.uframe_sched)
/* Reserve periodic channel */
hsotg->periodic_channels++;
@ -1254,7 +1254,7 @@ static void dwc2_do_unreserve(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh)
/* Update claimed usecs per (micro)frame */
hsotg->periodic_usecs -= qh->host_us;
if (hsotg->params.uframe_sched > 0) {
if (hsotg->params.uframe_sched) {
dwc2_uframe_unschedule(hsotg, qh);
} else {
/* Release periodic channel reservation */
@ -1391,7 +1391,7 @@ static int dwc2_schedule_periodic(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh)
qh->unreserve_pending = 0;
if (hsotg->params.dma_desc_enable > 0)
if (hsotg->params.dma_desc_enable)
/* Don't rely on SOF and start in ready schedule */
list_add_tail(&qh->qh_list_entry, &hsotg->periodic_sched_ready);
else
@ -1598,7 +1598,7 @@ struct dwc2_qh *dwc2_hcd_qh_create(struct dwc2_hsotg *hsotg,
dwc2_qh_init(hsotg, qh, urb, mem_flags);
if (hsotg->params.dma_desc_enable > 0 &&
if (hsotg->params.dma_desc_enable &&
dwc2_hcd_qh_init_ddma(hsotg, qh, mem_flags) < 0) {
dwc2_hcd_qh_free(hsotg, qh);
return NULL;
@ -1710,7 +1710,7 @@ void dwc2_hcd_qh_unlink(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh)
dwc2_deschedule_periodic(hsotg, qh);
hsotg->periodic_qh_count--;
if (!hsotg->periodic_qh_count &&
hsotg->params.dma_desc_enable <= 0) {
!hsotg->params.dma_desc_enable) {
intr_mask = dwc2_readl(hsotg->regs + GINTMSK);
intr_mask &= ~GINTSTS_SOF;
dwc2_writel(intr_mask, hsotg->regs + GINTMSK);


@ -53,7 +53,7 @@ static void dwc2_set_bcm_params(struct dwc2_hsotg *hsotg)
p->phy_type = 1;
p->phy_utmi_width = 8;
p->i2c_enable = false;
p->host_ls_low_power_phy_clk = 0;
p->host_ls_low_power_phy_clk = false;
p->reload_ctl = false;
p->ahbcfg = 0x10;
p->uframe_sched = false;
@ -74,7 +74,7 @@ static void dwc2_set_his_params(struct dwc2_hsotg *hsotg)
p->phy_type = DWC2_PHY_TYPE_PARAM_UTMI;
p->phy_utmi_width = 8;
p->i2c_enable = false;
p->host_ls_low_power_phy_clk = 0;
p->host_ls_low_power_phy_clk = false;
p->reload_ctl = false;
p->ahbcfg = GAHBCFG_HBSTLEN_INCR16 <<
GAHBCFG_HBSTLEN_SHIFT;