/*
 * Copyright (c) 2011-2014, The Linux Foundation. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 and
 * only version 2 as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

#ifndef _DMA_H_
#define _DMA_H_

#include <linux/dmaengine.h>

/*
 * Maximum data transfer block size between the BAM (Bus Access Manager)
 * DMA and the crypto engine (CE).
 */
#define QCE_BAM_BURST_SIZE		64

#define QCE_AUTHIV_REGS_CNT		16
#define QCE_AUTH_BYTECOUNT_REGS_CNT	4
#define QCE_CNTRIV_REGS_CNT		4

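/*
 * Result dump region read back from the crypto engine over BAM at the end
 * of a request: authentication IVs, authentication byte counts, the
 * encryption counter IV and two status words, mirroring the corresponding
 * CE output registers (description inferred from the field names and the
 * result buffer handling in the qce driver).
 */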
struct qce_result_dump {
	u32 auth_iv[QCE_AUTHIV_REGS_CNT];
	u32 auth_byte_count[QCE_AUTH_BYTECOUNT_REGS_CNT];
	u32 encr_cntr_iv[QCE_CNTRIV_REGS_CNT];
	u32 status;
	u32 status2;
};

#define QCE_IGNORE_BUF_SZ	(2 * QCE_BAM_BURST_SIZE)
#define QCE_RESULT_BUF_SZ	\
		ALIGN(sizeof(struct qce_result_dump), QCE_BAM_BURST_SIZE)

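/*
 * Per-device DMA state: the tx/rx DMA (BAM) channels that move data to and
 * from the crypto engine, the buffer that receives the result dump above,
 * and a scratch "ignore" buffer (presumably for read-back data the driver
 * discards); buffer sizes are padded to whole BAM bursts via the *_BUF_SZ
 * macros above.
 */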
struct qce_dma_data {
	struct dma_chan *txchan;
	struct dma_chan *rxchan;
	struct qce_result_dump *result_buf;
	void *ignore_buf;
};

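/*
 * DMA helpers (implemented in the qce driver's dma.c):
 * - qce_dma_request()/qce_dma_release(): acquire and release the tx/rx
 *   channels and associated buffers;
 * - qce_dma_prep_sgs(): queue one input and one output scatterlist on the
 *   two channels and install a completion callback;
 * - qce_dma_issue_pending()/qce_dma_terminate_all(): start or abort the
 *   queued transfers;
 * - qce_countsg()/qce_mapsg()/qce_unmapsg()/qce_sgtable_add(): scatterlist
 *   utilities; the "chained" flag indicates chained scatterlists.
 */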
int qce_dma_request(struct device *dev, struct qce_dma_data *dma);
void qce_dma_release(struct qce_dma_data *dma);
int qce_dma_prep_sgs(struct qce_dma_data *dma, struct scatterlist *sg_in,
		     int in_ents, struct scatterlist *sg_out, int out_ents,
		     dma_async_tx_callback cb, void *cb_param);
void qce_dma_issue_pending(struct qce_dma_data *dma);
int qce_dma_terminate_all(struct qce_dma_data *dma);
int qce_countsg(struct scatterlist *sg_list, int nbytes, bool *chained);
void qce_unmapsg(struct device *dev, struct scatterlist *sg, int nents,
		 enum dma_data_direction dir, bool chained);
int qce_mapsg(struct device *dev, struct scatterlist *sg, int nents,
	      enum dma_data_direction dir, bool chained);
struct scatterlist *
qce_sgtable_add(struct sg_table *sgt, struct scatterlist *sg_add);
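
/*
 * Typical request flow, sketched from the prototypes above; "qce", "rctx",
 * "qce_req_done_cb" and the *_ents/*_chained variables are illustrative
 * names, not part of this header (the real call sites live in the qce
 * driver's request handlers):
 *
 *	qce_dma_request(dev, &qce->dma);
 *	...
 *	qce_mapsg(dev, rctx->src_sg, src_ents, DMA_TO_DEVICE, src_chained);
 *	qce_mapsg(dev, rctx->dst_sg, dst_ents, DMA_FROM_DEVICE, dst_chained);
 *	qce_dma_prep_sgs(&qce->dma, rctx->src_sg, src_ents,
 *			 rctx->dst_sg, dst_ents, qce_req_done_cb, req);
 *	qce_dma_issue_pending(&qce->dma);
 *	...
 *	qce_unmapsg(dev, rctx->dst_sg, dst_ents, DMA_FROM_DEVICE, dst_chained);
 *	qce_unmapsg(dev, rctx->src_sg, src_ents, DMA_TO_DEVICE, src_chained);
 *	qce_dma_release(&qce->dma);
 */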

#endif /* _DMA_H_ */