/*
 * CAAM/SEC 4.x functions for using scatterlists in caam driver
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 *
 */

#include <linux/kernel.h>
#include <linux/string.h>
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <crypto/scatterwalk.h>

/* full definition and SEC4_SG_LEN_FIN live in the driver's desc.h */
struct sec4_sg_entry;
/*
 * convert single dma address to h/w link table format
 */
static inline void dma_to_sec4_sg_one(struct sec4_sg_entry *sec4_sg_ptr,
				      dma_addr_t dma, u32 len, u32 offset)
{
	sec4_sg_ptr->ptr = dma;
	sec4_sg_ptr->len = len;
	sec4_sg_ptr->reserved = 0;
	sec4_sg_ptr->buf_pool_id = 0;
	sec4_sg_ptr->offset = offset;
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "sec4_sg_ptr@: ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sec4_sg_ptr,
		       sizeof(struct sec4_sg_entry), 1);
#endif
}
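
/*
 * Illustrative sketch (hypothetical, not part of the driver): a
 * one-entry link table for a buffer mapped with dma_map_single();
 * equivalent to what sg_to_sec4_sg_last() below does for one element
 */
static inline void example_one_entry_table(struct sec4_sg_entry *table,
					   dma_addr_t buf_dma, u32 buf_len)
{
	dma_to_sec4_sg_one(table, buf_dma, buf_len, 0);
	/* flag the only entry as the final one for the hardware */
	table->len |= SEC4_SG_LEN_FIN;
}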

/*
 * convert scatterlist to h/w link table format, without setting the
 * final bit; instead, return a pointer to the last entry written so
 * the caller can mark it
 */
static inline struct sec4_sg_entry *
sg_to_sec4_sg(struct scatterlist *sg, int sg_count,
	      struct sec4_sg_entry *sec4_sg_ptr, u32 offset)
{
	while (sg_count) {
		dma_to_sec4_sg_one(sec4_sg_ptr, sg_dma_address(sg),
				   sg_dma_len(sg), offset);
		sec4_sg_ptr++;
		sg = scatterwalk_sg_next(sg);
		sg_count--;
	}
	return sec4_sg_ptr - 1;
}

/*
 * convert scatterlist to h/w link table format and set the final bit
 * on the last entry; the scatterlist must have been previously dma
 * mapped
 */
static inline void sg_to_sec4_sg_last(struct scatterlist *sg, int sg_count,
				      struct sec4_sg_entry *sec4_sg_ptr,
				      u32 offset)
{
	sec4_sg_ptr = sg_to_sec4_sg(sg, sg_count, sec4_sg_ptr, offset);
	sec4_sg_ptr->len |= SEC4_SG_LEN_FIN;
}
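
/*
 * Illustrative sketch (hypothetical, not driver API): map a flat,
 * unchained scatterlist and build a terminated link table from it;
 * the name and the DMA_TO_DEVICE direction are assumptions
 */
static inline int example_map_and_build_table(struct device *dev,
					      struct scatterlist *sg,
					      int nents,
					      struct sec4_sg_entry *table)
{
	int mapped;

	/* map the scatterlist so the device can read the segments */
	mapped = dma_map_sg(dev, sg, nents, DMA_TO_DEVICE);
	if (!mapped)
		return -ENOMEM;

	/* one link-table entry per mapped segment; last one gets FIN */
	sg_to_sec4_sg_last(sg, mapped, table, 0);

	return mapped;
}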

/*
 * count scatterlist elements needed to cover nbytes; set *chained if a
 * chained scatterlist is detected along the way
 */
static inline int __sg_count(struct scatterlist *sg_list, int nbytes,
			     bool *chained)
{
	struct scatterlist *sg = sg_list;
	int sg_nents = 0;

	while (nbytes > 0) {
		sg_nents++;
		nbytes -= sg->length;
		/* a zero-length slot before the end marks a chain link */
		if (!sg_is_last(sg) && (sg + 1)->length == 0)
			*chained = true;
		sg = scatterwalk_sg_next(sg);
	}

	return sg_nents;
}

/*
 * derive the number of elements in a scatterlist, but return 0 for a
 * single element, since no link table is needed in that case
 */
static inline int sg_count(struct scatterlist *sg_list, int nbytes,
			   bool *chained)
{
	int sg_nents = __sg_count(sg_list, nbytes, chained);

	if (likely(sg_nents == 1))
		return 0;

	return sg_nents;
}
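
/*
 * Illustrative sketch (hypothetical): a sg_count() return of 0 means
 * the single segment can be handed to the hardware directly, with no
 * sec4 link table at all
 */
static inline bool example_needs_link_table(struct scatterlist *sg,
					    int nbytes, bool *chained)
{
	return sg_count(sg, nbytes, chained) != 0;
}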

static inline int dma_map_sg_chained(struct device *dev,
				     struct scatterlist *sg,
				     unsigned int nents,
				     enum dma_data_direction dir,
				     bool chained)
{
	if (unlikely(chained)) {
		int i;

		/*
		 * dma_map_sg() cannot walk across chain boundaries, so
		 * map a chained list one entry at a time
		 */
		for (i = 0; i < nents; i++) {
			dma_map_sg(dev, sg, 1, dir);
			sg = scatterwalk_sg_next(sg);
		}
	} else {
		dma_map_sg(dev, sg, nents, dir);
	}
	return nents;
}

static inline int dma_unmap_sg_chained(struct device *dev,
				       struct scatterlist *sg,
				       unsigned int nents,
				       enum dma_data_direction dir,
				       bool chained)
{
	if (unlikely(chained)) {
		int i;

		/* unmap entry by entry, mirroring dma_map_sg_chained() */
		for (i = 0; i < nents; i++) {
			dma_unmap_sg(dev, sg, 1, dir);
			sg = scatterwalk_sg_next(sg);
		}
	} else {
		dma_unmap_sg(dev, sg, nents, dir);
	}
	return nents;
}
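
/*
 * Illustrative sketch (hypothetical): a full map/build/unmap cycle for
 * a possibly chained source scatterlist; the name, the direction and
 * the omitted error handling are assumptions for the example
 */
static inline void example_chained_cycle(struct device *dev,
					 struct scatterlist *sg, int nbytes,
					 struct sec4_sg_entry *table)
{
	bool chained = false;
	int nents = __sg_count(sg, nbytes, &chained);

	dma_map_sg_chained(dev, sg, nents, DMA_TO_DEVICE, chained);
	sg_to_sec4_sg_last(sg, nents, table, 0);
	/* ... submit 'table' to the hardware and wait for completion ... */
	dma_unmap_sg_chained(dev, sg, nents, DMA_TO_DEVICE, chained);
}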

/* Copy the first len bytes of sg data to dest */
static inline void sg_copy(u8 *dest, struct scatterlist *sg, unsigned int len)
{
	struct scatterlist *current_sg = sg;
	int cpy_index = 0, next_cpy_index = current_sg->length;

	/* copy each entry that lies entirely below len */
	while (next_cpy_index < len) {
		memcpy(dest + cpy_index, (u8 *) sg_virt(current_sg),
		       current_sg->length);
		current_sg = scatterwalk_sg_next(current_sg);
		cpy_index = next_cpy_index;
		next_cpy_index += current_sg->length;
	}
	/* partial copy from the entry that straddles len */
	if (cpy_index < len)
		memcpy(dest + cpy_index, (u8 *) sg_virt(current_sg),
		       len - cpy_index);
}

/* Copy sg data, from byte offset to_skip up to offset end, to dest */
static inline void sg_copy_part(u8 *dest, struct scatterlist *sg,
				int to_skip, unsigned int end)
{
	struct scatterlist *current_sg = sg;
	int sg_index, cpy_index, cpy_len;

	/* skip entries that lie entirely below to_skip */
	sg_index = current_sg->length;
	while (sg_index <= to_skip) {
		current_sg = scatterwalk_sg_next(current_sg);
		sg_index += current_sg->length;
	}
	/* copy the tail of the entry containing to_skip, clamped to end */
	cpy_index = sg_index - to_skip;
	cpy_len = min(cpy_index, (int)(end - to_skip));
	memcpy(dest, (u8 *) sg_virt(current_sg) +
	       current_sg->length - cpy_index, cpy_len);
	/* copy any remaining bytes from the following entries */
	if (end > sg_index) {
		current_sg = scatterwalk_sg_next(current_sg);
		sg_copy(dest + cpy_len, current_sg, end - sg_index);
	}
}
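
/*
 * Illustrative sketch (hypothetical): linearize a small field such as
 * an IV that may start at a byte offset into the list; the name and
 * parameters are assumptions for the example
 */
static inline void example_copy_field(u8 *buf, struct scatterlist *sg,
				      int offset, unsigned int size)
{
	if (offset)
		/* bytes [offset, offset + size) from the list */
		sg_copy_part(buf, sg, offset, offset + size);
	else
		/* the first 'size' bytes of the list */
		sg_copy(buf, sg, size);
}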