
Searched refs:chunk (results 1 – 25 of 316), sorted by relevance


/linux/net/sctp/
inqueue.c
124 if (chunk->head_skb == chunk->skb) { in sctp_inq_pop()
125 chunk->skb = skb_shinfo(chunk->skb)->frag_list; in sctp_inq_pop()
129 chunk->skb = chunk->skb->next; in sctp_inq_pop()
134 chunk->skb = chunk->head_skb; in sctp_inq_pop()
141 skb_pull(chunk->skb, chunk->chunk_end - chunk->skb->data); in sctp_inq_pop()
163 chunk->head_skb = chunk->skb; in sctp_inq_pop()
166 if (chunk->head_skb && chunk->skb->data_len == chunk->skb->len) in sctp_inq_pop()
167 chunk->skb = skb_shinfo(chunk->skb)->frag_list; in sctp_inq_pop()
195 cb->chunk = head_cb->chunk; in sctp_inq_pop()
211 chunk->chunk_end = skb_tail_pointer(chunk->skb); in sctp_inq_pop()
[all …]
output.c
124 if (chunk) in sctp_packet_config()
186 packet, packet->size, chunk, chunk->skb ? chunk->skb->len : -1); in sctp_packet_transmit_chunk()
202 chunk); in sctp_packet_transmit_chunk()
260 if (!chunk->auth) in sctp_packet_bundle_auth()
361 if (chunk->asoc) in __sctp_packet_append_chunk()
491 padding = SCTP_PAD4(chunk->skb->len) - chunk->skb->len; in sctp_packet_pack()
499 skb_put_data(nskb, chunk->skb->data, chunk->skb->len); in sctp_packet_pack()
502 chunk, in sctp_packet_pack()
505 chunk->has_tsn ? ntohl(chunk->subh.data_hdr->tsn) : 0, in sctp_packet_pack()
506 ntohs(chunk->chunk_hdr->length), chunk->skb->len, in sctp_packet_pack()
[all …]
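The output.c excerpt above (listing line 491) pads every chunk to a 4-byte boundary before packing it into a packet. A minimal user-space sketch of that arithmetic, assuming the usual round-up-to-4 definition behind SCTP_PAD4 (the kernel macro itself is not reproduced here):

```c
#include <stdio.h>

/* Round a length up to the next multiple of 4, as the SCTP code does
 * before appending a chunk to a packet (cf. SCTP_PAD4 in the kernel). */
#define PAD4(len) (((len) + 3u) & ~3u)

int main(void)
{
	unsigned int lens[] = { 1, 4, 5, 17 };

	for (unsigned int i = 0; i < sizeof(lens) / sizeof(lens[0]); i++) {
		unsigned int len = lens[i];
		/* mirrors: padding = SCTP_PAD4(chunk->skb->len) - chunk->skb->len */
		unsigned int padding = PAD4(len) - len;

		printf("chunk len %u -> padded %u (padding %u)\n",
		       len, PAD4(len), padding);
	}
	return 0;
}
```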
chunk.c
60 struct sctp_chunk *chunk; in sctp_datamsg_free() local
66 sctp_chunk_free(chunk); in sctp_datamsg_free()
86 sctp_chunk_put(chunk); in sctp_datamsg_destroy()
90 asoc = chunk->asoc; in sctp_datamsg_destroy()
111 sctp_chunk_put(chunk); in sctp_datamsg_destroy()
135 chunk->msg = msg; in sctp_datamsg_assign()
264 if (!chunk) { in sctp_datamsg_from_user()
273 chunk->shkey = shkey; in sctp_datamsg_from_user()
276 __skb_pull(chunk->skb, (__u8 *)chunk->chunk_hdr - in sctp_datamsg_from_user()
308 if (!chunk->has_tsn && in sctp_chunk_abandoned()
[all …]
outqueue.c
230 sctp_chunk_free(chunk); in __sctp_outq_teardown()
239 sctp_chunk_free(chunk); in __sctp_outq_teardown()
248 sctp_chunk_free(chunk); in __sctp_outq_teardown()
257 sctp_chunk_free(chunk); in __sctp_outq_teardown()
263 sctp_chunk_free(chunk); in __sctp_outq_teardown()
286 chunk && chunk->chunk_hdr ? in sctp_outq_tail()
295 __func__, q, chunk, chunk && chunk->chunk_hdr ? in sctp_outq_tail()
496 if (chunk->transport) in sctp_retransmit_mark()
1094 __func__, ctx->q, chunk, chunk && chunk->chunk_hdr ? in sctp_outq_flush_data()
1097 chunk->skb ? chunk->skb->head : NULL, chunk->skb ? in sctp_outq_flush_data()
[all …]
sm_statefuns.c
375 (struct sctp_init_chunk *)chunk->chunk_hdr, chunk, in sctp_sf_do_5_1B_init()
404 chunk->subh.init_hdr = (struct sctp_inithdr *)chunk->skb->data; in sctp_sf_do_5_1B_init()
425 if (!sctp_process_init(new_asoc, chunk, sctp_source(chunk), in sctp_sf_do_5_1B_init()
543 chunk->subh.init_hdr = (struct sctp_inithdr *)chunk->skb->data; in sctp_sf_do_5_1C_ack()
548 (struct sctp_init_chunk *)chunk->chunk_hdr, chunk, in sctp_sf_do_5_1C_ack()
748 if (!pskb_pull(chunk->skb, ntohs(chunk->chunk_hdr->length) - in sctp_sf_do_5_1D_ce()
1551 chunk->subh.init_hdr = (struct sctp_inithdr *)chunk->skb->data; in sctp_sf_do_unexpected_init()
1559 (struct sctp_init_chunk *)chunk->chunk_hdr, chunk, in sctp_sf_do_unexpected_init()
1611 if (!sctp_process_init(new_asoc, chunk, sctp_source(chunk), in sctp_sf_do_unexpected_init()
2227 if (!pskb_pull(chunk->skb, ntohs(chunk->chunk_hdr->length) - in sctp_sf_do_5_2_4_dupcook()
[all …]
sm_make_chunk.c
594 if (chunk) in sctp_make_cookie_echo()
635 if (retval && chunk && chunk->transport) in sctp_make_cookie_ack()
693 if (chunk) in sctp_make_cwr()
872 if (chunk) in sctp_make_shutdown()
946 if (chunk && chunk->chunk_hdr && in sctp_make_abort()
1000 if (chunk) in sctp_make_abort_no_data()
1067 chunk->chunk_end = skb_tail_pointer(chunk->skb); in sctp_addto_param()
1217 if (chunk) in sctp_make_heartbeat_ack()
1274 if (chunk) in sctp_make_op_error_space()
1547 chunk->chunk_end = skb_tail_pointer(chunk->skb); in sctp_addto_chunk()
[all …]
ulpevent.c
79 struct sctp_chunk *chunk = event->chunk; in sctp_ulpevent_set_owner() local
90 if (chunk && chunk->head_skb && !chunk->head_skb->sk) in sctp_ulpevent_set_owner()
126 if (chunk) { in sctp_ulpevent_make_assoc_change()
130 skb = skb_copy_expand(chunk->skb, in sctp_ulpevent_make_assoc_change()
398 skb_pull(chunk->skb, elen); in sctp_ulpevent_make_remote_error()
676 struct sctp_chunk *chunk, in sctp_ulpevent_make_rcvmsg() argument
710 chunk->transport)) in sctp_ulpevent_make_rcvmsg()
742 sctp_chunk_hold(chunk); in sctp_ulpevent_make_rcvmsg()
743 event->chunk = chunk; in sctp_ulpevent_make_rcvmsg()
1133 sctp_chunk_put(event->chunk); in sctp_ulpevent_release_data()
[all …]
/linux/net/sunrpc/xprtrdma/
svc_rdma_pcl.c
24 kfree(chunk); in pcl_free()
32 chunk = kmalloc(struct_size(chunk, ch_segments, segcount), GFP_KERNEL); in pcl_alloc_chunk()
33 if (!chunk) in pcl_alloc_chunk()
40 return chunk; in pcl_alloc_chunk()
74 segment = &chunk->ch_segments[chunk->ch_segcount]; in pcl_set_read_segment()
121 if (!chunk) in pcl_alloc_call()
174 if (!chunk) { in pcl_alloc_read()
176 if (!chunk) in pcl_alloc_read()
214 if (!chunk) in pcl_alloc_write()
277 if (!chunk || !chunk->ch_payload_length) in pcl_process_nonpayloads()
[all …]
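In the svc_rdma_pcl.c excerpt, pcl_alloc_chunk() sizes its allocation with struct_size() so the trailing segment array is allocated together with the chunk header. A user-space sketch of that flexible-array pattern; the struct and field names below are hypothetical stand-ins, not the svc_rdma types (and struct_size()'s overflow checking is omitted):

```c
#include <stdlib.h>
#include <stdio.h>

/* Hypothetical chunk with a trailing flexible array of segments, mirroring
 * kmalloc(struct_size(chunk, ch_segments, segcount), GFP_KERNEL). */
struct segment {
	unsigned int offset;
	unsigned int length;
};

struct chunk {
	unsigned int seg_count;
	struct segment segments[];	/* flexible array member */
};

static struct chunk *alloc_chunk(unsigned int segcount)
{
	/* user-space equivalent of struct_size(chunk, segments, segcount) */
	struct chunk *c = calloc(1, sizeof(*c) + segcount * sizeof(c->segments[0]));

	if (!c)
		return NULL;
	c->seg_count = segcount;
	return c;
}

int main(void)
{
	struct chunk *c = alloc_chunk(4);

	if (!c)
		return 1;
	c->segments[3].length = 4096;
	printf("chunk with %u segments, last length %u\n",
	       c->seg_count, c->segments[3].length);
	free(c);
	return 0;
}
```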
/linux/mm/
percpu-vm.c
17 WARN_ON(chunk->immutable); in pcpu_chunk_page()
229 chunk); in pcpu_map_pages()
339 if (!chunk) in pcpu_create_chunk()
349 chunk->data = vms; in pcpu_create_chunk()
355 return chunk; in pcpu_create_chunk()
360 if (!chunk) in pcpu_destroy_chunk()
366 if (chunk->data) in pcpu_destroy_chunk()
368 pcpu_free_chunk(chunk); in pcpu_destroy_chunk()
397 if (chunk == pcpu_first_chunk || chunk == pcpu_reserved_chunk) in pcpu_should_reclaim_chunk()
406 return ((chunk->isolated && chunk->nr_empty_pop_pages) || in pcpu_should_reclaim_chunk()
[all …]
percpu.c
223 if (!chunk) in pcpu_addr_in_chunk()
226 start_addr = chunk->base_addr + chunk->start_offset; in pcpu_addr_in_chunk()
605 if (chunk != pcpu_reserved_chunk && !chunk->isolated) in pcpu_update_empty_pages()
1366 if (!chunk) in pcpu_alloc_first_chunk()
1406 bitmap_fill(chunk->populated, chunk->nr_pages); in pcpu_alloc_first_chunk()
1407 chunk->nr_populated = chunk->nr_pages; in pcpu_alloc_first_chunk()
1408 chunk->nr_empty_pop_pages = chunk->nr_pages; in pcpu_alloc_first_chunk()
1447 if (!chunk) in pcpu_alloc_chunk()
1482 chunk->free_bytes = chunk->nr_pages * PAGE_SIZE; in pcpu_alloc_chunk()
1654 if (likely(chunk && chunk->obj_cgroups)) { in pcpu_memcg_post_alloc_hook()
[all …]
percpu-km.c
56 struct pcpu_chunk *chunk; in pcpu_create_chunk() local
61 chunk = pcpu_alloc_chunk(gfp); in pcpu_create_chunk()
62 if (!chunk) in pcpu_create_chunk()
67 pcpu_free_chunk(chunk); in pcpu_create_chunk()
74 chunk->data = pages; in pcpu_create_chunk()
75 chunk->base_addr = page_address(pages); in pcpu_create_chunk()
78 pcpu_chunk_populated(chunk, 0, nr_pages); in pcpu_create_chunk()
84 return chunk; in pcpu_create_chunk()
91 if (!chunk) in pcpu_destroy_chunk()
97 if (chunk->data) in pcpu_destroy_chunk()
[all …]
percpu-stats.c
35 struct pcpu_chunk *chunk; in find_max_nr_alloc() local
55 struct pcpu_block_md *chunk_md = &chunk->chunk_md; in chunk_map_stats()
69 last_alloc = find_last_bit(chunk->alloc_map, in chunk_map_stats()
70 pcpu_chunk_map_bits(chunk) - in chunk_map_stats()
88 if (test_bit(start, chunk->alloc_map)) { in chunk_map_stats()
121 P("nr_alloc", chunk->nr_alloc); in chunk_map_stats()
122 P("max_alloc_size", chunk->max_alloc_size); in chunk_map_stats()
125 P("free_bytes", chunk->free_bytes); in chunk_map_stats()
137 struct pcpu_chunk *chunk; in percpu_stats_show() local
207 if (chunk == pcpu_first_chunk) in percpu_stats_show()
[all …]
/linux/drivers/s390/cio/
itcw.c
182 void *chunk; in itcw_init() local
196 return chunk; in itcw_init()
197 itcw = chunk; in itcw_init()
212 return chunk; in itcw_init()
220 return chunk; in itcw_init()
230 return chunk; in itcw_init()
238 return chunk; in itcw_init()
244 return chunk; in itcw_init()
251 return chunk; in itcw_init()
258 return chunk; in itcw_init()
[all …]
/linux/drivers/net/ethernet/mellanox/mlx5/core/steering/
dr_icm_pool.c
141 chunk->ste_arr = kvzalloc(chunk->num_of_entries * in dr_icm_chunk_ste_init()
146 chunk->hw_ste_arr = kvzalloc(chunk->num_of_entries * in dr_icm_chunk_ste_init()
151 chunk->miss_list = kvmalloc(chunk->num_of_entries * in dr_icm_chunk_ste_init()
189 kvfree(chunk); in dr_icm_chunk_destroy()
249 chunk = kvzalloc(sizeof(*chunk), GFP_KERNEL); in dr_icm_chunk_create()
250 if (!chunk) in dr_icm_chunk_create()
257 chunk->icm_addr = in dr_icm_chunk_create()
279 return chunk; in dr_icm_chunk_create()
282 kvfree(chunk); in dr_icm_chunk_create()
389 if (!chunk) in mlx5dr_icm_alloc_chunk()
[all …]
/linux/kernel/trace/
pid_list.c
25 chunk->next = NULL; in get_lower_chunk()
33 return chunk; in get_lower_chunk()
57 return chunk; in get_upper_chunk()
357 chunk = kzalloc(sizeof(*chunk), GFP_KERNEL); in pid_list_refill_irq()
358 if (!chunk) in pid_list_refill_irq()
368 chunk = kzalloc(sizeof(*chunk), GFP_KERNEL); in pid_list_refill_irq()
369 if (!chunk) in pid_list_refill_irq()
430 chunk = kzalloc(sizeof(*chunk), GFP_KERNEL); in trace_pid_list_alloc()
431 if (!chunk) in trace_pid_list_alloc()
441 chunk = kzalloc(sizeof(*chunk), GFP_KERNEL); in trace_pid_list_alloc()
[all …]
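The pid_list.c excerpt pops preallocated chunks off a free list (detaching them with chunk->next = NULL) and tops the list back up with kzalloc() in the refill and alloc paths. A simplified user-space model of that keep-a-free-list-and-refill idea; the names and locking-free structure here are illustrative only:

```c
#include <stdlib.h>
#include <stdio.h>

/* Illustrative chunk kept on a singly linked free list, loosely modelled
 * on the get_lower_chunk()/refill pattern in kernel/trace/pid_list.c. */
struct chunk {
	struct chunk *next;
	unsigned long data[32];
};

static struct chunk *free_list;

static struct chunk *get_chunk(void)
{
	struct chunk *c = free_list;

	if (!c)
		return NULL;		/* caller must refill and retry */
	free_list = c->next;
	c->next = NULL;			/* detach, as get_lower_chunk() does */
	return c;
}

static void put_chunk(struct chunk *c)
{
	c->next = free_list;
	free_list = c;
}

static int refill(unsigned int count)
{
	while (count--) {
		struct chunk *c = calloc(1, sizeof(*c));  /* kzalloc() stand-in */

		if (!c)
			return -1;
		put_chunk(c);
	}
	return 0;
}

int main(void)
{
	if (refill(4))
		return 1;
	struct chunk *c = get_chunk();

	printf("got chunk %p, new free-list head %p\n", (void *)c, (void *)free_list);
	put_chunk(c);
	return 0;
}
```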
/linux/drivers/net/ethernet/mellanox/mlx4/
icm.c
92 kfree(chunk); in mlx4_free_icm()
160 if (!chunk) { in mlx4_alloc_icm()
161 chunk = kzalloc_node(sizeof(*chunk), in mlx4_alloc_icm()
165 if (!chunk) { in mlx4_alloc_icm()
166 chunk = kzalloc(sizeof(*chunk), in mlx4_alloc_icm()
169 if (!chunk) in mlx4_alloc_icm()
188 &chunk->buf[chunk->npages], in mlx4_alloc_icm()
191 ret = mlx4_alloc_icm_pages(&chunk->sg[chunk->npages], in mlx4_alloc_icm()
205 ++chunk->nsg; in mlx4_alloc_icm()
208 chunk->sg, chunk->npages, in mlx4_alloc_icm()
[all …]
icm.h
74 struct mlx4_icm_chunk *chunk; member
100 iter->chunk = list_empty(&icm->chunk_list) ? in mlx4_icm_first()
108 return !iter->chunk; in mlx4_icm_last()
113 if (++iter->page_idx >= iter->chunk->nsg) { in mlx4_icm_next()
115 iter->chunk = NULL; in mlx4_icm_next()
119 iter->chunk = list_entry(iter->chunk->list.next, in mlx4_icm_next()
127 if (iter->chunk->coherent) in mlx4_icm_addr()
128 return iter->chunk->buf[iter->page_idx].dma_addr; in mlx4_icm_addr()
135 if (iter->chunk->coherent) in mlx4_icm_size()
136 return iter->chunk->buf[iter->page_idx].size; in mlx4_icm_size()
[all …]
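The icm.h excerpt defines an iterator that walks a list of chunks and, within each chunk, an array of entries: page_idx advances until it reaches nsg, then the walk moves to the next chunk. A user-space sketch of the same two-level iteration; the types are simplified stand-ins, and a NULL-terminated list replaces the kernel's list_head/list_entry bookkeeping:

```c
#include <stdio.h>

/* Simplified two-level iteration over a linked list of chunks, each holding
 * an array of entries, mirroring the mlx4_icm_first()/mlx4_icm_next() shape. */
struct chunk {
	struct chunk *next;
	int nsg;		/* number of valid entries in this chunk */
	unsigned long addr[4];	/* per-entry addresses */
};

struct iter {
	struct chunk *chunk;
	int page_idx;
};

static void iter_first(struct iter *it, struct chunk *head)
{
	it->chunk = head;
	it->page_idx = 0;
}

static int iter_done(const struct iter *it)
{
	return !it->chunk;
}

static void iter_next(struct iter *it)
{
	if (++it->page_idx >= it->chunk->nsg) {
		it->chunk = it->chunk->next;	/* NULL terminates the walk */
		it->page_idx = 0;
	}
}

int main(void)
{
	struct chunk b = { NULL, 2, { 0x3000, 0x4000 } };
	struct chunk a = { &b,   2, { 0x1000, 0x2000 } };
	struct iter it;

	for (iter_first(&it, &a); !iter_done(&it); iter_next(&it))
		printf("entry addr 0x%lx\n", it.chunk->addr[it.page_idx]);
	return 0;
}
```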
/linux/kernel/
audit_tree.c
136 kfree(chunk); in free_chunk()
194 if (!chunk) in alloc_chunk()
205 return chunk; in alloc_chunk()
286 audit_mark(mark)->chunk = chunk; in replace_mark_chunk()
287 if (chunk) in replace_mark_chunk()
402 if (!chunk) { in create_chunk()
410 kfree(chunk); in create_chunk()
417 kfree(chunk); in create_chunk()
489 if (!chunk) { in tag_chunk()
503 p = &chunk->owners[chunk->count - 1]; in tag_chunk()
[all …]
/linux/lib/
A Dgenalloc.c40 return chunk->end_addr - chunk->start_addr + 1; in chunk_size()
196 chunk->phys_addr = phys; in gen_pool_add_owner()
197 chunk->start_addr = virt; in gen_pool_add_owner()
199 chunk->owner = owner; in gen_pool_add_owner()
224 if (addr >= chunk->start_addr && addr <= chunk->end_addr) { in gen_pool_virt_to_phys()
225 paddr = chunk->phys_addr + (addr - chunk->start_addr); in gen_pool_virt_to_phys()
257 vfree(chunk); in gen_pool_destroy()
320 *owner = chunk->owner; in gen_pool_alloc_algo_owner()
504 if (addr >= chunk->start_addr && addr <= chunk->end_addr) { in gen_pool_free_owner()
512 *owner = chunk->owner; in gen_pool_free_owner()
[all …]
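The genalloc.c excerpt computes a chunk's size from an inclusive address range (end_addr - start_addr + 1) and translates a virtual address to physical by offsetting into whichever chunk contains it. A small sketch of that range check and offset arithmetic with illustrative values, not the gen_pool API itself:

```c
#include <stdio.h>

/* Minimal model of a genalloc chunk: an inclusive [start_addr, end_addr]
 * virtual range with a known physical base. */
struct chunk {
	unsigned long start_addr;
	unsigned long end_addr;		/* inclusive, hence the "+ 1" in chunk_size() */
	unsigned long phys_addr;
};

static unsigned long chunk_size(const struct chunk *c)
{
	return c->end_addr - c->start_addr + 1;
}

/* Returns (unsigned long)-1 if addr lies outside the chunk. */
static unsigned long virt_to_phys_in_chunk(const struct chunk *c, unsigned long addr)
{
	if (addr >= c->start_addr && addr <= c->end_addr)
		return c->phys_addr + (addr - c->start_addr);
	return (unsigned long)-1;
}

int main(void)
{
	struct chunk c = { 0x100000, 0x100fff, 0x8000000 };

	printf("chunk size: %lu bytes\n", chunk_size(&c));
	printf("virt 0x100010 -> phys 0x%lx\n", virt_to_phys_in_chunk(&c, 0x100010));
	return 0;
}
```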
/linux/drivers/infiniband/hw/irdma/
pble.c
20 struct irdma_chunk *chunk; in irdma_destroy_pble_prm() local
25 list_del(&chunk->list); in irdma_destroy_pble_prm()
29 kfree(chunk->chunkmem.va); in irdma_destroy_pble_prm()
94 struct irdma_chunk *chunk = info->chunk; in add_sd_direct() local
116 chunk->size, chunk->size, chunk->vaddr, chunk->fpm_addr); in add_sd_direct()
150 struct irdma_chunk *chunk = info->chunk; in add_bp_pages() local
165 addr = chunk->vaddr; in add_bp_pages()
236 chunk = chunkmem.va; in add_pble_prm()
239 chunk->dev = dev; in add_pble_prm()
246 info.chunk = chunk; in add_pble_prm()
[all …]
/linux/include/net/sctp/
sm.h
180 const struct sctp_chunk *chunk);
182 const struct sctp_chunk *chunk);
185 const struct sctp_chunk *chunk);
203 const struct sctp_chunk *chunk);
215 const struct sctp_chunk *chunk,
220 const struct sctp_chunk *chunk,
224 const struct sctp_chunk *chunk);
227 const struct sctp_chunk *chunk);
274 struct sctp_chunk *chunk,
324 struct sctp_chunk *chunk,
[all …]
/linux/drivers/infiniband/hw/mthca/
mthca_memfree.c
69 dma_unmap_sg(&dev->pdev->dev, chunk->mem, chunk->npages, in mthca_free_icm_pages()
101 kfree(chunk); in mthca_free_icm()
158 if (!chunk) { in mthca_alloc_icm()
159 chunk = kmalloc(sizeof *chunk, in mthca_alloc_icm()
161 if (!chunk) in mthca_alloc_icm()
175 &chunk->mem[chunk->npages], in mthca_alloc_icm()
178 ret = mthca_alloc_icm_pages(&chunk->mem[chunk->npages], in mthca_alloc_icm()
185 ++chunk->nsg; in mthca_alloc_icm()
187 chunk->nsg = in mthca_alloc_icm()
197 chunk = NULL; in mthca_alloc_icm()
[all …]
/linux/drivers/gpu/drm/nouveau/
nouveau_dmem.c
97 return chunk->drm; in page_to_drm()
119 chunk->callocated--; in nouveau_dmem_page_free()
236 chunk = kzalloc(sizeof(*chunk), GFP_KERNEL); in nouveau_dmem_chunk_alloc()
237 if (chunk == NULL) { in nouveau_dmem_chunk_alloc()
250 chunk->drm = drm; in nouveau_dmem_chunk_alloc()
260 &chunk->bo); in nouveau_dmem_chunk_alloc()
286 chunk->callocated++; in nouveau_dmem_chunk_alloc()
301 kfree(chunk); in nouveau_dmem_chunk_alloc()
318 chunk->callocated++; in nouveau_dmem_page_alloc_locked()
384 list_del(&chunk->list); in nouveau_dmem_fini()
[all …]
/linux/drivers/gpu/drm/panel/
panel-samsung-s6e63m0-dsi.c
44 int chunk; in s6e63m0_dsi_dcs_write() local
54 chunk = remain; in s6e63m0_dsi_dcs_write()
57 if (chunk > S6E63M0_DSI_MAX_CHUNK) in s6e63m0_dsi_dcs_write()
58 chunk = S6E63M0_DSI_MAX_CHUNK; in s6e63m0_dsi_dcs_write()
64 cmdwritten += chunk; in s6e63m0_dsi_dcs_write()
65 seqp += chunk; in s6e63m0_dsi_dcs_write()
68 chunk = remain - cmdwritten; in s6e63m0_dsi_dcs_write()
69 if (chunk > S6E63M0_DSI_MAX_CHUNK) in s6e63m0_dsi_dcs_write()
70 chunk = S6E63M0_DSI_MAX_CHUNK; in s6e63m0_dsi_dcs_write()
82 cmdwritten += chunk; in s6e63m0_dsi_dcs_write()
[all …]
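The s6e63m0 DSI excerpt splits a command sequence into pieces of at most S6E63M0_DSI_MAX_CHUNK bytes and advances a cursor by however much was actually written. A user-space sketch of that bounded-chunk loop; the transfer function below is a stand-in for the DSI write, and the chunk limit is an arbitrary example value:

```c
#include <stdio.h>
#include <stddef.h>

#define MAX_CHUNK 5	/* stand-in for S6E63M0_DSI_MAX_CHUNK */

/* Pretend transfer: "writes" up to len bytes and reports how many went out. */
static size_t xfer(const unsigned char *buf, size_t len)
{
	printf("wrote %zu bytes starting with 0x%02x\n", len, buf[0]);
	return len;
}

static void write_in_chunks(const unsigned char *seq, size_t remain)
{
	size_t written = 0;

	while (written < remain) {
		size_t chunk = remain - written;

		if (chunk > MAX_CHUNK)	/* cap each transfer, as the driver does */
			chunk = MAX_CHUNK;
		written += xfer(seq + written, chunk);
	}
}

int main(void)
{
	unsigned char seq[13];

	for (size_t i = 0; i < sizeof(seq); i++)
		seq[i] = (unsigned char)i;
	write_in_chunks(seq, sizeof(seq));
	return 0;
}
```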
/linux/drivers/staging/media/atomisp/pci/isp/kernels/s3a/s3a_1.0/
ia_css_s3a.host.c
355 chunk = max(chunk, 1); in ia_css_s3a_vmem_decode()
362 kmax = (rest > chunk) ? chunk : rest; in ia_css_s3a_vmem_decode()
367 hi[elm + chunk * 0], lo[elm + chunk * 0]); in ia_css_s3a_vmem_decode()
369 hi[elm + chunk * 1], lo[elm + chunk * 1]); in ia_css_s3a_vmem_decode()
371 hi[elm + chunk * 2], lo[elm + chunk * 2]); in ia_css_s3a_vmem_decode()
373 hi[elm + chunk * 3], lo[elm + chunk * 3]); in ia_css_s3a_vmem_decode()
375 hi[elm + chunk * 4], lo[elm + chunk * 4]); in ia_css_s3a_vmem_decode()
377 hi[elm + chunk * 5], lo[elm + chunk * 5]); in ia_css_s3a_vmem_decode()
379 hi[elm + chunk * 6], lo[elm + chunk * 6]); in ia_css_s3a_vmem_decode()
381 hi[elm + chunk * 7], lo[elm + chunk * 7]); in ia_css_s3a_vmem_decode()
[all …]
