Lines matching refs:ws (ZSTD_cwksp workspace management, zstd_cwksp.h)
/* Forward declaration; defined later in this header. */
MEM_STATIC size_t ZSTD_cwksp_available_space(ZSTD_cwksp* ws);

MEM_STATIC void ZSTD_cwksp_assert_internal_consistency(ZSTD_cwksp* ws) {
    (void)ws;  /* unused when asserts are compiled out */
    assert(ws->workspace <= ws->objectEnd);
    assert(ws->objectEnd <= ws->tableEnd);
    assert(ws->objectEnd <= ws->tableValidEnd);
    assert(ws->tableEnd <= ws->allocStart);
    assert(ws->tableValidEnd <= ws->allocStart);
    assert(ws->allocStart <= ws->workspaceEnd);
}

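Read together, these assertions pin down the workspace layout: objects sit at the bottom, tables above them, and buffers plus aligned allocations grow down from the top, with free space in the middle. A sketch of the layout the invariants imply (pointer names as in the struct; the diagram itself is an inference from the asserts, not copied from the header):

/*
 * workspace                                                  workspaceEnd
 * |--objects--|--tables--| ...free space... |--aligned--|--buffers--|
 * ^           ^          ^                  ^
 * workspace   objectEnd  tableEnd           allocStart
 *
 * tableValidEnd lies between objectEnd and allocStart and tracks how much
 * of the table area still holds trusted (e.g. zeroed) contents.
 */
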
MEM_STATIC void ZSTD_cwksp_internal_advance_phase(
        ZSTD_cwksp* ws, ZSTD_cwksp_alloc_phase_e phase) {
    assert(phase >= ws->phase);
    if (phase > ws->phase) {
        if (ws->phase < ZSTD_cwksp_alloc_buffers &&
                phase >= ZSTD_cwksp_alloc_buffers) {
            ws->tableValidEnd = ws->objectEnd;
        }
        if (ws->phase < ZSTD_cwksp_alloc_aligned &&
                phase >= ZSTD_cwksp_alloc_aligned) {
            /* Unaligned buffer allocations may have left the top pointer
             * unaligned; round it down to a U32 boundary before handing
             * out aligned allocations. */
            ws->allocStart = (BYTE*)ws->allocStart - ((size_t)ws->allocStart & (sizeof(U32)-1));
            if (ws->allocStart < ws->tableValidEnd) {
                ws->tableValidEnd = ws->allocStart;
            }
        }
        ws->phase = phase;
    }
}

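The realignment above rounds the top-of-workspace pointer down to a U32 boundary by clearing its low bits. A standalone check of that arithmetic, using a hypothetical address value (assuming 4-byte unsigned, as for U32):

#include <assert.h>
#include <stddef.h>

int main(void) {
    size_t addr = 0x1007;   /* hypothetical unaligned pointer value */
    size_t aligned = addr - (addr & (sizeof(unsigned)-1));
    assert(aligned == 0x1004);  /* rounded down to a 4-byte boundary */
    return 0;
}
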
MEM_STATIC int ZSTD_cwksp_owns_buffer(const ZSTD_cwksp* ws, const void* ptr) {
    return (ptr != NULL) && (ws->workspace <= ptr) && (ptr <= ws->workspaceEnd);
}

MEM_STATIC void* ZSTD_cwksp_reserve_internal(
        ZSTD_cwksp* ws, size_t bytes, ZSTD_cwksp_alloc_phase_e phase) {
    void* alloc;
    void* bottom = ws->tableEnd;
    ZSTD_cwksp_internal_advance_phase(ws, phase);
    alloc = (BYTE*)ws->allocStart - bytes;
    DEBUGLOG(5, "cwksp: reserving %p %zd bytes, %zd bytes remaining",
        alloc, bytes, ZSTD_cwksp_available_space(ws) - bytes);
    ZSTD_cwksp_assert_internal_consistency(ws);
    if (alloc < bottom) {  /* would collide with the bottom-up table region */
        ws->allocFailed = 1;
        return NULL;
    }
    if (alloc < ws->tableValidEnd) {
        ws->tableValidEnd = alloc;
    }
    ws->allocStart = alloc;
    return alloc;
}

MEM_STATIC BYTE* ZSTD_cwksp_reserve_buffer(ZSTD_cwksp* ws, size_t bytes) {
    return (BYTE*)ZSTD_cwksp_reserve_internal(ws, bytes, ZSTD_cwksp_alloc_buffers);
}

MEM_STATIC void* ZSTD_cwksp_reserve_aligned(ZSTD_cwksp* ws, size_t bytes) {
    assert((bytes & (sizeof(U32)-1)) == 0);
    return ZSTD_cwksp_reserve_internal(ws, ZSTD_cwksp_align(bytes, sizeof(U32)), ZSTD_cwksp_alloc_aligned);
}

MEM_STATIC void* ZSTD_cwksp_reserve_table(ZSTD_cwksp* ws, size_t bytes) {
    const ZSTD_cwksp_alloc_phase_e phase = ZSTD_cwksp_alloc_aligned;
    void* alloc = ws->tableEnd;
    void* end = (BYTE*)alloc + bytes;
    void* top = ws->allocStart;
    DEBUGLOG(5, "cwksp: reserving %p table %zd bytes, %zd bytes remaining",
        alloc, bytes, ZSTD_cwksp_available_space(ws) - bytes);
    assert((bytes & (sizeof(U32)-1)) == 0);  /* tables must stay U32-aligned */
    ZSTD_cwksp_internal_advance_phase(ws, phase);
    ZSTD_cwksp_assert_internal_consistency(ws);
    if (end > top) {  /* would collide with the top-down region */
        ws->allocFailed = 1;
        return NULL;
    }
    ws->tableEnd = end;
    return alloc;
}

MEM_STATIC void* ZSTD_cwksp_reserve_object(ZSTD_cwksp* ws, size_t bytes) {
    size_t const roundedBytes = ZSTD_cwksp_align(bytes, sizeof(void*));
    void* alloc = ws->objectEnd;
    void* end = (BYTE*)alloc + roundedBytes;
    DEBUGLOG(5,
        "cwksp: reserving %p object %zd bytes (rounded to %zd), %zd bytes remaining",
        alloc, bytes, roundedBytes, ZSTD_cwksp_available_space(ws) - roundedBytes);
    ZSTD_cwksp_assert_internal_consistency(ws);
    /* objects may only be reserved in the first phase */
    if (ws->phase != ZSTD_cwksp_alloc_objects || end > ws->workspaceEnd) {
        ws->allocFailed = 1;
        return NULL;
    }
    ws->objectEnd = end;
    ws->tableEnd = end;
    ws->tableValidEnd = end;
    return alloc;
}

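The four reserve flavors must be called in phase order, since ZSTD_cwksp_internal_advance_phase() asserts the phase never moves backwards: objects first, then buffers, then aligned allocations and tables. A hedged usage sketch (the sizes and the helper itself are illustrative, not from the header):

/* Illustrative helper: assumes `ws` was already initialized over a
 * sufficiently large buffer with ZSTD_cwksp_init(). */
static void example_reservations(ZSTD_cwksp* ws) {
    void* obj     = ZSTD_cwksp_reserve_object(ws, 64);                  /* phase: objects */
    BYTE* buf     = ZSTD_cwksp_reserve_buffer(ws, 1024);                /* phase: buffers */
    void* aligned = ZSTD_cwksp_reserve_aligned(ws, 256 * sizeof(U32));  /* phase: aligned */
    void* table   = ZSTD_cwksp_reserve_table(ws, 1024 * sizeof(U32));   /* also aligned phase */
    /* failures are sticky, so a single check at the end suffices */
    if (ZSTD_cwksp_reserve_failed(ws)) {
        /* workspace too small: caller must grow it and retry */
    }
    (void)obj; (void)buf; (void)aligned; (void)table;
}
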
MEM_STATIC void ZSTD_cwksp_mark_tables_dirty(ZSTD_cwksp* ws) {
    DEBUGLOG(4, "cwksp: ZSTD_cwksp_mark_tables_dirty");
    assert(ws->tableValidEnd >= ws->objectEnd);
    assert(ws->tableValidEnd <= ws->allocStart);
    ws->tableValidEnd = ws->objectEnd;
    ZSTD_cwksp_assert_internal_consistency(ws);
}

MEM_STATIC void ZSTD_cwksp_mark_tables_clean(ZSTD_cwksp* ws) {
    DEBUGLOG(4, "cwksp: ZSTD_cwksp_mark_tables_clean");
    assert(ws->tableValidEnd >= ws->objectEnd);
    assert(ws->tableValidEnd <= ws->allocStart);
    if (ws->tableValidEnd < ws->tableEnd) {
        ws->tableValidEnd = ws->tableEnd;
    }
    ZSTD_cwksp_assert_internal_consistency(ws);
}

MEM_STATIC void ZSTD_cwksp_clean_tables(ZSTD_cwksp* ws) {
    DEBUGLOG(4, "cwksp: ZSTD_cwksp_clean_tables");
    assert(ws->tableValidEnd >= ws->objectEnd);
    assert(ws->tableValidEnd <= ws->allocStart);
    if (ws->tableValidEnd < ws->tableEnd) {
        ZSTD_memset(ws->tableValidEnd, 0, (BYTE*)ws->tableEnd - (BYTE*)ws->tableValidEnd);
    }
    ZSTD_cwksp_mark_tables_clean(ws);
}

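A plausible reuse sequence for these helpers (the scenario and helper are illustrative): when a workspace is recycled and its table area can no longer be trusted, mark it dirty; if the next use needs zeroed tables, clean only the stale span rather than the whole workspace:

/* Illustrative: recycle a workspace whose table contents are now stale. */
static void example_table_reuse(ZSTD_cwksp* ws) {
    ZSTD_cwksp_mark_tables_dirty(ws);  /* contents no longer trusted */
    /* ... later, a mode that requires zero-initialized tables: ... */
    ZSTD_cwksp_clean_tables(ws);       /* zeroes only [tableValidEnd, tableEnd) */
}
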
MEM_STATIC void ZSTD_cwksp_clear_tables(ZSTD_cwksp* ws) {
    DEBUGLOG(4, "cwksp: clearing tables!");
    ws->tableEnd = ws->objectEnd;
    ZSTD_cwksp_assert_internal_consistency(ws);
}

MEM_STATIC void ZSTD_cwksp_clear(ZSTD_cwksp* ws) {
    DEBUGLOG(4, "cwksp: clearing!");
    ws->tableEnd = ws->objectEnd;
    ws->allocStart = ws->workspaceEnd;
    ws->allocFailed = 0;
    if (ws->phase > ZSTD_cwksp_alloc_buffers) {
        ws->phase = ZSTD_cwksp_alloc_buffers;
    }
    ZSTD_cwksp_assert_internal_consistency(ws);
}

MEM_STATIC void ZSTD_cwksp_init(ZSTD_cwksp* ws, void* start, size_t size, ZSTD_cwksp_static_alloc_e isStatic) {
    DEBUGLOG(4, "cwksp: init'ing workspace with %zd bytes", size);
    assert(((size_t)start & (sizeof(void*)-1)) == 0);  /* ensure correct alignment */
    ws->workspace = start;
    ws->workspaceEnd = (BYTE*)start + size;
    ws->objectEnd = ws->workspace;
    ws->tableValidEnd = ws->objectEnd;
    ws->phase = ZSTD_cwksp_alloc_objects;
    ws->isStatic = isStatic;
    ZSTD_cwksp_clear(ws);
    ws->workspaceOversizedDuration = 0;
    ZSTD_cwksp_assert_internal_consistency(ws);
}

MEM_STATIC size_t ZSTD_cwksp_create(ZSTD_cwksp* ws, size_t size, ZSTD_customMem customMem) {
    void* workspace = ZSTD_customMalloc(size, customMem);
    DEBUGLOG(4, "cwksp: creating new workspace with %zd bytes", size);
    RETURN_ERROR_IF(workspace == NULL, memory_allocation, "NULL pointer!");
    ZSTD_cwksp_init(ws, workspace, size, ZSTD_cwksp_dynamic_alloc);
    return 0;
}

MEM_STATIC void ZSTD_cwksp_free(ZSTD_cwksp* ws, ZSTD_customMem customMem) {
    void *ptr = ws->workspace;
    DEBUGLOG(4, "cwksp: freeing workspace");
    ZSTD_memset(ws, 0, sizeof(ZSTD_cwksp));
    ZSTD_customFree(ptr, customMem);
}

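A minimal lifecycle sketch tying create, clear, and free together (the size, the helper, and the error handling are illustrative; ZSTD_isError() is the usual zstd error check):

/* Illustrative: dynamically allocate a 1 MiB workspace, reuse it, free it. */
static size_t example_lifecycle(ZSTD_customMem customMem) {
    ZSTD_cwksp ws;
    size_t const err = ZSTD_cwksp_create(&ws, 1 << 20, customMem);
    if (ZSTD_isError(err)) return err;
    /* ... reserve objects/buffers/tables and run a compression ... */
    ZSTD_cwksp_clear(&ws);            /* recycle allocations, keep the memory */
    /* ... second use ... */
    ZSTD_cwksp_free(&ws, customMem);  /* release the backing allocation */
    return 0;
}
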
MEM_STATIC size_t ZSTD_cwksp_sizeof(const ZSTD_cwksp* ws) {
    return (size_t)((BYTE*)ws->workspaceEnd - (BYTE*)ws->workspace);
}

MEM_STATIC size_t ZSTD_cwksp_used(const ZSTD_cwksp* ws) {
    return (size_t)((BYTE*)ws->tableEnd - (BYTE*)ws->workspace)
         + (size_t)((BYTE*)ws->workspaceEnd - (BYTE*)ws->allocStart);
}

MEM_STATIC int ZSTD_cwksp_reserve_failed(const ZSTD_cwksp* ws) {
    return ws->allocFailed;
}

MEM_STATIC size_t ZSTD_cwksp_available_space(ZSTD_cwksp* ws) {
    return (size_t)((BYTE*)ws->allocStart - (BYTE*)ws->tableEnd);
}

MEM_STATIC int ZSTD_cwksp_check_available(ZSTD_cwksp* ws, size_t additionalNeededSpace) {
    return ZSTD_cwksp_available_space(ws) >= additionalNeededSpace;
}

MEM_STATIC int ZSTD_cwksp_check_too_large(ZSTD_cwksp* ws, size_t additionalNeededSpace) {
    return ZSTD_cwksp_check_available(
        ws, additionalNeededSpace * ZSTD_WORKSPACETOOLARGE_FACTOR);
}

MEM_STATIC int ZSTD_cwksp_check_wasteful(ZSTD_cwksp* ws, size_t additionalNeededSpace) {
    return ZSTD_cwksp_check_too_large(ws, additionalNeededSpace)
        && ws->workspaceOversizedDuration > ZSTD_WORKSPACETOOLARGE_MAXDURATION;
}

MEM_STATIC void ZSTD_cwksp_bump_oversized_duration(
        ZSTD_cwksp* ws, size_t additionalNeededSpace) {
    if (ZSTD_cwksp_check_too_large(ws, additionalNeededSpace)) {
        ws->workspaceOversizedDuration++;
    } else {
        ws->workspaceOversizedDuration = 0;
    }
}

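These last helpers implement a shrink heuristic: a workspace counts as wasteful once it has been oversized by more than ZSTD_WORKSPACETOOLARGE_FACTOR for more than ZSTD_WORKSPACETOOLARGE_MAXDURATION consecutive uses. A sketch of how a caller might act on that (a hypothetical helper, not part of this header):

/* Illustrative: reallocate the workspace tightly once it has stayed
 * oversized for too long. */
static size_t example_maybe_shrink(ZSTD_cwksp* ws, size_t neededSpace,
                                   ZSTD_customMem customMem) {
    ZSTD_cwksp_bump_oversized_duration(ws, neededSpace);
    if (ZSTD_cwksp_check_wasteful(ws, neededSpace)) {
        ZSTD_cwksp_free(ws, customMem);
        return ZSTD_cwksp_create(ws, neededSpace, customMem);
    }
    return 0;
}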