43
43
* (e.g. {@link MemorySegment#allocateNative(long, long, ResourceScope)}); since {@link SegmentAllocator} is a <em>functional interface</em>,
44
44
* clients can easily obtain a native allocator by using either a lambda expression or a method reference.
45
45
* <p>
46
- * This interface also defines factories for commonly used allocators; for instance {@link #arenaAllocator (ResourceScope)}
47
- * and {@link #arenaAllocator (long, ResourceScope)} are arena-style native allocators. Finally {@link #prefixAllocator(MemorySegment)}
46
+ * This interface also defines factories for commonly used allocators; for instance {@link #arenaUnbounded (ResourceScope)}
47
+ * and {@link #arenaBounded (long, ResourceScope)} are arena-style native allocators. Finally {@link #prefixAllocator(MemorySegment)}
48
48
* returns an allocator which wraps a segment (either on-heap or off-heap) and recycles its content upon each new allocation request.
49
49
*/
50
50
@ FunctionalInterface
@@ -319,8 +319,8 @@ default MemorySegment allocate(long bytesSize) {
319
319
MemorySegment allocate (long bytesSize , long bytesAlignment );
320
320
321
321
/**
322
- * Returns a native arena-based allocator which allocates a single memory segment, of given size (using malloc),
323
- * and then responds to allocation request by returning different slices of that same segment
322
+ * Returns a native arena-based allocator which {@linkplain MemorySegment#allocateNative(long, ResourceScope) allocates}
323
+ * a single memory segment, of given size, and then responds to allocation requests by returning different slices of that same segment
324
324
* (until no further allocation is possible).
325
325
* This can be useful when clients want to perform multiple allocation requests while avoiding the cost associated
326
326
* with allocating a new off-heap memory region upon each allocation request.
@@ -333,32 +333,70 @@ default MemorySegment allocate(long bytesSize) {
333
333
* the allocator capacity.
334
334
*
335
335
* @param size the size (in bytes) of the allocation arena.
336
- * @param scope the scope associated with the segments returned by this allocator.
336
+ * @param scope the scope associated with the segments returned by the arena-based allocator.
337
337
* @return a new bounded arena-based allocator
338
338
* @throws IllegalArgumentException if {@code size <= 0}.
339
339
* @throws IllegalStateException if {@code scope} has been already closed, or if access occurs from a thread other
340
340
* than the thread owning {@code scope}.
341
341
*/
342
- static SegmentAllocator arenaAllocator (long size , ResourceScope scope ) {
342
+ static SegmentAllocator arenaBounded (long size , ResourceScope scope ) {
343
343
Objects .requireNonNull (scope );
344
344
return scope .ownerThread () == null ?
345
345
new ArenaAllocator .BoundedSharedArenaAllocator (scope , size ) :
346
346
new ArenaAllocator .BoundedArenaAllocator (scope , size );
347
347
}
348
348
349
349
/**
350
- * Returns a native unbounded arena-based allocator.
350
+ * Returns a native unbounded arena-based allocator, with a predefined block size.
351
351
* <p>
352
- * The returned allocator allocates a memory segment {@code S} of a certain fixed size (using malloc) and then
353
- * responds to allocation requests in one of the following ways:
352
+ * The returned allocator {@linkplain MemorySegment#allocateNative(long, ResourceScope) allocates} a memory segment
353
+ * {@code S} of a certain (fixed) block size and then responds to allocation requests in one of the following ways:
354
354
* <ul>
355
355
* <li>if the size of the allocation request is smaller than the size of {@code S}, and {@code S} has a <em>free</em>
356
356
* slice {@code S'} which fits that allocation request, return that {@code S'}.
357
357
* <li>if the size of the allocation request is smaller than the size of {@code S}, and {@code S} has no <em>free</em>
358
- * slices which fits that allocation request, allocate a new segment {@code S'} (using malloc) , which has same size as {@code S}
358
+ * slices which fit that allocation request, allocate a new segment {@code S'}, which has the same size as {@code S}
359
359
* and set {@code S = S'}; the allocator then tries to respond to the same allocation request again.
360
- * <li>if the size of the allocation requests is bigger than the size of {@code S}, allocate a new segment {@code S'}
361
- * (using malloc), which has a sufficient size to satisfy the allocation request, and return {@code S'}.
360
+ * <li>if the size of the allocation request is bigger than the size of {@code S}, allocate a new segment {@code S'},
361
+ * which has a sufficient size to satisfy the allocation request, and return {@code S'}.
362
+ * </ul>
363
+ * <p>
364
+ * The block size of the returned arena-based allocator is unspecified, can be platform-dependent, and should generally
365
+ * not be relied upon. Clients can {@linkplain #arenaUnbounded(long, ResourceScope) obtain} an unbounded arena-based allocator
366
+ * with specific block size, if they so wish.
367
+ * <p>
368
+ * This segment allocator can be useful when clients want to perform multiple allocation requests while avoiding the
369
+ * cost associated with allocating a new off-heap memory region upon each allocation request.
370
+ * <p>
371
+ * An allocator associated with a <em>shared</em> resource scope is thread-safe and allocation requests may be
372
+ * performed concurrently; conversely, if the arena allocator is associated with a <em>confined</em> resource scope,
373
+ * allocation requests can only occur from the thread owning the allocator's resource scope.
374
+ * <p>
375
+ * The returned allocator might throw an {@link OutOfMemoryError} if an incoming allocation request exceeds
376
+ * the system capacity.
377
+ *
378
+ * @param scope the scope associated with the segments returned by the arena-based allocator.
379
+ * @return a new unbounded arena-based allocator
380
+ * @throws IllegalStateException if {@code scope} has been already closed, or if access occurs from a thread other
381
+ * than the thread owning {@code scope}.
382
+ */
383
+ static SegmentAllocator arenaUnbounded (ResourceScope scope ) {
384
+ return arenaUnbounded (ArenaAllocator .DEFAULT_BLOCK_SIZE , scope );
385
+ }
386
+
387
+ /**
388
+ * Returns a native unbounded arena-based allocator, with given block size.
389
+ * <p>
390
+ * The returned allocator {@linkplain MemorySegment#allocateNative(long, ResourceScope) allocates} a memory segment
391
+ * {@code S} of the specified block size and then responds to allocation requests in one of the following ways:
392
+ * <ul>
393
+ * <li>if the size of the allocation request is smaller than the size of {@code S}, and {@code S} has a <em>free</em>
394
+ * slice {@code S'} which fits that allocation request, return that {@code S'}.
395
+ * <li>if the size of the allocation request is smaller than the size of {@code S}, and {@code S} has no <em>free</em>
396
+ * slices which fit that allocation request, allocate a new segment {@code S'}, which has the same size as {@code S}
397
+ * and set {@code S = S'}; the allocator then tries to respond to the same allocation request again.
398
+ * <li>if the size of the allocation request is bigger than the size of {@code S}, allocate a new segment {@code S'},
399
+ * which has a sufficient size to satisfy the allocation request, and return {@code S'}.
362
400
* </ul>
363
401
* <p>
364
402
* This segment allocator can be useful when clients want to perform multiple allocation requests while avoiding the
@@ -371,16 +409,21 @@ static SegmentAllocator arenaAllocator(long size, ResourceScope scope) {
371
409
* The returned allocator might throw an {@link OutOfMemoryError} if an incoming allocation request exceeds
372
410
* the system capacity.
373
411
*
374
- * @param scope the scope associated with the segments returned by this allocator.
412
+ * @param blockSize the block size associated with the arena-based allocator.
413
+ * @param scope the scope associated with the segments returned by the arena-based allocator.
375
414
* @return a new unbounded arena-based allocator
415
+ * @throws IllegalArgumentException if {@code blockSize <= 0}.
376
416
* @throws IllegalStateException if {@code scope} has been already closed, or if access occurs from a thread other
377
417
* than the thread owning {@code scope}.
378
418
*/
379
- static SegmentAllocator arenaAllocator ( ResourceScope scope ) {
419
+ static SegmentAllocator arenaUnbounded ( long blockSize , ResourceScope scope ) {
380
420
Objects .requireNonNull (scope );
421
+ if (blockSize <= 0 ) {
422
+ throw new IllegalArgumentException ("Invalid block size: " + blockSize );
423
+ }
381
424
return scope .ownerThread () == null ?
382
- new ArenaAllocator .UnboundedSharedArenaAllocator (scope ) :
383
- new ArenaAllocator .UnboundedArenaAllocator (scope );
425
+ new ArenaAllocator .UnboundedSharedArenaAllocator (blockSize , scope ) :
426
+ new ArenaAllocator .UnboundedArenaAllocator (blockSize , scope );
384
427
}
385
428
386
429
/**
0 commit comments