package it.cavallium.dbengine.client;

import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs;
import it.cavallium.dbengine.database.Column;
import it.cavallium.dbengine.database.LLKeyValueDatabase;
import it.cavallium.dbengine.database.collections.DatabaseMapDictionary;
import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep;
import it.cavallium.dbengine.database.collections.DatabaseStageEntry;
import it.cavallium.dbengine.database.collections.DatabaseStageMap;
import it.cavallium.dbengine.database.collections.SubStageGetterMap;
import it.cavallium.dbengine.database.collections.SubStageGetterMapDeep;
import it.cavallium.dbengine.database.collections.SubStageGetterSingleBytes;
import it.cavallium.dbengine.database.disk.LLLocalDatabaseConnection;
import it.cavallium.dbengine.database.serialization.Serializer;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.DecimalFormat;
import java.time.Duration;
import java.time.Instant;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.CompletionException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.Sinks;
import reactor.core.publisher.Sinks.One;
import reactor.core.scheduler.Schedulers;
import reactor.util.function.Tuples;

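/**
 * Standalone speed test for the reactive collection wrappers: every benchmark method creates a fresh
 * temporary database via {@link #tempDb()}, runs a batched write workload {@code numRepeats} times through
 * {@link #test}, and prints timing statistics to stdout.
 */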
public class SpeedExample {

	public static final boolean printPreviousValue = false;
	public static final int numRepeats = 1000;
	public static final int batchSize = 1000;

	public static void main(String[] args) throws InterruptedException {
		rangeTestPutMultiSame()
				.then(rangeTestPutMultiProgressive())
				.then(testPutMulti())
				.then(testPutValue())
				.then(testAtPut())
				.then(test2LevelPut())
				.then(test3LevelPut())
				.then(test4LevelPut())
				.subscribeOn(Schedulers.parallel())
				.blockOptional();
	}
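	/** Benchmarks writes through a two-level nested map (4-byte key -> 4-byte key -> 4-byte value), batchSize entries per repeat. */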
	private static Mono<Void> test2LevelPut() {
		var k1ser = SerializerFixedBinaryLength.noop(4);
		var k2ser = SerializerFixedBinaryLength.noop(4);
		var vser = SerializerFixedBinaryLength.noop(4);
		var ssg = new SubStageGetterMap<byte[], byte[]>(k2ser, vser);
		return test("2 level put",
				tempDb()
						.flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
						.map(tuple -> tuple.mapT2(dict -> DatabaseMapDictionaryDeep.deepTail(dict,
								k1ser,
								ssg.getKeyBinaryLength(),
								ssg
						))),
				tuple -> Flux.range(0, batchSize).flatMap(n -> {
					var itemKey1 = Ints.toByteArray(n / 4);
					var itemKey2 = Ints.toByteArray(n);
					var newValue = Ints.toByteArray(n);
					return Mono
							.defer(() -> Mono
									.fromRunnable(() -> {
										if (printPreviousValue)
											System.out.println("Setting new value at key " + Arrays.toString(itemKey1) + " + " + Arrays.toString(itemKey2) + ": " + Arrays.toString(newValue));
									})
									.then(tuple.getT2().at(null, itemKey1))
									.map(handle -> (DatabaseStageMap<byte[], byte[], DatabaseStageEntry<byte[]>>) handle)
									.flatMap(handleK1 -> handleK1.at(null, itemKey2))
									.flatMap(handleK2 -> handleK2.setAndGetPrevious(newValue))
									.doOnSuccess(oldValue -> {
										if (printPreviousValue)
											System.out.println("Old value: " + (oldValue == null ? "None" : Arrays.toString(oldValue)));
									})
							);
				}).then(),
				numRepeats,
				tuple -> tuple.getT1().close());
	}
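	/** Same workload as {@link #test2LevelPut()} but with three key levels (4 + 8 + 4 byte keys). */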
	private static Mono<Void> test3LevelPut() {
		var k1ser = SerializerFixedBinaryLength.noop(4);
		var k2ser = SerializerFixedBinaryLength.noop(8);
		var k3ser = SerializerFixedBinaryLength.noop(4);
		var vser = SerializerFixedBinaryLength.noop(4);
		var ssg3 = new SubStageGetterMap<byte[], byte[]>(k3ser, vser);
		var ssg2 = new SubStageGetterMapDeep<>(ssg3, k2ser, ssg3.getKeyBinaryLength());
		return test("3 level put",
				tempDb()
						.flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
						.map(tuple -> tuple.mapT2(dict -> {
							return DatabaseMapDictionaryDeep
									.deepTail(dict, k1ser, ssg2.getKeyBinaryLength(), ssg2);
						})),
				tuple -> Flux.range(0, batchSize).flatMap(n -> {
					var itemKey1 = Ints.toByteArray(n / 4);
					var itemKey2 = Longs.toByteArray(n);
					var itemKey3 = Ints.toByteArray(n);
					var newValue = Ints.toByteArray(n);
					return Mono
							.defer(() -> Mono
									.fromRunnable(() -> {
										if (printPreviousValue)
											System.out.println("Setting new value at key " + Arrays.toString(itemKey1) + " + " + Arrays.toString(itemKey2) + " + " + Arrays.toString(itemKey3) + ": " + Arrays.toString(newValue));
									})
									.then(tuple.getT2().at(null, itemKey1))
									.flatMap(handleK1 -> handleK1.at(null, itemKey2))
									.flatMap(handleK2 -> handleK2.at(null, itemKey3))
									.flatMap(handleK3 -> handleK3.setAndGetPrevious(newValue))
									.doOnSuccess(oldValue -> {
										if (printPreviousValue)
											System.out.println("Old value: " + (oldValue == null ? "None" : Arrays.toString(oldValue)));
									})
							);
				}).then(),
				numRepeats,
				tuple -> tuple.getT1().close());
	}
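	/** Same workload with four key levels (4 + 8 + 4 + 8 byte keys). */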
	private static Mono<Void> test4LevelPut() {
		var k1ser = SerializerFixedBinaryLength.noop(4);
		var k2ser = SerializerFixedBinaryLength.noop(8);
		var k3ser = SerializerFixedBinaryLength.noop(4);
		var k4ser = SerializerFixedBinaryLength.noop(8);
		var vser = SerializerFixedBinaryLength.noop(4);
		SubStageGetterMap<byte[], byte[]> ssg4 = new SubStageGetterMap<>(k4ser, vser);
		var ssg3 = new SubStageGetterMapDeep<>(ssg4, k3ser, ssg4.getKeyBinaryLength());
		var ssg2 = new SubStageGetterMapDeep<>(ssg3, k2ser, ssg3.getKeyBinaryLength());
		return test("4 level put",
				tempDb()
						.flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
						.map(tuple -> tuple.mapT2(dict -> DatabaseMapDictionaryDeep
								.deepTail(dict, k1ser, ssg2.getKeyBinaryLength(), ssg2))),
				tuple -> Flux.range(0, batchSize).flatMap(n -> {
					var itemKey1 = Ints.toByteArray(n / 4);
					var itemKey2 = Longs.toByteArray(n);
					var itemKey3 = Ints.toByteArray(n * 2);
					var itemKey4 = Longs.toByteArray(n * 3L);
					var newValue = Ints.toByteArray(n * 4);
					return Mono
							.defer(() -> Mono
									.fromRunnable(() -> {
										if (printPreviousValue)
											System.out.println("Setting new value at key " + Arrays.toString(itemKey1) + " + " + Arrays.toString(itemKey2) + " + " + Arrays.toString(itemKey3) + " + " + Arrays.toString(itemKey4) + ": " + Arrays.toString(newValue));
									})
									.then(tuple.getT2().at(null, itemKey1))
									.flatMap(handleK1 -> handleK1.at(null, itemKey2))
									.flatMap(handleK2 -> handleK2.at(null, itemKey3))
									.flatMap(handleK3 -> handleK3.at(null, itemKey4))
									.flatMap(handleK4 -> handleK4.setAndGetPrevious(newValue))
									.doOnSuccess(oldValue -> {
										if (printPreviousValue)
											System.out.println("Old value: " + (oldValue == null ? "None" : Arrays.toString(oldValue)));
									})
							);
				}).then(),
				numRepeats,
				tuple -> tuple.getT1().close());
	}
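	/** Repeatedly writes the same single key/value pair via at() followed by setAndGetPrevious() on a simple DatabaseMapDictionaryDeep. */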
	private static Mono<Void> testAtPut() {
		var ssg = new SubStageGetterSingleBytes();
		var ser = SerializerFixedBinaryLength.noop(4);
		var itemKey = new byte[] {0, 1, 2, 3};
		var newValue = new byte[] {4, 5, 6, 7};
		return test("MapDictionaryDeep::at::put (same key, same value, " + batchSize + " times)",
				tempDb()
						.flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
						.map(tuple -> tuple.mapT2(dict -> DatabaseMapDictionaryDeep.simple(dict, ser, ssg))),
				tuple -> Flux.range(0, batchSize).flatMap(n -> Mono
						.defer(() -> Mono
								.fromRunnable(() -> {
									if (printPreviousValue)
										System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
								})
								.then(tuple.getT2().at(null, itemKey))
								.flatMap(handle -> handle.setAndGetPrevious(newValue))
								.doOnSuccess(oldValue -> {
									if (printPreviousValue)
										System.out.println("Old value: " + (oldValue == null ? "None" : Arrays.toString(oldValue)));
								})
						))
						.then(),
				numRepeats,
				tuple -> tuple.getT1().close());
	}
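	/** Variant of {@link #testAtPut()} that uses putValueAndGetPrevious() directly; currently not invoked from main(). */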
	private static Mono<Void> testPutValueAndGetPrevious() {
		var ssg = new SubStageGetterSingleBytes();
		var ser = SerializerFixedBinaryLength.noop(4);
		var itemKey = new byte[] {0, 1, 2, 3};
		var newValue = new byte[] {4, 5, 6, 7};
		return test("MapDictionaryDeep::putValueAndGetPrevious (same key, same value, " + batchSize + " times)",
				tempDb()
						.flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
						.map(tuple -> tuple.mapT2(dict -> DatabaseMapDictionaryDeep.simple(dict, ser, ssg))),
				tuple -> Flux.range(0, batchSize).flatMap(n -> Mono
						.defer(() -> Mono
								.fromRunnable(() -> {
									if (printPreviousValue)
										System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
								})
								.then(tuple.getT2().putValueAndGetPrevious(itemKey, newValue))
								.doOnSuccess(oldValue -> {
									if (printPreviousValue)
										System.out.println("Old value: " + (oldValue == null ? "None" : Arrays.toString(oldValue)));
								})
						))
						.then(),
				numRepeats,
				tuple -> tuple.getT1().close());
	}
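	/** Repeatedly writes the same single key/value pair with putValue(), ignoring the previous value. */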
	private static Mono<Void> testPutValue() {
		var ssg = new SubStageGetterSingleBytes();
		var ser = SerializerFixedBinaryLength.noop(4);
		var itemKey = new byte[] {0, 1, 2, 3};
		var newValue = new byte[] {4, 5, 6, 7};
		return test("MapDictionaryDeep::putValue (same key, same value, " + batchSize + " times)",
				tempDb()
						.flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
						.map(tuple -> tuple.mapT2(dict -> DatabaseMapDictionaryDeep.simple(dict, ser, ssg))),
				tuple -> Flux.range(0, batchSize).flatMap(n -> Mono
						.defer(() -> Mono
								.fromRunnable(() -> {
									if (printPreviousValue)
										System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
								})
								.then(tuple.getT2().putValue(itemKey, newValue))
						))
						.then(),
				numRepeats,
				tuple -> tuple.getT1().close());
	}
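	/** Writes a batch of batchSize distinct entries with putMulti() on each repeat, then prints the resulting map size. */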
	private static Mono<Void> testPutMulti() {
		var ssg = new SubStageGetterSingleBytes();
		var ser = SerializerFixedBinaryLength.noop(4);
		HashMap<byte[], byte[]> keysToPut = new HashMap<>();
		for (int i = 0; i < batchSize; i++) {
			keysToPut.put(Ints.toByteArray(i * 3), Ints.toByteArray(i * 11));
		}
		var putMultiFlux = Flux.fromIterable(keysToPut.entrySet());
		return test("MapDictionaryDeep::putMulti (batch of " + batchSize + " entries)",
				tempDb()
						.flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
						.map(tuple -> tuple.mapT2(dict -> DatabaseMapDictionaryDeep.simple(dict, ser, ssg))),
				tuple -> Mono.defer(() -> tuple.getT2().putMulti(putMultiFlux)),
				numRepeats,
				tuple -> Mono
						.fromRunnable(() -> System.out.println("Calculating size"))
						.then(tuple.getT2().size(null, false))
						.doOnNext(s -> System.out.println("Size after: " + s))
						.then(tuple.getT1().close())
		);
	}
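	/** DatabaseMapDictionary counterpart of {@link #testAtPut()}; currently not invoked from main(). */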
	private static Mono<Void> rangeTestAtPut() {
		var ser = SerializerFixedBinaryLength.noop(4);
		var vser = Serializer.noop();
		var itemKey = new byte[] {0, 1, 2, 3};
		var newValue = new byte[] {4, 5, 6, 7};
		return test("MapDictionary::at::put (same key, same value, " + batchSize + " times)",
				tempDb()
						.flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
						.map(tuple -> tuple.mapT2(dict -> DatabaseMapDictionary.simple(dict, ser, vser))),
				tuple -> Flux.range(0, batchSize).flatMap(n -> Mono
						.defer(() -> Mono
								.fromRunnable(() -> {
									if (printPreviousValue)
										System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
								})
								.then(tuple.getT2().at(null, itemKey))
								.flatMap(handle -> handle.setAndGetPrevious(newValue))
								.doOnSuccess(oldValue -> {
									if (printPreviousValue)
										System.out.println("Old value: " + (oldValue == null ? "None" : Arrays.toString(oldValue)));
								})
						))
						.then(),
				numRepeats,
				tuple -> tuple.getT1().close());
	}
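	/** DatabaseMapDictionary counterpart of {@link #testPutValueAndGetPrevious()}; currently not invoked from main(). */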
	private static Mono<Void> rangeTestPutValueAndGetPrevious() {
		var ser = SerializerFixedBinaryLength.noop(4);
		var vser = Serializer.noop();
		var itemKey = new byte[] {0, 1, 2, 3};
		var newValue = new byte[] {4, 5, 6, 7};
		return test("MapDictionary::putValueAndGetPrevious (same key, same value, " + batchSize + " times)",
				tempDb()
						.flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
						.map(tuple -> tuple.mapT2(dict -> DatabaseMapDictionary.simple(dict, ser, vser))),
				tuple -> Flux.range(0, batchSize).flatMap(n -> Mono
						.defer(() -> Mono
								.fromRunnable(() -> {
									if (printPreviousValue)
										System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
								})
								.then(tuple.getT2().putValueAndGetPrevious(itemKey, newValue))
								.doOnSuccess(oldValue -> {
									if (printPreviousValue)
										System.out.println("Old value: " + (oldValue == null ? "None" : Arrays.toString(oldValue)));
								})
						))
						.then(),
				numRepeats,
				tuple -> tuple.getT1().close());
	}
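	/** DatabaseMapDictionary counterpart of {@link #testPutValue()}; currently not invoked from main(). */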
	private static Mono<Void> rangeTestPutValue() {
		var ser = SerializerFixedBinaryLength.noop(4);
		var vser = Serializer.noop();
		var itemKey = new byte[] {0, 1, 2, 3};
		var newValue = new byte[] {4, 5, 6, 7};
		return test("MapDictionary::putValue (same key, same value, " + batchSize + " times)",
				tempDb()
						.flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
						.map(tuple -> tuple.mapT2(dict -> DatabaseMapDictionary.simple(dict, ser, vser))),
				tuple -> Flux.range(0, batchSize).flatMap(n -> Mono
						.defer(() -> Mono
								.fromRunnable(() -> {
									if (printPreviousValue)
										System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
								})
								.then(tuple.getT2().putValue(itemKey, newValue))
						))
						.then(),
				numRepeats,
				tuple -> tuple.getT1().close());
	}
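	/** putMulti() benchmark that re-inserts the same batch of keys on every repeat, so the map size stays constant across repeats. */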
	private static Mono<Void> rangeTestPutMultiSame() {
		var ser = SerializerFixedBinaryLength.noop(4);
		var vser = Serializer.noop();
		HashMap<byte[], byte[]> keysToPut = new HashMap<>();
		for (int i = 0; i < batchSize; i++) {
			keysToPut.put(Ints.toByteArray(i * 3), Ints.toByteArray(i * 11));
		}
		return test("MapDictionary::putMulti (same keys, batch of " + batchSize + " entries)",
				tempDb()
						.flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
						.map(tuple -> tuple.mapT2(dict -> DatabaseMapDictionary.simple(dict, ser, vser))),
				tuple -> Mono
						.defer(() -> tuple.getT2().putMulti(Flux.fromIterable(keysToPut.entrySet()))),
				numRepeats,
				tuple -> Mono
						.fromRunnable(() -> System.out.println("Calculating size"))
						.then(tuple.getT2().size(null, false))
						.doOnNext(s -> System.out.println("Size after: " + s))
						.then(tuple.getT1().close())
		);
	}
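	/** putMulti() benchmark that builds a freshly shifted batch of keys on every repeat. */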
	private static Mono<Void> rangeTestPutMultiProgressive() {
		var ser = SerializerFixedBinaryLength.noop(4);
		var vser = Serializer.noop();
		AtomicInteger ai = new AtomicInteger(0);
		return test("MapDictionary::putMulti (progressive keys, batch of " + batchSize + " entries)",
				tempDb()
						.flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
						.map(tuple -> tuple.mapT2(dict -> DatabaseMapDictionary.simple(dict, ser, vser))),
				tuple -> Mono
						.defer(() -> {
							var aiv = ai.incrementAndGet();
							HashMap<byte[], byte[]> keysToPut = new HashMap<>();
							for (int i = 0; i < batchSize; i++) {
								keysToPut.put(
										Ints.toByteArray(i * 3 + (batchSize * aiv)),
										Ints.toByteArray(i * 11 + (batchSize * aiv))
								);
							}
							return tuple.getT2().putMulti(Flux.fromIterable(keysToPut.entrySet()));
						}),
				numRepeats,
				tuple -> Mono
						.fromRunnable(() -> System.out.println("Calculating size"))
						.then(tuple.getT2().size(null, false))
						.doOnNext(s -> System.out.println("Size after: " + s))
						.then(tuple.getT1().close())
		);
	}
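	/** Wipes and recreates the /tmp/tempdb/ workspace, then opens a fresh "testdb" database containing a single "testmap" dictionary column. */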
	public static <U> Mono<? extends LLKeyValueDatabase> tempDb() {
		var wrkspcPath = Path.of("/tmp/tempdb/");
		return Mono
				.fromCallable(() -> {
					if (Files.exists(wrkspcPath)) {
						Files.walk(wrkspcPath)
								.sorted(Comparator.reverseOrder())
								.forEach(file -> {
									try {
										Files.delete(file);
									} catch (IOException ex) {
										throw new CompletionException(ex);
									}
								});
					}
					Files.createDirectories(wrkspcPath);
					return null;
				})
				.subscribeOn(Schedulers.boundedElastic())
				.then(new LLLocalDatabaseConnection(wrkspcPath, true).connect())
				.flatMap(conn -> conn.getDatabase("testdb", List.of(Column.dictionary("testmap")), false));
	}
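	/**
	 * Generic benchmark harness: after a short warm-up delay it runs {@code setup}, executes {@code test}
	 * {@code numRepeats} times, runs {@code close}, and prints per-iteration and total timings derived from
	 * the four recorded instants.
	 */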
	public static <A, B, C> Mono<Void> test(String name,
			Mono<A> setup,
			Function<A, Mono<B>> test,
			long numRepeats,
			Function<A, Mono<C>> close) {
		One<Instant> instantInit = Sinks.one();
		One<Instant> instantInitTest = Sinks.one();
		One<Instant> instantEndTest = Sinks.one();
		One<Instant> instantEnd = Sinks.one();
		Duration WAIT_TIME = Duration.ofSeconds(5);
		Duration WAIT_TIME_END = Duration.ofSeconds(5);
		return Mono
				.delay(WAIT_TIME)
				.doOnSuccess(s -> {
					System.out.println("----------------------------------------------------------------------");
					System.out.println(name);
				})
				.then(Mono.fromRunnable(() -> instantInit.tryEmitValue(now())))
				.then(setup)
				.doOnSuccess(s -> instantInitTest.tryEmitValue(now()))
				.flatMap(a -> Mono.defer(() -> test.apply(a)).repeat(numRepeats - 1)
						.then()
						.doOnSuccess(s -> instantEndTest.tryEmitValue(now()))
						.then(close.apply(a)))
				.doOnSuccess(s -> instantEnd.tryEmitValue(now()))
				.then(Mono.zip(instantInit.asMono(), instantInitTest.asMono(), instantEndTest.asMono(), instantEnd.asMono()))
				.doOnSuccess(tuple -> {
					System.out.println(
							"\t - Executed " + DecimalFormat.getInstance(Locale.ITALY).format((numRepeats * batchSize)) + " times:");
					System.out.println("\t - Test time: " + DecimalFormat
							.getInstance(Locale.ITALY)
							.format(Duration.between(tuple.getT2(), tuple.getT3()).toNanos() / (double) (numRepeats * batchSize) / (double) 1000000)
							+ " ms");
					System.out.println("\t - Test speed: " + DecimalFormat
							.getInstance(Locale.ITALY)
							.format((numRepeats * batchSize) / (Duration.between(tuple.getT2(), tuple.getT3()).toNanos() / (double) 1000000 / (double) 1000))
							+ " tests/s");
					System.out.println("\t - Total time: " + DecimalFormat
							.getInstance(Locale.ITALY)
							.format(Duration.between(tuple.getT2(), tuple.getT3()).toNanos() / (double) 1000000) + " ms");
					System.out.println("\t - Total time (setup+test+end): " + DecimalFormat
							.getInstance(Locale.ITALY)
							.format(Duration.between(tuple.getT1(), tuple.getT4()).toNanos() / (double) 1000000) + " ms");
					System.out.println("----------------------------------------------------------------------");
				})
				.delayElement(WAIT_TIME_END)
				.then();
	}
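	/** Monotonic timestamp built from System.nanoTime(); only meaningful for computing durations. */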
	public static Instant now() {
		return Instant.ofEpochSecond(0, System.nanoTime());
	}
}