Initial try on reading data back.

Mark Nellemann 2023-06-28 10:40:45 +02:00
parent 0f5ed50e86
commit a7f0bc4822
5 changed files with 79 additions and 48 deletions

README.md

@@ -1,9 +1,41 @@
# Memory Performance Test
## Examples
```shell
java -Xms128g -Xmx128g -XX:+UseLargePages -XX:+AlwaysPreTouch \
-XX:-UseParallelGC -XX:MaxGCPauseMillis=500 -Xgcthreads3 \
-jar memstress-0.0.1-all.jar -t 96
```
```shell
java -Xms144g -Xmx144g -XX:+UseLargePages -XX:+AlwaysPreTouch \
-XX:-UseParallelGC -XX:MaxGCPauseMillis=500 -Xgcthreads3 \
-jar memstress-0.0.1-all.jar -t 128
```
```shell
java -Xms160g -Xmx160g -XX:+UseLargePages -XX:+AlwaysPreTouch \
-XX:-UseParallelGC -XX:MaxGCPauseMillis=500 -Xgcthreads3 \
-jar memstress-0.0.1-all.jar -t 144
```
```shell
java -Xms192g -Xmx192g -XX:+UseLargePages -XX:+AlwaysPreTouch \
-XX:-UseParallelGC -XX:MaxGCPauseMillis=500 -Xgcthreads3 \
-jar memstress-0.0.1-all.jar -t 160
```
```shell
java -Xms240g -Xmx240g -XX:+UseLargePages -XX:+AlwaysPreTouch \
-XX:-UseParallelGC -XX:MaxGCPauseMillis=500 -Xgcthreads3 \
-jar memstress-0.0.1-all.jar -t 192
```
```shell
java -Xms256g -Xmx256g -XX:+UseLargePages -XX:+AlwaysPreTouch \
-XX:-UseParallelGC -XX:MaxGCPauseMillis=500 -Xgcthreads3 \
-jar memstress-0.0.1-all.jar -t 240
```

build.gradle

@@ -2,7 +2,6 @@ plugins {
id 'groovy'
id 'application'
id "net.nemerosa.versioning" version "2.15.1"
id "com.netflix.nebula.ospackage" version "11.2.0"
id 'com.github.johnrengelman.shadow' version '8.1.1'
}
@@ -55,41 +54,3 @@ jar {
)
}
}
apply plugin: 'com.netflix.nebula.ospackage'
ospackage {
packageName = 'memstress'
release = '1'
user = 'root'
packager = "Mark Nellemann <mark.nellemann@gmail.com>"
into '/opt/memstress'
from(shadowJar.outputs.files) {
into 'lib'
}
from('build/scriptsShadow') {
into 'bin'
}
from(['README.md', 'LICENSE']) {
into 'doc'
}
}
buildDeb {
dependsOn build, startShadowScripts
}
buildRpm {
dependsOn build, startShadowScripts
os = org.redline_rpm.header.Os.LINUX
}
tasks.register("packages") {
group "build"
dependsOn ":buildDeb"
dependsOn ":buildRpm"
}

Application.java

@@ -27,9 +27,8 @@ public class Application implements Callable<Integer> {
public Integer call() throws Exception {
MyDatabase database = new MyDatabase(maxTables, maxRowsPerTable, maxDataPerRow);
database.build("testDb");
System.out.println("TODO: How to search / read from data stored in rows?");
database.write("testDb");
database.read("testDb");
Scanner scanner = new Scanner(System.in);
System.out.println("Press ENTER to stop");

MyDatabase.java

@@ -12,7 +12,7 @@ import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Random;
import java.util.concurrent.atomic.AtomicLong;
public class MyDatabase {
@@ -22,7 +22,6 @@ public class MyDatabase {
private final int BYTE_SIZE_1GB = 1_000_000_000;
private final DatabaseManager databaseManager = new DatabaseManager();
private final Random random = new Random();
// Use when searching or using later?
private final ArrayList<Table> tables = new ArrayList<Table>();
@@ -49,9 +48,11 @@
}
public Database build(String dbName) {
public void write(String dbName) {
Instant instant1 = Instant.now();
Database database = databaseManager.createDatabase(dbName);
AtomicLong bytesWritten = new AtomicLong();
for (int t = 1; t <= maxTables; t++) {
String tableName = String.format("table_%d", t);
@@ -64,6 +65,7 @@ public class MyDatabase {
HashMap<String, ByteBuffer> map = new HashMap<String, ByteBuffer>();
for (int m = 1; m <= maxDataPerRow; m++) {
map.put(randomString(), randomBytes());
bytesWritten.addAndGet(byteBase.length);
}
table.insertEntry(rowIdx, map);
}
@@ -72,10 +74,31 @@
}
Instant instant2 = Instant.now();
log.info("Done building in-memory database \"{}\" in {}", dbName, Duration.between(instant1, instant2));
return database;
log.info("Done writing {} to \"{}\" in {}", bytesWritten, dbName, Duration.between(instant1, instant2));
}
public void read(String dbName) {
Instant instant1 = Instant.now();
Database database = databaseManager.getDatabase(dbName);
AtomicLong bytesRead = new AtomicLong();
for(Table table : tables) {
table.getRows().forEach((idx, row) -> {
HashMap<String, ByteBuffer> values = row.getColumnValuesMap();
values.forEach((str, byteBuffer) -> {
byte[] array = byteBuffer.array();
bytesRead.addAndGet(array.length);
});
});
}
Instant instant2 = Instant.now();
log.info("Done reading {} from \"{}\" in {}", bytesRead.get(), dbName, Duration.between(instant1, instant2));
}
String randomString() {
baseCar[(idx++) % 128]++;
return new String(baseCar);
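
The new read() walks the tables list kept inside MyDatabase itself (the Database looked up via getDatabase() is fetched but not otherwise used) and relies on Table.getRows() and Row.getColumnValuesMap(), neither of which is part of this diff. The following is only a hypothetical sketch of the shapes read() assumes; the class and accessor names come from the diff, while field names, key types, and generics are guesses:

```java
// Hypothetical shapes inferred from how write()/read() use them; these are
// not the project's actual Table/Row sources, which this commit does not touch.
import java.nio.ByteBuffer;
import java.util.HashMap;

class Row {
    private final HashMap<String, ByteBuffer> columnValuesMap;

    Row(HashMap<String, ByteBuffer> columnValuesMap) {
        this.columnValuesMap = columnValuesMap;   // column name -> raw bytes
    }

    HashMap<String, ByteBuffer> getColumnValuesMap() {
        return columnValuesMap;                   // read() sums byteBuffer.array().length over this
    }
}

class Table {
    private final HashMap<Integer, Row> rows = new HashMap<>();   // key type is a guess

    void insertEntry(int rowIdx, HashMap<String, ByteBuffer> values) {
        rows.put(rowIdx, new Row(values));        // mirrors table.insertEntry(rowIdx, map) in write()
    }

    HashMap<Integer, Row> getRows() {
        return rows;                              // iterated with forEach((idx, row) -> ...) in read()
    }
}
```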

DatabaseManager.java

@@ -3,14 +3,19 @@
*/
package biz.nellemann.memstress.db;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
public class DatabaseManager {
final Logger log = LoggerFactory.getLogger(DatabaseManager.class);
private HashMap<String, Database> databaseHashMap;
public Database createDatabase(String databaseName) {
if (databaseHashMap.containsKey(databaseName)) {
System.out.println("A database already exists with this name");
log.warn("createDatabase() - A database already exists with this name: {}", databaseName);
} else {
databaseHashMap.put(databaseName, new Database(databaseName));
}
@@ -21,6 +26,17 @@ public class DatabaseManager {
databaseHashMap.remove(databaseName);
}
public Database getDatabase(String databaseName) {
if (databaseHashMap.containsKey(databaseName)) {
return databaseHashMap.get(databaseName);
} else {
log.warn("getDatabase() - Database was not found: {}", databaseName);
}
return null;
}
public DatabaseManager() {
this.databaseHashMap = new HashMap<>();
}
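
Since getDatabase() now logs a warning and returns null for an unknown name rather than throwing, callers need a null check before using the result. A small usage sketch under that assumption; the database name is just an example mirroring the one used in Application above:

```java
// Usage sketch only, not code from this commit.
DatabaseManager manager = new DatabaseManager();
manager.createDatabase("testDb");

Database db = manager.getDatabase("testDb");
if (db == null) {
    // null means no database with that name was registered; fail fast here
    // instead of hitting a NullPointerException later on.
    throw new IllegalStateException("Database not found: testDb");
}
```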