Skip to content

GDB-11761 Moves the initialization of the field for the max batch size #28

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Aug 12, 2025
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
54 changes: 29 additions & 25 deletions src/main/java/com/ontotext/trree/plugin/mongodb/MongoDBPlugin.java
Original file line number Diff line number Diff line change
Expand Up @@ -48,27 +48,8 @@ public class MongoDBPlugin extends PluginBase implements Preprocessor, PatternIn
public static final IRI BATCH = SimpleValueFactory.getInstance().createIRI(NAMESPACE + "batchSize");

protected static final String MONGODB_PROPERTIES = "mongodb.properties";

public final int MAX_BATCH_SIZE;

{
int maxBatch;
try {
maxBatch = Integer.parseInt(System.getProperty("graphdb.mongodb.maxBatchSize", "1000"));
} catch (NumberFormatException e) {
getLogger().error("Invalid graphdb.mongodb.maxBatchSize: {}", System.getProperty(
"graphdb.mongodb.maxBatchSize"));
maxBatch = 1000;
}
if (maxBatch > 10000) {
getLogger().warn("graphdb.mongodb.maxBatchSize size is too large. Max allowed is 10000");
maxBatch = 10000;
}
if (maxBatch == 0) {
getLogger().info("MongoDB batch loading is disabled");
}
MAX_BATCH_SIZE = maxBatch;
}
private int maxBatchSize = 1000;

protected ValueFactory vf = SimpleValueFactory.getInstance();

Expand Down Expand Up @@ -163,6 +144,8 @@ public String getName() {
*/
@Override
public void initialize(InitReason initReason, PluginConnection pluginConnection) {
maxBatchSize = readMaxBatchSizeConfig();

Entities entities = pluginConnection.getEntities();
// register the predicates
serviceId = entities.put(SERVICE, Scope.SYSTEM);
Expand All @@ -183,7 +166,6 @@ public void initialize(InitReason initReason, PluginConnection pluginConnection)
collationId = entities.put(COLLATION, Scope.SYSTEM);
batchSize = entities.put(BATCH, Scope.SYSTEM);


predicateSet = new long[] {serviceId, databaseId, collectionId, userId, passwordId, authDbId, dropId, queryId,
projectionId, aggregationId, hintId, entityId, graphId, collationId, batchSize, rdf_type};
Arrays.sort(predicateSet);
Expand Down Expand Up @@ -508,15 +490,15 @@ private Integer readBatchSize(long object, Entities entities) {
Utils.getString(entities, object));
return null;
}
if (batchSizeCfg >= MAX_BATCH_SIZE) {
if (MAX_BATCH_SIZE == 0) {
if (batchSizeCfg >= maxBatchSize) {
if (maxBatchSize == 0) {
getLogger().warn("Batch document functionality is disabled. Ignoring {} configuration.",
BATCH);
} else {
getLogger().warn("Batch size {} exceeds maximum {}. Using default size.",
Utils.getString(entities, object), MAX_BATCH_SIZE);
Utils.getString(entities, object), maxBatchSize);
}
batchSizeCfg = MAX_BATCH_SIZE;
batchSizeCfg = maxBatchSize;
}
return batchSizeCfg;
}
Expand Down Expand Up @@ -791,6 +773,28 @@ private void logUpdatedSetting(String suffix, String setting) {
getLogger().info("Setting {} for MongoDB service {}", setting, suffix);
}

/**
 * Reads the {@code graphdb.mongodb.maxBatchSize} system property and normalizes it to a
 * usable limit: unparsable or negative input falls back to the default of 1000, values
 * above 10000 are capped at 10000, and 0 disables MongoDB batch loading entirely.
 *
 * @return the effective maximum batch size; 0 means batch loading is disabled
 */
private int readMaxBatchSizeConfig() {
	// Cache the raw value so the error log reports exactly what was parsed.
	String configured = System.getProperty("graphdb.mongodb.maxBatchSize", "1000");
	int maxBatch;
	try {
		maxBatch = Integer.parseInt(configured.trim());
	} catch (NumberFormatException e) {
		getLogger().error("Invalid graphdb.mongodb.maxBatchSize: {}. Setting default 1000 as fallback.",
				configured);
		maxBatch = 1000;
	}

	// A negative limit is meaningless and would later break the
	// "batchSizeCfg >= maxBatchSize" capping logic; treat it like invalid input.
	if (maxBatch < 0) {
		getLogger().error("Invalid graphdb.mongodb.maxBatchSize: {}. Setting default 1000 as fallback.",
				configured);
		maxBatch = 1000;
	}

	if (maxBatch > 10000) {
		getLogger().warn("graphdb.mongodb.maxBatchSize is too large. Max allowed is 10000");
		maxBatch = 10000;
	}

	if (maxBatch == 0) {
		getLogger().info("MongoDB batch loading is disabled");
	}

	return maxBatch;
}

/**
* Lazy initialized iterator used to represent model pattern selections, defined before the actual
* query definition. Until the query is defined and added to the given context the iterator will
Expand Down