Skip to content

Use more meaningful names in fields of generated Datasets #16385

Closed
@andsel

Description

@andsel

The runtime-generated Datasets used to model a pipeline execution flow contain fields named in incremental index order, like "field0"..."fieldN". In cases where the generated code must be read by a human (using the JVM properties -Dorg.codehaus.janino.source_debugging.enable=true and -Dorg.codehaus.janino.source_debugging.dir=<path> to debug, or to better understand what is effectively executed), such names are not very useful.
For example, consider what's generated today:

// "Before" example: a runtime-generated Dataset for an output plugin stage.
// All fields are named by incremental index (field0, field1, field2), which
// conveys nothing about their role when a human reads the generated source.
public final class CompiledDataset2 extends org.logstash.config.ir.compiler.BaseDataset
    implements org.logstash.config.ir.compiler.Dataset {
  // Delegator for the output plugin this dataset feeds (see multiReceive below).
  private final org.logstash.config.ir.compiler.OutputDelegatorExt field0;
  // Upstream dataset whose compute() result is consumed here.
  private final org.logstash.config.ir.compiler.Dataset field1;
  // Buffer holding the non-cancelled events handed to the output plugin.
  private final org.jruby.RubyArray field2;

  // Fields are injected by name through an untyped argument map, so the opaque
  // "fieldN" names also appear as the map keys.
  public CompiledDataset2(java.util.Map arguments) {
    field0 = ((org.logstash.config.ir.compiler.OutputDelegatorExt) arguments.get("field0"));
    field1 = ((org.logstash.config.ir.compiler.Dataset) arguments.get("field1"));
    field2 = ((org.jruby.RubyArray) arguments.get("field2"));
  }

  // Pulls events from the upstream dataset, copies the non-cancelled ones into
  // the buffer, and hands them to the output plugin; returns null because an
  // output stage is terminal and produces no downstream events.
  public java.util.Collection compute(
      org.jruby.RubyArray batchArg, boolean flushArg, boolean shutdownArg) {
    org.logstash.config.ir.compiler.Utils.copyNonCancelledEvents(
        field1.compute(batchArg, flushArg, shutdownArg), field2);
    // plugin.id is pushed into the log4j ThreadContext only for the duration of
    // the multiReceive call, so log lines can be attributed to this plugin.
    org.apache.logging.log4j.ThreadContext.put("plugin.id", field0.getId().toString());
    field0.multiReceive(field2);
    field1.clear();
    org.apache.logging.log4j.ThreadContext.remove("plugin.id");
    field2.clear();
    return null;
  }

  public void clear() {}
}

Compared to something with mnemonic field names:

// "After" example: the same style of generated Dataset (here a filter stage)
// with mnemonic field names — outputBufferField, inputBufferField, pluginField —
// so a human reading the generated source can tell each field's role at a glance.
// NOTE(review): this example shows a filter (FilterDelegatorExt/multiFilter)
// while the "before" example shows an output; the naming contrast, not the
// plugin type, is the point of the comparison.
public final class CompiledDataset2 extends org.logstash.config.ir.compiler.BaseDataset
    implements org.logstash.config.ir.compiler.Dataset {
  // Accumulates the filtered events returned to downstream datasets.
  private final java.util.ArrayList outputBufferField;
  // Upstream dataset; still index-named here since it carries no richer role.
  private final org.logstash.config.ir.compiler.Dataset field1;
  // Buffer of non-cancelled input events handed to the filter plugin.
  private final org.jruby.RubyArray inputBufferField;
  // Delegator for the filter plugin executed by this dataset.
  private final org.logstash.config.ir.compiler.FilterDelegatorExt pluginField;

  // The mnemonic names are mirrored in the argument-map keys as well.
  public CompiledDataset2(java.util.Map arguments) {
    outputBufferField = ((java.util.ArrayList) arguments.get("outputBufferField"));
    field1 = ((org.logstash.config.ir.compiler.Dataset) arguments.get("field1"));
    inputBufferField = ((org.jruby.RubyArray) arguments.get("inputBufferField"));
    pluginField =
        ((org.logstash.config.ir.compiler.FilterDelegatorExt) arguments.get("pluginField"));
  }

  // Runs the filter once per batch: results are memoized via the BaseDataset
  // done flag, so repeated compute() calls return the cached output buffer.
  public java.util.Collection compute(
      org.jruby.RubyArray batchArg, boolean flushArg, boolean shutdownArg) {
    if (this.isDone()) {
      return outputBufferField;
    }
    org.logstash.config.ir.compiler.Utils.copyNonCancelledEvents(
        field1.compute(batchArg, flushArg, shutdownArg), inputBufferField);
    // plugin.id scopes log4j ThreadContext attribution to the multiFilter call.
    org.apache.logging.log4j.ThreadContext.put("plugin.id", pluginField.getId().toString());
    outputBufferField.addAll(pluginField.multiFilter(inputBufferField));
    org.apache.logging.log4j.ThreadContext.remove("plugin.id");
    inputBufferField.clear();
    this.setDone();
    return outputBufferField;
  }

  // Resets the memoized state so the next compute() recomputes from upstream.
  public void clear() {
    if (this.isDone()) {
      field1.clear();
      outputBufferField.clear();
      this.clearDone();
    }
  }
}

Metadata

Metadata

Assignees

Type

No type

Projects

No projects

Milestone

No milestone

Relationships

None yet

Development

No branches or pull requests

Issue actions