Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
111 changes: 110 additions & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,32 @@ under the License.
<executableJava>java</executableJava>
<jmhProfArgument> </jmhProfArgument>
<profilerOutputDir>${build.directory}/profile-results</profilerOutputDir>

<!-- Spotless-related properties. -->
<checkstyle.version>10.18.2</checkstyle.version>
<spotless.skip>false</spotless.skip>
<spotless.version>2.43.0</spotless.version>
<spotless.scalafmt.version>3.4.3</spotless.scalafmt.version>
<spotless.delimiter>package</spotless.delimiter>
<spotless.license.header>
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
</spotless.license.header>
</properties>

<repositories>
Expand Down Expand Up @@ -630,7 +656,82 @@ under the License.
</profiles>

<build>
<extensions>

<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>3.3.1</version>
<dependencies>
<dependency>
<groupId>com.puppycrawl.tools</groupId>
<artifactId>checkstyle</artifactId>
<!-- Note: match version with docs/flinkDev/ide_setup.md -->
<version>${checkstyle.version}</version>
</dependency>
</dependencies>

<executions>
<execution>
<id>validate</id>
<phase>validate</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
<configuration>
<!-- Skip these packages because their classes are generated from Thrift definitions. -->
<excludes>
org/apache/flink/benchmark/thrift/**
</excludes>
<suppressionsLocation>/tools/maven/suppressions.xml</suppressionsLocation>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<configLocation>/tools/maven/checkstyle.xml</configLocation>
<logViolationsToConsole>true</logViolationsToConsole>
<failOnViolation>true</failOnViolation>
</configuration>
</plugin>

<plugin>
<groupId>com.diffplug.spotless</groupId>
<artifactId>spotless-maven-plugin</artifactId>
<version>${spotless.version}</version>
<configuration>
<java>
<googleJavaFormat>
<version>1.24.0</version>
<style>AOSP</style>
</googleJavaFormat>

<!-- \# refers to the static imports -->
<importOrder>
<order>org.apache.flink,org.apache.flink.shaded,,javax,java,scala,\#</order>
</importOrder>

<removeUnusedImports />

<excludes>
<!-- Skip these packages because their classes are generated from Thrift definitions. -->
<exclude>src/main/java/org/apache/flink/benchmark/thrift/**</exclude>
</excludes>
</java>
</configuration>
<executions>
<execution>
<id>spotless-check</id>
<phase>validate</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</pluginManagement>

<extensions>
<!-- required for getting the correct protoc binary for protobuf -->
<extension>
<groupId>kr.motd.maven</groupId>
Expand Down Expand Up @@ -754,6 +855,14 @@ under the License.
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
</plugin>
<plugin>
<groupId>com.diffplug.spotless</groupId>
<artifactId>spotless-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
Original file line number Diff line number Diff line change
Expand Up @@ -85,14 +85,13 @@ public void setUp() throws Exception {
env.setBufferTimeout(-1);
}

protected Configuration createConfiguration(
boolean compressionEnabled) {
protected Configuration createConfiguration(boolean compressionEnabled) {
Configuration configuration = super.createConfiguration();

configuration.set(
NettyShuffleEnvironmentOptions.SHUFFLE_COMPRESSION_CODEC,
compressionEnabled ?
NettyShuffleEnvironmentOptions.CompressionCodec.LZ4
compressionEnabled
? NettyShuffleEnvironmentOptions.CompressionCodec.LZ4
: NettyShuffleEnvironmentOptions.CompressionCodec.NONE);
configuration.set(
CoreOptions.TMP_DIRS,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@

import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.CoreOptions;
import org.apache.flink.configuration.NettyShuffleEnvironmentOptions;
import org.apache.flink.streaming.api.graph.StreamGraph;
import org.apache.flink.streaming.api.graph.StreamingJobGraphGenerator;
import org.apache.flink.util.FileUtils;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -98,8 +98,7 @@ public enum CheckpointMode {
TaskManagerOptions.MEMORY_SEGMENT_SIZE,
CheckpointEnvironmentContext.START_MEMORY_SEGMENT_SIZE);
config.set(
CheckpointingOptions.ALIGNED_CHECKPOINT_TIMEOUT,
Duration.ofMillis(0));
CheckpointingOptions.ALIGNED_CHECKPOINT_TIMEOUT, Duration.ofMillis(0));
config.set(TaskManagerOptions.BUFFER_DEBLOAT_ENABLED, false);
return config;
}),
Expand All @@ -110,8 +109,7 @@ public enum CheckpointMode {
TaskManagerOptions.MEMORY_SEGMENT_SIZE,
CheckpointEnvironmentContext.START_MEMORY_SEGMENT_SIZE);
config.set(
CheckpointingOptions.ALIGNED_CHECKPOINT_TIMEOUT,
Duration.ofMillis(1));
CheckpointingOptions.ALIGNED_CHECKPOINT_TIMEOUT, Duration.ofMillis(1));
config.set(TaskManagerOptions.BUFFER_DEBLOAT_ENABLED, false);
return config;
}),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,9 @@
import org.apache.flink.streaming.api.functions.sink.legacy.SinkFunction;
import org.apache.flink.streaming.api.functions.source.ContinuousFileReaderOperatorFactory;
import org.apache.flink.streaming.api.functions.source.TimestampedFileInputSplit;
import org.apache.flink.streaming.api.functions.source.legacy.SourceFunction;

import joptsimple.internal.Strings;
import org.apache.flink.streaming.api.functions.source.legacy.SourceFunction;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.runner.Runner;
Expand All @@ -43,19 +43,19 @@

@OperationsPerInvocation(value = ContinuousFileReaderOperatorBenchmark.RECORDS_PER_INVOCATION)
public class ContinuousFileReaderOperatorBenchmark extends BenchmarkBase {
private static final int SPLITS_PER_INVOCATION = 100;
private static final int LINES_PER_SPLIT = 175_000;
public static final int RECORDS_PER_INVOCATION = SPLITS_PER_INVOCATION * LINES_PER_SPLIT;
private static final int splitsPerInvocation = 100;
private static final int linesPerSplit = 175_000;
public static final int RECORDS_PER_INVOCATION = splitsPerInvocation * linesPerSplit;

private static final TimestampedFileInputSplit SPLIT =
private static final TimestampedFileInputSplit split =
new TimestampedFileInputSplit(0, 0, new Path("."), 0, 0, new String[] {});
private static final String LINE = Strings.repeat('0', 10);
private static final String line = Strings.repeat('0', 10);

// Source should wait until all elements reach sink. Otherwise, END_OF_INPUT is sent once all
// splits are emitted.
// Thus, all subsequent reads in ContinuousFileReaderOperator would be made in CLOSING state in
// a simple while-true loop (MailboxExecutor.isIdle is always true).
private static OneShotLatch TARGET_COUNT_REACHED_LATCH = new OneShotLatch();
private static OneShotLatch targetCountReachedLatch = new OneShotLatch();

public static void main(String[] args) throws RunnerException {
Options options =
Expand All @@ -73,7 +73,7 @@ public static void main(String[] args) throws RunnerException {

@Benchmark
public void readFileSplit(FlinkEnvironmentContext context) throws Exception {
TARGET_COUNT_REACHED_LATCH.reset();
targetCountReachedLatch.reset();
StreamExecutionEnvironment env = context.env;
env.enableCheckpointing(100)
.setParallelism(1)
Expand All @@ -93,15 +93,15 @@ private static class MockSourceFunction implements SourceFunction<TimestampedFil

@Override
public void run(SourceContext<TimestampedFileInputSplit> ctx) {
while (isRunning && count < SPLITS_PER_INVOCATION) {
while (isRunning && count < splitsPerInvocation) {
count++;
synchronized (ctx.getCheckpointLock()) {
ctx.collect(SPLIT);
ctx.collect(split);
}
}
while (isRunning) {
try {
TARGET_COUNT_REACHED_LATCH.await(100, TimeUnit.MILLISECONDS);
targetCountReachedLatch.await(100, TimeUnit.MILLISECONDS);
return;
} catch (InterruptedException e) {
if (!isRunning) {
Expand All @@ -124,13 +124,13 @@ private static class MockInputFormat extends FileInputFormat<String> {

@Override
public boolean reachedEnd() {
return count >= ContinuousFileReaderOperatorBenchmark.LINES_PER_SPLIT;
return count >= ContinuousFileReaderOperatorBenchmark.linesPerSplit;
}

@Override
public String nextRecord(String s) {
count++;
return LINE;
return line;
}

@Override
Expand All @@ -151,7 +151,7 @@ private static class LimitedSink implements SinkFunction<String> {
@Override
public void invoke(String value, Context context) {
if (++count == RECORDS_PER_INVOCATION) {
TARGET_COUNT_REACHED_LATCH.trigger();
targetCountReachedLatch.trigger();
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -103,11 +103,12 @@ protected Configuration createConfiguration() {
final Configuration configuration = new Configuration();
configuration.set(RestOptions.BIND_PORT, "0");
// no equivalent config available.
//configuration.setInteger(
// configuration.setInteger(
// NettyShuffleEnvironmentOptions.NETWORK_NUM_BUFFERS, NUM_NETWORK_BUFFERS);
configuration.set(DeploymentOptions.TARGET, MiniClusterPipelineExecutorServiceLoader.NAME);
configuration.set(DeploymentOptions.ATTACHED, true);
// It doesn't make sense to wait for the final checkpoint in benchmarks since it only prolongs
// It doesn't make sense to wait for the final checkpoint in benchmarks since it only
// prolongs
// the test but doesn't give any advantages.
configuration.set(CheckpointingOptions.ENABLE_CHECKPOINTS_AFTER_TASKS_FINISH, false);
// TODO: remove this line after FLINK-28243 will be done
Expand Down
Loading