Skip to content

Commit 3e1534b

Browse files
committed
With fair scheduler pools
1 parent fbd3218 commit 3e1534b

File tree

5 files changed

+19
-0
lines changed

5 files changed

+19
-0
lines changed
Lines changed: 13 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,13 @@
1+
<?xml version="1.0"?>
2+
<allocations>
3+
<pool name="pool1">
4+
<schedulingMode>FIFO</schedulingMode>
5+
<weight>1</weight>
6+
<minShare>1</minShare>
7+
</pool>
8+
<pool name="pool2">
9+
<schedulingMode>FIFO</schedulingMode>
10+
<weight>1</weight>
11+
<minShare>1</minShare>
12+
</pool>
13+
</allocations>

modules/core/src/main/scala/com.snowplowanalytics.snowplow.lakes/processing/SparkUtils.scala

Lines changed: 3 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -71,6 +71,7 @@ private[processing] object SparkUtils {
7171
for {
7272
_ <- Logger[F].debug(s"Initializing local DataFrame with name $viewName")
7373
_ <- Sync[F].blocking {
74+
spark.sparkContext.setLocalProperty("spark.scheduler.pool", "pool1")
7475
spark.emptyDataFrame.createTempView(viewName)
7576
}
7677
} yield ()
@@ -84,6 +85,7 @@ private[processing] object SparkUtils {
8485
for {
8586
_ <- Logger[F].debug(s"Saving batch of ${rows.size} events to local DataFrame $viewName")
8687
_ <- Sync[F].blocking {
88+
spark.sparkContext.setLocalProperty("spark.scheduler.pool", "pool1")
8789
spark
8890
.createDataFrame(rows.toList.asJava, schema)
8991
.coalesce(1)
@@ -108,6 +110,7 @@ private[processing] object SparkUtils {
108110
def dropView[F[_]: Sync](spark: SparkSession, viewName: String): F[Unit] =
109111
Logger[F].info(s"Removing Spark data frame $viewName from local disk...") >>
110112
Sync[F].blocking {
113+
spark.sparkContext.setLocalProperty("spark.scheduler.pool", "pool1")
111114
spark.catalog.dropTempView(viewName)
112115
}.void
113116

modules/core/src/main/scala/com.snowplowanalytics.snowplow.lakes/tables/DeltaWriter.scala

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -90,6 +90,7 @@ class DeltaWriter(config: Config.Delta) extends Writer {
9090
Sync[F].untilDefinedM {
9191
Sync[F]
9292
.blocking[Option[Unit]] {
93+
df.sparkSession.sparkContext.setLocalProperty("spark.scheduler.pool", "pool2")
9394
df.write
9495
.format("delta")
9596
.mode("append")

modules/core/src/main/scala/com.snowplowanalytics.snowplow.lakes/tables/HudiWriter.scala

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -79,6 +79,7 @@ class HudiWriter(config: Config.Hudi) extends Writer {
7979

8080
override def write[F[_]: Sync](df: DataFrame): F[Unit] =
8181
Sync[F].blocking {
82+
df.sparkSession.sparkContext.setLocalProperty("spark.scheduler.pool", "pool2")
8283
df.write
8384
.format("hudi")
8485
.mode("append")

modules/core/src/main/scala/com.snowplowanalytics.snowplow.lakes/tables/IcebergWriter.scala

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -58,6 +58,7 @@ class IcebergWriter(config: Config.Iceberg) extends Writer {
5858

5959
override def write[F[_]: Sync](df: DataFrame): F[Unit] =
6060
Sync[F].blocking {
61+
df.sparkSession.sparkContext.setLocalProperty("spark.scheduler.pool", "pool2")
6162
df.write
6263
.format("iceberg")
6364
.mode("append")

0 commit comments

Comments (0)