File tree Expand file tree Collapse file tree 5 files changed +19
-0
lines changed
scala/com.snowplowanalytics.snowplow.lakes Expand file tree Collapse file tree 5 files changed +19
-0
lines changed Original file line number Diff line number Diff line change
1
+ <?xml version="1.0"?>
2
+ <allocations>
3
+ <pool name="pool1">
4
+ <schedulingMode>FIFO</schedulingMode>
5
+ <weight>1</weight>
6
+ <minShare>1</minShare>
7
+ </pool>
8
+ <pool name="pool2">
9
+ <schedulingMode>FIFO</schedulingMode>
10
+ <weight>1</weight>
11
+ <minShare>1</minShare>
12
+ </pool>
13
+ </allocations>
Original file line number Diff line number Diff line change @@ -71,6 +71,7 @@ private[processing] object SparkUtils {
71
71
for {
72
72
_ <- Logger[F].debug(s"Initializing local DataFrame with name $viewName")
73
73
_ <- Sync[F].blocking {
74
+ spark.sparkContext.setLocalProperty("spark.scheduler.pool", "pool1")
74
75
spark.emptyDataFrame.createTempView(viewName)
75
76
}
76
77
} yield ()
@@ -84,6 +85,7 @@ private[processing] object SparkUtils {
84
85
for {
85
86
_ <- Logger[F].debug(s"Saving batch of ${rows.size} events to local DataFrame $viewName")
86
87
_ <- Sync[F].blocking {
88
+ spark.sparkContext.setLocalProperty("spark.scheduler.pool", "pool1")
87
89
spark
88
90
.createDataFrame(rows.toList.asJava, schema)
89
91
.coalesce(1)
@@ -108,6 +110,7 @@ private[processing] object SparkUtils {
108
110
def dropView[F[_]: Sync](spark: SparkSession, viewName: String): F[Unit] =
109
111
Logger[F].info(s"Removing Spark data frame $viewName from local disk...") >>
110
112
Sync[F].blocking {
113
+ spark.sparkContext.setLocalProperty("spark.scheduler.pool", "pool1")
111
114
spark.catalog.dropTempView(viewName)
112
115
}.void
113
116
Original file line number Diff line number Diff line change @@ -90,6 +90,7 @@ class DeltaWriter(config: Config.Delta) extends Writer {
90
90
Sync[F].untilDefinedM {
91
91
Sync[F]
92
92
.blocking[Option[Unit]] {
93
+ df.sparkSession.sparkContext.setLocalProperty("spark.scheduler.pool", "pool2")
93
94
df.write
94
95
.format("delta")
95
96
.mode("append")
Original file line number Diff line number Diff line change @@ -79,6 +79,7 @@ class HudiWriter(config: Config.Hudi) extends Writer {
79
79
80
80
override def write[F[_]: Sync](df: DataFrame): F[Unit] =
81
81
Sync[F].blocking {
82
+ df.sparkSession.sparkContext.setLocalProperty("spark.scheduler.pool", "pool2")
82
83
df.write
83
84
.format("hudi")
84
85
.mode("append")
Original file line number Diff line number Diff line change @@ -58,6 +58,7 @@ class IcebergWriter(config: Config.Iceberg) extends Writer {
58
58
59
59
override def write[F[_]: Sync](df: DataFrame): F[Unit] =
60
60
Sync[F].blocking {
61
+ df.sparkSession.sparkContext.setLocalProperty("spark.scheduler.pool", "pool2")
61
62
df.write
62
63
.format("iceberg")
63
64
.mode("append")
You can’t perform that action at this time.
0 commit comments