
Commit 2c388fe

Rebase and address comments
1 parent ad98949 commit 2c388fe

8 files changed: +125 −94 lines changed

coral-hive/src/main/java/com/linkedin/coral/transformers/CoralRelToSqlNodeConverter.java

Lines changed: 9 additions & 5 deletions
@@ -350,14 +350,17 @@ private SqlNode generateRightChildForSqlJoinWithLateralViews(BiRel e, Result rig
   }
 
   /**
-   * Override this method to handle the conversion for RelNode `f(x).y.z` where `f` is an operator, which
-   * returns a struct containing field `y`, `y` is also a struct containing field `z`.
+   * Override this method to handle the conversion for {@link RexFieldAccess} `f(x).y.z` where `f` is an operator,
+   * which returns a struct containing field `y`, `y` is also a struct containing field `z`.
    *
-   * Calcite will convert this RelNode to a SqlIdentifier directly (check
+   * Calcite will convert this RelNode to a {@link SqlIdentifier} directly (check
    * {@link org.apache.calcite.rel.rel2sql.SqlImplementor.Context#toSql(RexProgram, RexNode)}),
    * which is not aligned with our expectation since we want to apply transformations on `f(x)` with
    * {@link com.linkedin.coral.common.transformers.SqlCallTransformer}. Therefore, we override this
-   * method to convert `f(x)` to SqlCall, `.` to {@link com.linkedin.coral.common.functions.FunctionFieldReferenceOperator#DOT}
+   * method to convert `f(x)` to {@link SqlCall}, `.` to {@link com.linkedin.coral.common.functions.FunctionFieldReferenceOperator#DOT},
+   * so `f(x).y.z` will be converted to `(f(x).y).z`.
+   *
+   * Check `CoralSparkTest#testConvertFieldAccessOnFunctionCall` for unit test and example.
    */
   @Override
   public Context aliasContext(Map<String, RelDataType> aliases, boolean qualified) {

@@ -373,7 +376,8 @@ public SqlNode toSql(RexProgram program, RexNode rex) {
           accessNames.add(((RexFieldAccess) referencedExpr).getField().getName());
           referencedExpr = ((RexFieldAccess) referencedExpr).getReferenceExpr();
         }
-        if (referencedExpr.getKind() == SqlKind.OTHER_FUNCTION || referencedExpr.getKind() == SqlKind.CAST) {
+        final SqlKind sqlKind = referencedExpr.getKind();
+        if (sqlKind == SqlKind.OTHER_FUNCTION || sqlKind == SqlKind.CAST || sqlKind == SqlKind.ROW) {
           SqlNode functionCall = toSql(program, referencedExpr);
           Collections.reverse(accessNames);
           for (String accessName : accessNames) {
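The loop body is cut off in this view. A minimal sketch of how it plausibly completes, assuming the standard Calcite SqlOperator.createCall API and Coral's FunctionFieldReferenceOperator.DOT; the wrapper class and method name below are hypothetical, for illustration only, not the verbatim Coral source:

import java.util.List;

import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParserPos;

import com.linkedin.coral.common.functions.FunctionFieldReferenceOperator;

class DotWrappingSketch { // hypothetical wrapper, for illustration
  // Given the already-converted `f(x)` call and the reversed access names
  // ["y", "z"], nest one DOT call per field so `f(x).y.z` becomes `(f(x).y).z`.
  static SqlNode wrapWithDots(SqlNode functionCall, List<String> accessNames) {
    SqlNode result = functionCall;
    for (String accessName : accessNames) {
      result = FunctionFieldReferenceOperator.DOT.createCall(SqlParserPos.ZERO, result,
          new SqlIdentifier(accessName, SqlParserPos.ZERO));
    }
    return result;
  }
}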

coral-spark/src/main/java/com/linkedin/coral/spark/CoralToSparkSqlCallConverter.java

Lines changed: 37 additions & 33 deletions
@@ -9,147 +9,151 @@
 
 import org.apache.calcite.sql.SqlCall;
 import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.fun.SqlStdOperatorTable;
 import org.apache.calcite.sql.util.SqlShuttle;
 
 import com.linkedin.coral.common.transformers.OperatorRenameSqlCallTransformer;
 import com.linkedin.coral.common.transformers.SqlCallTransformers;
 import com.linkedin.coral.spark.containers.SparkUDFInfo;
-import com.linkedin.coral.spark.transformers.FallBackToHiveUDFTransformer;
-import com.linkedin.coral.spark.transformers.TransportableUDFTransformer;
+import com.linkedin.coral.spark.transformers.FallBackToLinkedInHiveUDFTransformer;
+import com.linkedin.coral.spark.transformers.TransportUDFTransformer;
 
-import static com.linkedin.coral.spark.transformers.TransportableUDFTransformer.*;
+import static com.linkedin.coral.spark.transformers.TransportUDFTransformer.*;
 
 
 /**
  * This class extends the class of {@link org.apache.calcite.sql.util.SqlShuttle} and initialize a {@link com.linkedin.coral.common.transformers.SqlCallTransformers}
  * which containing a list of {@link com.linkedin.coral.common.transformers.SqlCallTransformer} to traverse the hierarchy of a {@link org.apache.calcite.sql.SqlCall}
  * and converts the functions from Coral operator to Spark operator if it is required
+ *
+ * In this converter, we need to apply {@link TransportUDFTransformer} before {@link FallBackToLinkedInHiveUDFTransformer}
+ * because we should try to transform a UDF to an equivalent Transport UDF before falling back to LinkedIn Hive UDF.
  */
 public class CoralToSparkSqlCallConverter extends SqlShuttle {
   private final SqlCallTransformers sqlCallTransformers;
 
   public CoralToSparkSqlCallConverter(Set<SparkUDFInfo> sparkUDFInfos) {
     this.sqlCallTransformers = SqlCallTransformers.of(
-        // Transportable UDFs
-        new TransportableUDFTransformer("com.linkedin.dali.udf.date.hive.DateFormatToEpoch",
+        // Transport UDFs
+        new TransportUDFTransformer("com.linkedin.dali.udf.date.hive.DateFormatToEpoch",
             "com.linkedin.stdudfs.daliudfs.spark.DateFormatToEpoch", DALI_UDFS_IVY_URL_SPARK_2_11,
             DALI_UDFS_IVY_URL_SPARK_2_12, sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.dali.udf.date.hive.EpochToDateFormat",
+        new TransportUDFTransformer("com.linkedin.dali.udf.date.hive.EpochToDateFormat",
             "com.linkedin.stdudfs.daliudfs.spark.EpochToDateFormat", DALI_UDFS_IVY_URL_SPARK_2_11,
             DALI_UDFS_IVY_URL_SPARK_2_12, sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.dali.udf.date.hive.EpochToEpochMilliseconds",
+        new TransportUDFTransformer("com.linkedin.dali.udf.date.hive.EpochToEpochMilliseconds",
             "com.linkedin.stdudfs.daliudfs.spark.EpochToEpochMilliseconds", DALI_UDFS_IVY_URL_SPARK_2_11,
             DALI_UDFS_IVY_URL_SPARK_2_12, sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.dali.udf.isguestmemberid.hive.IsGuestMemberId",
+        new TransportUDFTransformer("com.linkedin.dali.udf.isguestmemberid.hive.IsGuestMemberId",
             "com.linkedin.stdudfs.daliudfs.spark.IsGuestMemberId", DALI_UDFS_IVY_URL_SPARK_2_11,
             DALI_UDFS_IVY_URL_SPARK_2_12, sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.dali.udf.istestmemberid.hive.IsTestMemberId",
+        new TransportUDFTransformer("com.linkedin.dali.udf.istestmemberid.hive.IsTestMemberId",
             "com.linkedin.stdudfs.daliudfs.spark.IsTestMemberId", DALI_UDFS_IVY_URL_SPARK_2_11,
             DALI_UDFS_IVY_URL_SPARK_2_12, sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.dali.udf.maplookup.hive.MapLookup",
+        new TransportUDFTransformer("com.linkedin.dali.udf.maplookup.hive.MapLookup",
             "com.linkedin.stdudfs.daliudfs.spark.MapLookup", DALI_UDFS_IVY_URL_SPARK_2_11, DALI_UDFS_IVY_URL_SPARK_2_12,
             sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.dali.udf.sanitize.hive.Sanitize",
+        new TransportUDFTransformer("com.linkedin.dali.udf.sanitize.hive.Sanitize",
             "com.linkedin.stdudfs.daliudfs.spark.Sanitize", DALI_UDFS_IVY_URL_SPARK_2_11, DALI_UDFS_IVY_URL_SPARK_2_12,
             sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.dali.udf.watbotcrawlerlookup.hive.WATBotCrawlerLookup",
+        new TransportUDFTransformer("com.linkedin.dali.udf.watbotcrawlerlookup.hive.WATBotCrawlerLookup",
             "com.linkedin.stdudfs.daliudfs.spark.WatBotCrawlerLookup", DALI_UDFS_IVY_URL_SPARK_2_11,
             DALI_UDFS_IVY_URL_SPARK_2_12, sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.DateFormatToEpoch",
+        new TransportUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.DateFormatToEpoch",
             "com.linkedin.stdudfs.daliudfs.spark.DateFormatToEpoch", DALI_UDFS_IVY_URL_SPARK_2_11,
             DALI_UDFS_IVY_URL_SPARK_2_12, sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.EpochToDateFormat",
+        new TransportUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.EpochToDateFormat",
             "com.linkedin.stdudfs.daliudfs.spark.EpochToDateFormat", DALI_UDFS_IVY_URL_SPARK_2_11,
             DALI_UDFS_IVY_URL_SPARK_2_12, sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.EpochToEpochMilliseconds",
+        new TransportUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.EpochToEpochMilliseconds",
             "com.linkedin.stdudfs.daliudfs.spark.EpochToEpochMilliseconds", DALI_UDFS_IVY_URL_SPARK_2_11,
             DALI_UDFS_IVY_URL_SPARK_2_12, sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.GetProfileSections",
+        new TransportUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.GetProfileSections",
             "com.linkedin.stdudfs.daliudfs.spark.GetProfileSections", DALI_UDFS_IVY_URL_SPARK_2_11,
             DALI_UDFS_IVY_URL_SPARK_2_12, sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.stdudfs.stringudfs.hive.InitCap",
+        new TransportUDFTransformer("com.linkedin.stdudfs.stringudfs.hive.InitCap",
             "com.linkedin.stdudfs.stringudfs.spark.InitCap",
             "ivy://com.linkedin.standard-udfs-common-sql-udfs:standard-udfs-string-udfs:1.0.1?classifier=spark_2.11",
             "ivy://com.linkedin.standard-udfs-common-sql-udfs:standard-udfs-string-udfs:1.0.1?classifier=spark_2.12",
             sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.IsGuestMemberId",
+        new TransportUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.IsGuestMemberId",
             "com.linkedin.stdudfs.daliudfs.spark.IsGuestMemberId", DALI_UDFS_IVY_URL_SPARK_2_11,
             DALI_UDFS_IVY_URL_SPARK_2_12, sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.IsTestMemberId",
+        new TransportUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.IsTestMemberId",
             "com.linkedin.stdudfs.daliudfs.spark.IsTestMemberId", DALI_UDFS_IVY_URL_SPARK_2_11,
             DALI_UDFS_IVY_URL_SPARK_2_12, sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.MapLookup",
+        new TransportUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.MapLookup",
             "com.linkedin.stdudfs.daliudfs.spark.MapLookup", DALI_UDFS_IVY_URL_SPARK_2_11, DALI_UDFS_IVY_URL_SPARK_2_12,
             sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.PortalLookup",
+        new TransportUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.PortalLookup",
             "com.linkedin.stdudfs.daliudfs.spark.PortalLookup", DALI_UDFS_IVY_URL_SPARK_2_11,
             DALI_UDFS_IVY_URL_SPARK_2_12, sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.Sanitize",
+        new TransportUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.Sanitize",
             "com.linkedin.stdudfs.daliudfs.spark.Sanitize", DALI_UDFS_IVY_URL_SPARK_2_11, DALI_UDFS_IVY_URL_SPARK_2_12,
             sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.stdudfs.userinterfacelookup.hive.UserInterfaceLookup",
+        new TransportUDFTransformer("com.linkedin.stdudfs.userinterfacelookup.hive.UserInterfaceLookup",
             "com.linkedin.stdudfs.userinterfacelookup.spark.UserInterfaceLookup",
             "ivy://com.linkedin.standard-udf-userinterfacelookup:userinterfacelookup-std-udf:0.0.27?classifier=spark_2.11",
             "ivy://com.linkedin.standard-udf-userinterfacelookup:userinterfacelookup-std-udf:0.0.27?classifier=spark_2.12",
             sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.WatBotCrawlerLookup",
+        new TransportUDFTransformer("com.linkedin.stdudfs.daliudfs.hive.WatBotCrawlerLookup",
             "com.linkedin.stdudfs.daliudfs.spark.WatBotCrawlerLookup", DALI_UDFS_IVY_URL_SPARK_2_11,
             DALI_UDFS_IVY_URL_SPARK_2_12, sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.jemslookup.udf.hive.JemsLookup",
+        new TransportUDFTransformer("com.linkedin.jemslookup.udf.hive.JemsLookup",
             "com.linkedin.jemslookup.udf.spark.JemsLookup",
             "ivy://com.linkedin.jobs-udf:jems-udfs:2.1.7?classifier=spark_2.11",
             "ivy://com.linkedin.jobs-udf:jems-udfs:2.1.7?classifier=spark_2.12", sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.stdudfs.parsing.hive.UserAgentParser",
+        new TransportUDFTransformer("com.linkedin.stdudfs.parsing.hive.UserAgentParser",
             "com.linkedin.stdudfs.parsing.spark.UserAgentParser",
             "ivy://com.linkedin.standard-udfs-parsing:parsing-stdudfs:3.0.3?classifier=spark_2.11",
             "ivy://com.linkedin.standard-udfs-parsing:parsing-stdudfs:3.0.3?classifier=spark_2.12", sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.stdudfs.parsing.hive.Ip2Str",
+        new TransportUDFTransformer("com.linkedin.stdudfs.parsing.hive.Ip2Str",
             "com.linkedin.stdudfs.parsing.spark.Ip2Str",
             "ivy://com.linkedin.standard-udfs-parsing:parsing-stdudfs:3.0.3?classifier=spark_2.11",
             "ivy://com.linkedin.standard-udfs-parsing:parsing-stdudfs:3.0.3?classifier=spark_2.12", sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.stdudfs.lookup.hive.BrowserLookup",
+        new TransportUDFTransformer("com.linkedin.stdudfs.lookup.hive.BrowserLookup",
             "com.linkedin.stdudfs.lookup.spark.BrowserLookup",
             "ivy://com.linkedin.standard-udfs-parsing:parsing-stdudfs:3.0.3?classifier=spark_2.11",
             "ivy://com.linkedin.standard-udfs-parsing:parsing-stdudfs:3.0.3?classifier=spark_2.12", sparkUDFInfos),
 
-        new TransportableUDFTransformer("com.linkedin.jobs.udf.hive.ConvertIndustryCode",
+        new TransportUDFTransformer("com.linkedin.jobs.udf.hive.ConvertIndustryCode",
             "com.linkedin.jobs.udf.spark.ConvertIndustryCode",
             "ivy://com.linkedin.jobs-udf:jobs-udfs:2.1.6?classifier=spark_2.11",
             "ivy://com.linkedin.jobs-udf:jobs-udfs:2.1.6?classifier=spark_2.12", sparkUDFInfos),
 
-        // Transportable UDF for unit test
-        new TransportableUDFTransformer("com.linkedin.coral.hive.hive2rel.CoralTestUDF",
+        // Transport UDF for unit test
+        new TransportUDFTransformer("com.linkedin.coral.hive.hive2rel.CoralTestUDF",
             "com.linkedin.coral.spark.CoralTestUDF",
             "ivy://com.linkedin.coral.spark.CoralTestUDF?classifier=spark_2.11", null, sparkUDFInfos),
 
         // Built-in operator
-        new OperatorRenameSqlCallTransformer("CARDINALITY", 1, "size"),
+        new OperatorRenameSqlCallTransformer(SqlStdOperatorTable.CARDINALITY, 1, "size"),
 
         // Fall back to the original Hive UDF defined in StaticHiveFunctionRegistry after failing to apply transformers above
-        new FallBackToHiveUDFTransformer(sparkUDFInfos));
+        new FallBackToLinkedInHiveUDFTransformer(sparkUDFInfos));
   }
 
   @Override
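Since SqlCallTransformers.of(...) applies its transformers in the order listed, the Transport UDF entries are tried first and FallBackToLinkedInHiveUDFTransformer runs last, matching the ordering note added to the class Javadoc. A minimal usage sketch; the visit() override is truncated above, so the accept()-based calling convention and the wrapper class below are assumptions:

import java.util.HashSet;
import java.util.Set;

import org.apache.calcite.sql.SqlNode;

import com.linkedin.coral.spark.CoralToSparkSqlCallConverter;
import com.linkedin.coral.spark.containers.SparkUDFInfo;

class ConverterUsageSketch { // hypothetical wrapper, for illustration
  static SqlNode toSparkSqlNode(SqlNode coralSqlNode) {
    Set<SparkUDFInfo> sparkUDFInfos = new HashSet<>();
    // SqlShuttle is a SqlVisitor<SqlNode>, so accept() returns the rewritten tree
    // with each matching transformer applied in the order listed above.
    SqlNode sparkSqlNode = coralSqlNode.accept(new CoralToSparkSqlCallConverter(sparkUDFInfos));
    // sparkUDFInfos now carries the ivy:// URIs of every UDF that was rewritten,
    // so the caller can register them in the SparkSession before execution.
    return sparkSqlNode;
  }
}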

coral-spark/src/main/java/com/linkedin/coral/spark/SparkSqlRewriter.java

Lines changed: 6 additions & 5 deletions
@@ -46,6 +46,8 @@ public class SparkSqlRewriter extends SqlShuttle {
  * is translated to
  * SELECT named_struct(.....)
  *
+ * Check `CoralSparkTest#testAvoidCastToRow` for unit test and a more complex example.
+ *
  * Also replaces:
  *
  * CAST(NULL AS NULL)

@@ -70,15 +72,14 @@ && containsSqlRowTypeSpec((SqlDataTypeSpec) call.getOperandList().get(1))) {
   private boolean containsSqlRowTypeSpec(SqlDataTypeSpec sqlDataTypeSpec) {
     if (sqlDataTypeSpec instanceof SqlRowTypeSpec) {
       return true;
-    }
-    if (sqlDataTypeSpec instanceof SqlArrayTypeSpec) {
+    } else if (sqlDataTypeSpec instanceof SqlArrayTypeSpec) {
       return containsSqlRowTypeSpec(((SqlArrayTypeSpec) sqlDataTypeSpec).getElementTypeSpec());
-    }
-    if (sqlDataTypeSpec instanceof SqlMapTypeSpec) {
+    } else if (sqlDataTypeSpec instanceof SqlMapTypeSpec) {
       return containsSqlRowTypeSpec(((SqlMapTypeSpec) sqlDataTypeSpec).getKeyTypeSpec())
           || containsSqlRowTypeSpec(((SqlMapTypeSpec) sqlDataTypeSpec).getValueTypeSpec());
+    } else {
+      return false;
     }
-    return false;
   }
 
   /**
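With the chain rewritten as if/else if/else, the recursion reads cleanly: it returns true exactly when a ROW type spec occurs at any nesting depth inside ARRAY or MAP type specs. A hedged illustration, with made-up CAST targets rather than cases from the Coral test suite:

// Hedged illustration of containsSqlRowTypeSpec (these CAST targets are made up,
// not drawn from the Coral tests):
//   CAST(x AS ROW(a INTEGER, b VARCHAR))           -> true  (row at the top level)
//   CAST(x AS ARRAY<ROW(a INTEGER)>)               -> true  (row as array element)
//   CAST(x AS MAP<VARCHAR, ARRAY<ROW(a INTEGER)>>) -> true  (row inside map value)
//   CAST(x AS ARRAY<INTEGER>)                      -> false (no row at any depth)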
coral-spark/src/main/java/com/linkedin/coral/spark/transformers/FallBackToLinkedInHiveUDFTransformer.java

Lines changed: 18 additions & 16 deletions
@@ -23,14 +23,14 @@
 
 
 /**
- * After failing to transform UDF with {@link TransportableUDFTransformer},
+ * After failing to transform UDF with {@link TransportUDFTransformer},
  * we use this transformer to fall back to the original Hive UDF defined in
  * {@link com.linkedin.coral.hive.hive2rel.functions.StaticHiveFunctionRegistry}.
  * This is reasonable since Spark understands and has ability to run Hive UDF.
- * Check `CoralSparkTest#testFallBackToHiveUDFTransformer()` for an example.
+ * Check `CoralSparkTest#testFallBackToLinkedInHiveUDFTransformer()` for an example.
  */
-public class FallBackToHiveUDFTransformer extends SqlCallTransformer {
-  private static final Logger LOG = LoggerFactory.getLogger(FallBackToHiveUDFTransformer.class);
+public class FallBackToLinkedInHiveUDFTransformer extends SqlCallTransformer {
+  private static final Logger LOG = LoggerFactory.getLogger(FallBackToLinkedInHiveUDFTransformer.class);
 
   /**
    * Some LinkedIn UDFs get registered correctly in a SparkSession, and hence a DataFrame is successfully

@@ -46,33 +46,35 @@ public class FallBackToHiveUDFTransformer extends SqlCallTransformer {
       "com.linkedin.coral.hive.hive2rel.CoralTestUnsupportedUDF");
   private final Set<SparkUDFInfo> sparkUDFInfos;
 
-  public FallBackToHiveUDFTransformer(Set<SparkUDFInfo> sparkUDFInfos) {
+  public FallBackToLinkedInHiveUDFTransformer(Set<SparkUDFInfo> sparkUDFInfos) {
     this.sparkUDFInfos = sparkUDFInfos;
   }
 
   @Override
   protected boolean condition(SqlCall sqlCall) {
-    final String functionClassName = sqlCall.getOperator().getName();
-    if (UNSUPPORTED_HIVE_UDFS.contains(functionClassName)) {
-      throw new UnsupportedUDFException(functionClassName);
-    }
-    return functionClassName.contains(".") && !functionClassName.equals(".");
+    final SqlOperator operator = sqlCall.getOperator();
+    final String operatorName = operator.getName();
+    return operator instanceof VersionedSqlUserDefinedFunction && operatorName.contains(".")
+        && !operatorName.equals(".");
   }
 
   @Override
   protected SqlCall transform(SqlCall sqlCall) {
     final VersionedSqlUserDefinedFunction operator = (VersionedSqlUserDefinedFunction) sqlCall.getOperator();
-    final String functionClassName = operator.getName();
-    final String expandedFunctionName = operator.getViewDependentFunctionName();
+    final String operatorName = operator.getName();
+    if (UNSUPPORTED_HIVE_UDFS.contains(operatorName)) {
+      throw new UnsupportedUDFException(operatorName);
+    }
+    final String viewDependentFunctionName = operator.getViewDependentFunctionName();
     final List<String> dependencies = operator.getIvyDependencies();
     List<URI> listOfUris = dependencies.stream().map(URI::create).collect(Collectors.toList());
-    LOG.info("Function: {} is not a Builtin UDF or Transportable UDF. We fall back to its Hive "
-        + "function with ivy dependency: {}", functionClassName, String.join(",", dependencies));
+    LOG.info("Function: {} is not a Builtin UDF or Transport UDF. We fall back to its Hive "
+        + "function with ivy dependency: {}", operatorName, String.join(",", dependencies));
     final SparkUDFInfo sparkUDFInfo =
-        new SparkUDFInfo(functionClassName, expandedFunctionName, listOfUris, SparkUDFInfo.UDFTYPE.HIVE_CUSTOM_UDF);
+        new SparkUDFInfo(operatorName, viewDependentFunctionName, listOfUris, SparkUDFInfo.UDFTYPE.HIVE_CUSTOM_UDF);
     sparkUDFInfos.add(sparkUDFInfo);
     final SqlOperator convertedFunction =
-        createSqlOperatorOfFunction(expandedFunctionName, operator.getReturnTypeInference());
+        createSqlOperator(viewDependentFunctionName, operator.getReturnTypeInference());
     return convertedFunction.createCall(sqlCall.getParserPosition(), sqlCall.getOperandList());
   }
 }
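Note the behavioral shift in this hunk: the UNSUPPORTED_HIVE_UDFS check moved from condition() into transform(), so the UnsupportedUDFException now fires only for operators that already matched condition(), i.e. VersionedSqlUserDefinedFunction instances with dotted class names. A hedged walk-through with illustrative names (none taken from the repo):

// Given a versioned LinkedIn UDF registered by a view (all names illustrative):
//   operator name:                "com.linkedin.foo.hive.Bar"  (the Hive UDF class)
//   view-dependent function name: "default_foo_bar"
//   ivy dependencies:             ["ivy://com.linkedin.foo:bar-udf:1.0"]
// transform() rewrites the call `com.linkedin.foo.hive.Bar(x)` to `default_foo_bar(x)`
// and adds a SparkUDFInfo of type HIVE_CUSTOM_UDF carrying that ivy URI, so the
// caller can register the Hive UDF in the SparkSession before running the query.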
