Commit 60bb659

Merge branch 'main' into CLI_VIIa
# Conflicts:
#	clients/cli/src/main/java/org/apache/gravitino/cli/CommandEntities.java
#	clients/cli/src/main/java/org/apache/gravitino/cli/ErrorMessages.java
#	clients/cli/src/main/java/org/apache/gravitino/cli/GravitinoOptions.java
#	clients/cli/src/test/java/org/apache/gravitino/cli/TestCommandEntities.java
#	docs/cli.md
2 parents 2a090cc + eba65cd commit 60bb659

42 files changed, +1073 -162 lines changed


authorizations/authorization-ranger/build.gradle.kts

Lines changed: 4 additions & 2 deletions
@@ -26,9 +26,10 @@ plugins {
 
 val scalaVersion: String = project.properties["scalaVersion"] as? String ?: extra["defaultScalaVersion"].toString()
 val sparkVersion: String = libs.versions.spark35.get()
-val kyuubiVersion: String = libs.versions.kyuubi4spark35.get()
+val kyuubiVersion: String = libs.versions.kyuubi4paimon.get()
 val sparkMajorVersion: String = sparkVersion.substringBeforeLast(".")
 val icebergVersion: String = libs.versions.iceberg4spark.get()
+val paimonVersion: String = libs.versions.paimon.get()
 
 dependencies {
   implementation(project(":api")) {
@@ -86,7 +87,7 @@ dependencies {
     exclude("io.dropwizard.metrics")
     exclude("org.rocksdb")
   }
-  testImplementation("org.apache.kyuubi:kyuubi-spark-authz_$scalaVersion:$kyuubiVersion") {
+  testImplementation("org.apache.kyuubi:kyuubi-spark-authz-shaded_$scalaVersion:$kyuubiVersion") {
     exclude("com.sun.jersey")
   }
   testImplementation(libs.hadoop3.client)
@@ -100,6 +101,7 @@ dependencies {
     exclude("io.netty")
   }
   testImplementation("org.apache.iceberg:iceberg-spark-runtime-${sparkMajorVersion}_$scalaVersion:$icebergVersion")
+  testImplementation("org.apache.paimon:paimon-spark-$sparkMajorVersion:$paimonVersion")
 }
 
 tasks {

authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java

Lines changed: 1 addition & 0 deletions
@@ -34,6 +34,7 @@ protected AuthorizationPlugin newPlugin(String catalogProvider, Map<String, Stri
     switch (catalogProvider) {
       case "hive":
       case "lakehouse-iceberg":
+      case "lakehouse-paimon":
        return RangerAuthorizationHadoopSQLPlugin.getInstance(config);
      default:
        throw new IllegalArgumentException("Unknown catalog provider: " + catalogProvider);
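
Note: this hunk routes the new lakehouse-paimon provider to the same shared Hadoop SQL plugin already used for Hive and Iceberg. A minimal self-contained sketch of that fall-through dispatch pattern (the resolvePluginName helper and the main harness are hypothetical, added for illustration only):

import java.util.Map;

public class ProviderDispatchSketch {
  // Hypothetical helper mirroring the switch in RangerAuthorization#newPlugin:
  // several catalog providers share one Ranger Hadoop SQL plugin.
  static String resolvePluginName(String catalogProvider) {
    switch (catalogProvider) {
      case "hive":
      case "lakehouse-iceberg":
      case "lakehouse-paimon": // newly added case falls through to the same plugin
        return "RangerAuthorizationHadoopSQLPlugin";
      default:
        throw new IllegalArgumentException("Unknown catalog provider: " + catalogProvider);
    }
  }

  public static void main(String[] args) {
    // Prints: RangerAuthorizationHadoopSQLPlugin
    System.out.println(resolvePluginName("lakehouse-paimon"));
  }
}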

authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHadoopSQLPlugin.java

Lines changed: 11 additions & 9 deletions
@@ -35,7 +35,7 @@
 import org.apache.gravitino.authorization.Privilege;
 import org.apache.gravitino.authorization.SecurableObject;
 import org.apache.gravitino.authorization.SecurableObjects;
-import org.apache.gravitino.authorization.ranger.RangerPrivileges.RangerHivePrivilege;
+import org.apache.gravitino.authorization.ranger.RangerPrivileges.RangerHadoopSQLPrivilege;
 import org.apache.gravitino.authorization.ranger.reference.RangerDefines.PolicyResource;
 import org.apache.gravitino.exceptions.AuthorizationPluginException;
 import org.slf4j.Logger;
@@ -96,26 +96,28 @@ public void validateRangerMetadataObject(List<String> names, RangerMetadataObjec
   public Map<Privilege.Name, Set<RangerPrivilege>> privilegesMappingRule() {
     return ImmutableMap.of(
         Privilege.Name.CREATE_CATALOG,
-        ImmutableSet.of(RangerHivePrivilege.CREATE),
+        ImmutableSet.of(RangerHadoopSQLPrivilege.CREATE),
         Privilege.Name.USE_CATALOG,
-        ImmutableSet.of(RangerHivePrivilege.SELECT),
+        ImmutableSet.of(RangerHadoopSQLPrivilege.SELECT),
         Privilege.Name.CREATE_SCHEMA,
-        ImmutableSet.of(RangerHivePrivilege.CREATE),
+        ImmutableSet.of(RangerHadoopSQLPrivilege.CREATE),
         Privilege.Name.USE_SCHEMA,
-        ImmutableSet.of(RangerHivePrivilege.SELECT),
+        ImmutableSet.of(RangerHadoopSQLPrivilege.SELECT),
         Privilege.Name.CREATE_TABLE,
-        ImmutableSet.of(RangerHivePrivilege.CREATE),
+        ImmutableSet.of(RangerHadoopSQLPrivilege.CREATE),
         Privilege.Name.MODIFY_TABLE,
         ImmutableSet.of(
-            RangerHivePrivilege.UPDATE, RangerHivePrivilege.ALTER, RangerHivePrivilege.WRITE),
+            RangerHadoopSQLPrivilege.UPDATE,
+            RangerHadoopSQLPrivilege.ALTER,
+            RangerHadoopSQLPrivilege.WRITE),
         Privilege.Name.SELECT_TABLE,
-        ImmutableSet.of(RangerHivePrivilege.READ, RangerHivePrivilege.SELECT));
+        ImmutableSet.of(RangerHadoopSQLPrivilege.READ, RangerHadoopSQLPrivilege.SELECT));
   }
 
   @Override
   /** Set the default owner rule. */
   public Set<RangerPrivilege> ownerMappingRule() {
-    return ImmutableSet.of(RangerHivePrivilege.ALL);
+    return ImmutableSet.of(RangerHadoopSQLPrivilege.ALL);
   }
 
   @Override
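
The renamed mapping answers the same question as before: which Ranger access types does a single Gravitino privilege fan out to? A hedged sketch of that lookup shape, using plain JDK collections in place of Guava; the two enums are hypothetical stand-ins for Privilege.Name and RangerHadoopSQLPrivilege:

import java.util.EnumMap;
import java.util.EnumSet;
import java.util.Map;
import java.util.Set;

public class PrivilegeMappingSketch {
  // Hypothetical stand-ins for Privilege.Name and RangerHadoopSQLPrivilege.
  enum GravitinoPrivilege { CREATE_TABLE, MODIFY_TABLE, SELECT_TABLE }
  enum RangerAccess { CREATE, UPDATE, ALTER, WRITE, READ, SELECT }

  // One Gravitino privilege expands to a set of Ranger access types,
  // mirroring the shape of privilegesMappingRule() in the plugin above.
  static final Map<GravitinoPrivilege, Set<RangerAccess>> MAPPING = new EnumMap<>(Map.of(
      GravitinoPrivilege.CREATE_TABLE, EnumSet.of(RangerAccess.CREATE),
      GravitinoPrivilege.MODIFY_TABLE,
          EnumSet.of(RangerAccess.UPDATE, RangerAccess.ALTER, RangerAccess.WRITE),
      GravitinoPrivilege.SELECT_TABLE, EnumSet.of(RangerAccess.READ, RangerAccess.SELECT)));

  public static void main(String[] args) {
    // Prints: [UPDATE, ALTER, WRITE]
    System.out.println(MAPPING.get(GravitinoPrivilege.MODIFY_TABLE));
  }
}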

authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerPrivileges.java

Lines changed: 3 additions & 3 deletions
@@ -25,7 +25,7 @@
 
 public class RangerPrivileges {
   /** Ranger Hive privileges enumeration. */
-  public enum RangerHivePrivilege implements RangerPrivilege {
+  public enum RangerHadoopSQLPrivilege implements RangerPrivilege {
     ALL("all"),
     SELECT("select"),
     UPDATE("update"),
@@ -41,7 +41,7 @@ public enum RangerHivePrivilege implements RangerPrivilege {
 
     private final String name; // Access a type in the Ranger policy item
 
-    RangerHivePrivilege(String name) {
+    RangerHadoopSQLPrivilege(String name) {
       this.name = name;
     }
 
@@ -117,7 +117,7 @@ public boolean equalsTo(String value) {
 
   static List<Class<? extends Enum<? extends RangerPrivilege>>> allRangerPrivileges =
       Lists.newArrayList(
-          RangerPrivileges.RangerHivePrivilege.class, RangerPrivileges.RangerHdfsPrivilege.class);
+          RangerHadoopSQLPrivilege.class, RangerPrivileges.RangerHdfsPrivilege.class);
 
   public static RangerPrivilege valueOf(String name) {
     Preconditions.checkArgument(name != null, "Privilege name string cannot be null!");
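
RangerPrivileges.valueOf resolves a lowercase policy string such as "select" by scanning every enum class registered in allRangerPrivileges; the rename only swaps which class sits in that registry. A self-contained sketch of that scan, with a hypothetical two-enum registry standing in for the real one:

import java.util.List;

public class PrivilegeLookupSketch {
  // Hypothetical stand-ins for RangerHadoopSQLPrivilege and RangerHdfsPrivilege.
  enum HadoopSqlAccess { ALL, SELECT, UPDATE }
  enum HdfsAccess { READ, WRITE, EXECUTE }

  // Registry mirroring allRangerPrivileges: a list of enum classes
  // scanned in order until one contains a matching constant.
  static final List<Class<? extends Enum<?>>> REGISTRY =
      List.of(HadoopSqlAccess.class, HdfsAccess.class);

  static Enum<?> lookup(String name) {
    for (Class<? extends Enum<?>> clazz : REGISTRY) {
      for (Enum<?> constant : clazz.getEnumConstants()) {
        if (constant.name().equalsIgnoreCase(name)) {
          return constant;
        }
      }
    }
    throw new IllegalArgumentException("Unknown privilege: " + name);
  }

  public static void main(String[] args) {
    // Prints: SELECT (resolved from HadoopSqlAccess)
    System.out.println(lookup("select"));
  }
}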

authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerBaseE2EIT.java

Lines changed: 17 additions & 17 deletions
@@ -193,7 +193,7 @@ protected static void waitForUpdatingPolicies() throws InterruptedException {
 
   protected abstract void useCatalog() throws InterruptedException;
 
-  protected abstract void checkHaveNoPrivileges();
+  protected abstract void checkWithoutPrivileges();
 
   protected abstract void testAlterTable();
 
@@ -269,7 +269,7 @@ void testCreateTable() throws InterruptedException {
         AccessControlException.class, () -> sparkSession.sql(SQL_SELECT_TABLE).collectAsList());
 
     // Clean up
-    catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName, tableName));
+    catalog.asTableCatalog().purgeTable(NameIdentifier.of(schemaName, tableName));
     catalog.asSchemas().dropSchema(schemaName, false);
     metalake.deleteRole(createTableRole);
     metalake.deleteRole(createSchemaRole);
@@ -323,10 +323,10 @@ void testReadWriteTableWithMetalakeLevelRole() throws InterruptedException {
     // case 7: If we don't have the role, we can't insert and select from data.
     metalake.deleteRole(readWriteRole);
     waitForUpdatingPolicies();
-    checkHaveNoPrivileges();
+    checkWithoutPrivileges();
 
     // Clean up
-    catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName, tableName));
+    catalog.asTableCatalog().purgeTable(NameIdentifier.of(schemaName, tableName));
     catalog.asSchemas().dropSchema(schemaName, false);
   }
 
@@ -387,10 +387,10 @@ void testReadWriteTableWithTableLevelRole() throws InterruptedException {
     // case 7: If we don't have the role, we can't insert and select from data.
     metalake.deleteRole(roleName);
     waitForUpdatingPolicies();
-    checkHaveNoPrivileges();
+    checkWithoutPrivileges();
 
     // Clean up
-    catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName, tableName));
+    catalog.asTableCatalog().purgeTable(NameIdentifier.of(schemaName, tableName));
     catalog.asSchemas().dropSchema(schemaName, false);
   }
 
@@ -441,10 +441,10 @@ void testReadOnlyTable() throws InterruptedException {
     // case 7: If we don't have the role, we can't insert and select from data.
     metalake.deleteRole(readOnlyRole);
     waitForUpdatingPolicies();
-    checkHaveNoPrivileges();
+    checkWithoutPrivileges();
 
     // Clean up
-    catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName, tableName));
+    catalog.asTableCatalog().purgeTable(NameIdentifier.of(schemaName, tableName));
     catalog.asSchemas().dropSchema(schemaName, false);
   }
 
@@ -496,10 +496,10 @@ void testWriteOnlyTable() throws InterruptedException {
     // case 7: If we don't have the role, we can't insert and select from data.
     metalake.deleteRole(writeOnlyRole);
     waitForUpdatingPolicies();
-    checkHaveNoPrivileges();
+    checkWithoutPrivileges();
 
     // Clean up
-    catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName, tableName));
+    catalog.asTableCatalog().purgeTable(NameIdentifier.of(schemaName, tableName));
     catalog.asSchemas().dropSchema(schemaName, false);
   }
 
@@ -547,7 +547,7 @@ void testCreateAllPrivilegesRole() throws InterruptedException {
     sparkSession.sql(SQL_CREATE_TABLE);
 
     // Clean up
-    catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName, tableName));
+    catalog.asTableCatalog().purgeTable(NameIdentifier.of(schemaName, tableName));
     catalog.asSchemas().dropSchema(schemaName, false);
     metalake.deleteRole(roleName);
   }
@@ -690,7 +690,7 @@ void testRenameMetadataObject() throws InterruptedException {
     sparkSession.sql(SQL_RENAME_BACK_TABLE);
 
     // Clean up
-    catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName, tableName));
+    catalog.asTableCatalog().purgeTable(NameIdentifier.of(schemaName, tableName));
     catalog.asSchemas().dropSchema(schemaName, false);
     metalake.deleteRole(roleName);
   }
@@ -739,7 +739,7 @@ void testRenameMetadataObjectPrivilege() throws InterruptedException {
     sparkSession.sql(SQL_INSERT_TABLE);
 
     // Clean up
-    catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName, tableName));
+    catalog.asTableCatalog().purgeTable(NameIdentifier.of(schemaName, tableName));
     catalog.asSchemas().dropSchema(schemaName, false);
     metalake.deleteRole(roleName);
   }
@@ -774,7 +774,7 @@ void testChangeOwner() throws InterruptedException {
     metalake.deleteRole(helperRole);
     waitForUpdatingPolicies();
 
-    checkHaveNoPrivileges();
+    checkWithoutPrivileges();
 
     // case 2. user is the table owner
     MetadataObject tableObject =
@@ -787,7 +787,7 @@ void testChangeOwner() throws InterruptedException {
     checkTableAllPrivilegesExceptForCreating();
 
     // Delete Gravitino's meta data
-    catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName, tableName));
+    catalog.asTableCatalog().purgeTable(NameIdentifier.of(schemaName, tableName));
     waitForUpdatingPolicies();
 
     // Fail to create the table
@@ -854,7 +854,7 @@ void testChangeOwner() throws InterruptedException {
     sparkSession.sql(SQL_DROP_SCHEMA);
 
     // Clean up
-    catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName, tableName));
+    catalog.asTableCatalog().purgeTable(NameIdentifier.of(schemaName, tableName));
     catalog.asSchemas().dropSchema(schemaName, false);
   }
 
@@ -915,7 +915,7 @@ void testAllowUseSchemaPrivilege() throws InterruptedException {
         1, rows2.stream().filter(row -> row.getString(0).equals(schemaName)).count());
 
     // Clean up
-    catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName, tableName));
+    catalog.asTableCatalog().purgeTable(NameIdentifier.of(schemaName, tableName));
     catalog.asSchemas().dropSchema(schemaName, false);
     metalake.revokeRolesFromUser(Lists.newArrayList(roleName), userName1);
     metalake.deleteRole(roleName);
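
Every cleanup block above now calls purgeTable instead of dropTable, presumably so each test run also removes the table's underlying data rather than just its catalog entry. A toy model of that distinction; all names here are hypothetical illustrations, not the Gravitino API:

import java.util.HashMap;
import java.util.Map;

public class DropVsPurgeSketch {
  static final Map<String, String> CATALOG_ENTRIES = new HashMap<>();
  static final Map<String, byte[]> TABLE_DATA = new HashMap<>();

  static void dropTable(String name) {
    CATALOG_ENTRIES.remove(name); // metadata only; data files survive
  }

  static void purgeTable(String name) {
    CATALOG_ENTRIES.remove(name);
    TABLE_DATA.remove(name); // data is deleted too, so reruns start clean
  }

  public static void main(String[] args) {
    CATALOG_ENTRIES.put("db.tab", "schema");
    TABLE_DATA.put("db.tab", new byte[] {1, 2, 3});
    purgeTable("db.tab");
    System.out.println(TABLE_DATA.containsKey("db.tab")); // false
  }
}

In the tests themselves the equivalent call is catalog.asTableCatalog().purgeTable(NameIdentifier.of(schemaName, tableName)), as shown in the hunks above.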

authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java

Lines changed: 1 addition & 1 deletion
@@ -120,7 +120,7 @@ protected void useCatalog() throws InterruptedException {
   }
 
   @Override
-  protected void checkHaveNoPrivileges() {
+  protected void checkWithoutPrivileges() {
     Assertions.assertThrows(AccessControlException.class, () -> sparkSession.sql(SQL_INSERT_TABLE));
     Assertions.assertThrows(
         AccessControlException.class, () -> sparkSession.sql(SQL_SELECT_TABLE).collectAsList());

authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java

Lines changed: 2 additions & 2 deletions
@@ -348,7 +348,7 @@ public void testFindManagedPolicy() {
             RangerMetadataObject.Type.TABLE,
             ImmutableSet.of(
                 new RangerPrivileges.RangerHivePrivilegeImpl(
-                    RangerPrivileges.RangerHivePrivilege.ALL, Privilege.Condition.ALLOW)));
+                    RangerPrivileges.RangerHadoopSQLPrivilege.ALL, Privilege.Condition.ALLOW)));
     Assertions.assertNull(rangerHelper.findManagedPolicy(rangerSecurableObject));
 
     // Add a policy for `db3.tab1`
@@ -398,7 +398,7 @@ static void createHivePolicy(
     policyItem.setAccesses(
         Arrays.asList(
             new RangerPolicy.RangerPolicyItemAccess(
-                RangerPrivileges.RangerHivePrivilege.SELECT.toString())));
+                RangerPrivileges.RangerHadoopSQLPrivilege.SELECT.toString())));
     RangerITEnv.updateOrCreateRangerPolicy(
         RangerDefines.SERVICE_TYPE_HIVE,
         RangerITEnv.RANGER_HIVE_REPO_NAME,

authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java

Lines changed: 1 addition & 1 deletion
@@ -212,7 +212,7 @@ static void allowAnyoneAccessInformationSchema() {
     policyItem.setAccesses(
         Arrays.asList(
             new RangerPolicy.RangerPolicyItemAccess(
-                RangerPrivileges.RangerHivePrivilege.SELECT.toString())));
+                RangerPrivileges.RangerHadoopSQLPrivilege.SELECT.toString())));
     updateOrCreateRangerPolicy(
         RANGER_HIVE_TYPE,
         RANGER_HIVE_REPO_NAME,
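
Both test fixtures build Ranger policy items the same way: the privilege enum's lowercase access-type name (SELECT("select") in RangerPrivileges above, assuming toString() returns the stored name) is wrapped in a RangerPolicyItemAccess. A short sketch of that construction; the import path is an assumption based on the standard Ranger plugin model layout:

import java.util.Arrays;
// Assumed import path for the Ranger model classes used in the hunks above.
import org.apache.ranger.plugin.model.RangerPolicy;

public class PolicyItemSketch {
  public static void main(String[] args) {
    RangerPolicy.RangerPolicyItem policyItem = new RangerPolicy.RangerPolicyItem();
    // "select" is the lowercase wire name the enum constant carries,
    // i.e. what RangerHadoopSQLPrivilege.SELECT.toString() supplies above.
    policyItem.setAccesses(
        Arrays.asList(new RangerPolicy.RangerPolicyItemAccess("select")));
    System.out.println(policyItem.getAccesses().get(0).getType()); // select
  }
}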

authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java

Lines changed: 2 additions & 2 deletions
@@ -90,7 +90,7 @@ public void startIntegrationTest() throws Exception {
     sparkSession =
         SparkSession.builder()
             .master("local[1]")
-            .appName("Ranger Hive E2E integration test")
+            .appName("Ranger Iceberg E2E integration test")
             .config("spark.sql.catalog.iceberg", "org.apache.iceberg.spark.SparkCatalog")
             .config("spark.sql.catalog.iceberg.type", "hive")
             .config("spark.sql.catalog.iceberg.uri", HIVE_METASTORE_URIS)
@@ -147,7 +147,7 @@ protected void checkDeleteSQLWithWritePrivileges() {
   }
 
   @Override
-  protected void checkHaveNoPrivileges() {
+  protected void checkWithoutPrivileges() {
     Assertions.assertThrows(AccessControlException.class, () -> sparkSession.sql(SQL_INSERT_TABLE));
     Assertions.assertThrows(
         AccessControlException.class, () -> sparkSession.sql(SQL_SELECT_TABLE).collectAsList());
