diff --git a/build.gradle b/build.gradle index 433f85a69c..cf80227f2d 100644 --- a/build.gradle +++ b/build.gradle @@ -594,6 +594,10 @@ dependencies { implementation 'com.nimbusds:nimbus-jose-jwt:9.46' implementation 'com.rfksystems:blake2b:2.0.0' implementation 'com.password4j:password4j:1.8.2' + + // Action privileges: check tables and compact collections + implementation 'com.selectivem.collections:special-collections-complete:1.4.0' + //JWT implementation "io.jsonwebtoken:jjwt-api:${jjwt_version}" implementation "io.jsonwebtoken:jjwt-impl:${jjwt_version}" @@ -747,6 +751,7 @@ dependencies { integrationTestImplementation "org.apache.httpcomponents:fluent-hc:4.5.14" integrationTestImplementation "org.apache.httpcomponents:httpcore:4.4.16" integrationTestImplementation "org.apache.httpcomponents:httpasyncclient:4.1.5" + integrationTestImplementation "org.mockito:mockito-core:5.14.2" //spotless implementation('com.google.googlejavaformat:google-java-format:1.24.0') { diff --git a/src/integrationTest/java/org/opensearch/security/privileges/ActionPrivilegesTest.java b/src/integrationTest/java/org/opensearch/security/privileges/ActionPrivilegesTest.java new file mode 100644 index 0000000000..7807dae748 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/ActionPrivilegesTest.java @@ -0,0 +1,1033 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.stream.Collectors; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.fasterxml.jackson.core.JsonProcessingException; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Suite; + +import org.opensearch.action.support.IndicesOptions; +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; +import org.opensearch.security.resolver.IndexResolverReplacer; +import org.opensearch.security.securityconf.FlattenedActionGroups; +import org.opensearch.security.securityconf.impl.CType; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.user.User; +import org.opensearch.security.util.MockIndexMetadataBuilder; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.opensearch.security.privileges.PrivilegeEvaluatorResponseMatcher.isAllowed; +import static org.opensearch.security.privileges.PrivilegeEvaluatorResponseMatcher.isForbidden; +import static org.opensearch.security.privileges.PrivilegeEvaluatorResponseMatcher.isPartiallyOk; 
+import static org.opensearch.security.privileges.PrivilegeEvaluatorResponseMatcher.missingPrivileges; +import static org.opensearch.security.util.MockIndexMetadataBuilder.dataStreams; +import static org.opensearch.security.util.MockIndexMetadataBuilder.indices; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +/** + * Unit tests for ActionPrivileges. As the ActionPrivileges provides quite a few different code paths for checking + * privileges with different performance characteristics, this test suite defines different test cases for making sure + * all these code paths are tested. So, all functionality must be tested for "well-known" actions and non-well-known + * actions. For index privileges, there are a couple of more tests dimensions. See below. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + ActionPrivilegesTest.ClusterPrivileges.class, + ActionPrivilegesTest.IndexPrivileges.IndicesAndAliases.class, + ActionPrivilegesTest.IndexPrivileges.DataStreams.class, + ActionPrivilegesTest.Misc.class, + ActionPrivilegesTest.StatefulIndexPrivilegesHeapSize.class }) +public class ActionPrivilegesTest { + public static class ClusterPrivileges { + @Test + public void wellKnown() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - cluster:monitor/nodes/stats*", CType.ROLES); + + ActionPrivileges subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, null, Settings.EMPTY); + + assertThat(subject.hasClusterPrivilege(ctx("test_role"), "cluster:monitor/nodes/stats"), isAllowed()); + assertThat( + subject.hasClusterPrivilege(ctx("other_role"), "cluster:monitor/nodes/stats"), + isForbidden(missingPrivileges("cluster:monitor/nodes/stats")) + ); + assertThat( + subject.hasClusterPrivilege(ctx("test_role"), "cluster:monitor/nodes/other"), + isForbidden(missingPrivileges("cluster:monitor/nodes/other")) + ); + } + + @Test + public void notWellKnown() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - cluster:monitor/nodes/stats*", CType.ROLES); + + ActionPrivileges subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, null, Settings.EMPTY); + + assertThat(subject.hasClusterPrivilege(ctx("test_role"), "cluster:monitor/nodes/stats/somethingnotwellknown"), isAllowed()); + assertThat( + subject.hasClusterPrivilege(ctx("other_role"), "cluster:monitor/nodes/stats/somethingnotwellknown"), + isForbidden(missingPrivileges("cluster:monitor/nodes/stats/somethingnotwellknown")) + ); + assertThat( + subject.hasClusterPrivilege(ctx("test_role"), "cluster:monitor/nodes/something/else"), + isForbidden(missingPrivileges("cluster:monitor/nodes/something/else")) + ); + } + + @Test + public void wildcard() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - '*'", CType.ROLES); + + ActionPrivileges subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, null, Settings.EMPTY); + + assertThat(subject.hasClusterPrivilege(ctx("test_role"), "cluster:whatever"), isAllowed()); + assertThat( + subject.hasClusterPrivilege(ctx("other_role"), "cluster:whatever"), + isForbidden(missingPrivileges("cluster:whatever")) + ); + } + + @Test + public void 
explicit_wellKnown() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("non_explicit_role:\n" + // + " cluster_permissions:\n" + // + " - '*'\n" + // + "explicit_role:\n" + // + " cluster_permissions:\n" + // + " - cluster:monitor/nodes/stats\n" + // + "semi_explicit_role:\n" + // + " cluster_permissions:\n" + // + " - cluster:monitor/nodes/stats*\n", // + CType.ROLES + ); + + ActionPrivileges subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, null, Settings.EMPTY); + + assertThat(subject.hasExplicitClusterPrivilege(ctx("explicit_role"), "cluster:monitor/nodes/stats"), isAllowed()); + assertThat(subject.hasExplicitClusterPrivilege(ctx("semi_explicit_role"), "cluster:monitor/nodes/stats"), isAllowed()); + assertThat( + subject.hasExplicitClusterPrivilege(ctx("non_explicit_role"), "cluster:monitor/nodes/stats"), + isForbidden(missingPrivileges("cluster:monitor/nodes/stats")) + ); + assertThat( + subject.hasExplicitClusterPrivilege(ctx("other_role"), "cluster:monitor/nodes/stats"), + isForbidden(missingPrivileges("cluster:monitor/nodes/stats")) + ); + } + + @Test + public void explicit_notWellKnown() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("non_explicit_role:\n" + // + " cluster_permissions:\n" + // + " - '*'\n" + // + "explicit_role:\n" + // + " cluster_permissions:\n" + // + " - cluster:monitor/nodes/notwellknown\n" + // + "semi_explicit_role:\n" + // + " cluster_permissions:\n" + // + " - cluster:monitor/nodes/*\n", // + CType.ROLES + ); + + ActionPrivileges subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, null, Settings.EMPTY); + + assertThat(subject.hasExplicitClusterPrivilege(ctx("explicit_role"), "cluster:monitor/nodes/notwellknown"), isAllowed()); + assertThat(subject.hasExplicitClusterPrivilege(ctx("semi_explicit_role"), "cluster:monitor/nodes/notwellknown"), isAllowed()); + assertThat( + subject.hasExplicitClusterPrivilege(ctx("non_explicit_role"), "cluster:monitor/nodes/notwellknown"), + isForbidden(missingPrivileges("cluster:monitor/nodes/notwellknown")) + ); + assertThat( + subject.hasExplicitClusterPrivilege(ctx("other_role"), "cluster:monitor/nodes/notwellknown"), + isForbidden(missingPrivileges("cluster:monitor/nodes/notwellknown")) + ); + } + + @Test + public void hasAny_wellKnown() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - cluster:monitor/nodes/stats*", CType.ROLES); + + ActionPrivileges subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, null, Settings.EMPTY); + + assertThat(subject.hasAnyClusterPrivilege(ctx("test_role"), ImmutableSet.of("cluster:monitor/nodes/stats")), isAllowed()); + assertThat( + subject.hasAnyClusterPrivilege( + ctx("test_role"), + ImmutableSet.of("cluster:monitor/nodes/foo", "cluster:monitor/nodes/stats") + ), + isAllowed() + ); + + assertThat( + subject.hasAnyClusterPrivilege(ctx("other_role"), ImmutableSet.of("cluster:monitor/nodes/stats")), + isForbidden(missingPrivileges("cluster:monitor/nodes/stats")) + ); + assertThat( + subject.hasAnyClusterPrivilege(ctx("test_role"), ImmutableSet.of("cluster:monitor/nodes/other")), + isForbidden(missingPrivileges("cluster:monitor/nodes/other")) + ); + } + + @Test + public void hasAny_notWellKnown() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - 
cluster:monitor/nodes/*", CType.ROLES); + + ActionPrivileges subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, null, Settings.EMPTY); + + assertThat( + subject.hasAnyClusterPrivilege(ctx("test_role"), ImmutableSet.of("cluster:monitor/nodes/notwellknown")), + isAllowed() + ); + assertThat( + subject.hasAnyClusterPrivilege( + ctx("test_role"), + ImmutableSet.of("cluster:monitor/other", "cluster:monitor/nodes/notwellknown") + ), + isAllowed() + ); + + assertThat( + subject.hasAnyClusterPrivilege(ctx("other_role"), ImmutableSet.of("cluster:monitor/nodes/notwellknown")), + isForbidden(missingPrivileges("cluster:monitor/nodes/notwellknown")) + ); + assertThat( + subject.hasAnyClusterPrivilege(ctx("test_role"), ImmutableSet.of("cluster:monitor/other")), + isForbidden(missingPrivileges("cluster:monitor/other")) + ); + assertThat( + subject.hasAnyClusterPrivilege(ctx("test_role"), ImmutableSet.of("cluster:monitor/other", "cluster:monitor/yetanother")), + isForbidden() + ); + } + + @Test + public void hasAny_wildcard() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - '*'", CType.ROLES); + + ActionPrivileges subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, null, Settings.EMPTY); + + assertThat(subject.hasAnyClusterPrivilege(ctx("test_role"), ImmutableSet.of("cluster:whatever")), isAllowed()); + + assertThat( + subject.hasAnyClusterPrivilege(ctx("other_role"), ImmutableSet.of("cluster:whatever")), + isForbidden(missingPrivileges("cluster:whatever")) + ); + } + } + + /** + * Tests for index privileges. This class contains two parameterized test suites, first for indices and aliases, + * second for data streams. + *
+ * Both test suites use parameters to create a 3-dimensional test case space to make sure all code paths are covered.
+ *
+ * The dimensions are (see also the params() methods):
+ *
+ * 1. roles.yml; index patterns: Different usages of patterns, wildcards and constant names.
+ * 2. roles.yml; action patterns: Well-known vs. non-well-known actions, combined with the use of patterns vs. constant action names.
+ * 3. Statefulness: Whether the data structures from ActionPrivileges.StatefulIndexPrivileges are used or not.
+ *
+ * As so many different situations need to be tested, the test oracle method covers() is used to verify the results. + */ + public static class IndexPrivileges { + + @RunWith(Parameterized.class) + public static class IndicesAndAliases { + final ActionSpec actionSpec; + final IndexSpec indexSpec; + final SecurityDynamicConfiguration roles; + final String primaryAction; + final ImmutableSet requiredActions; + final ImmutableSet otherActions; + final ActionPrivileges subject; + + @Test + public void positive_full() throws Exception { + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege(ctx("test_role"), requiredActions, resolved("index_a11")); + assertThat(result, isAllowed()); + } + + @Test + public void positive_partial() throws Exception { + PrivilegesEvaluationContext ctx = ctx("test_role"); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege(ctx, requiredActions, resolved("index_a11", "index_a12")); + + if (covers(ctx, "index_a11", "index_a12")) { + assertThat(result, isAllowed()); + } else if (covers(ctx, "index_a11")) { + assertThat(result, isPartiallyOk("index_a11")); + } else { + assertThat(result, isForbidden(missingPrivileges(requiredActions))); + } + } + + @Test + public void positive_partial2() throws Exception { + PrivilegesEvaluationContext ctx = ctx("test_role"); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege( + ctx, + requiredActions, + resolved("index_a11", "index_a12", "index_b1") + ); + + if (covers(ctx, "index_a11", "index_a12", "index_b1")) { + assertThat(result, isAllowed()); + } else if (covers(ctx, "index_a11", "index_a12")) { + assertThat(result, isPartiallyOk("index_a11", "index_a12")); + } else if (covers(ctx, "index_a11")) { + assertThat(result, isPartiallyOk("index_a11")); + } else { + assertThat(result, isForbidden(missingPrivileges(requiredActions))); + } + } + + @Test + public void positive_noLocal() throws Exception { + IndexResolverReplacer.Resolved resolved = new IndexResolverReplacer.Resolved( + ImmutableSet.of(), + ImmutableSet.of(), + ImmutableSet.of("remote:a"), + ImmutableSet.of("remote:a"), + IndicesOptions.LENIENT_EXPAND_OPEN + ); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege(ctx("test_role"), requiredActions, resolved); + assertThat(result, isAllowed()); + } + + @Test + public void negative_wrongRole() throws Exception { + PrivilegesEvaluationContext ctx = ctx("other_role"); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege(ctx, requiredActions, resolved("index_a11")); + assertThat(result, isForbidden(missingPrivileges(requiredActions))); + } + + @Test + public void negative_wrongAction() throws Exception { + PrivilegesEvaluationContext ctx = ctx("test_role"); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege(ctx, otherActions, resolved("index_a11")); + + if (actionSpec.givenPrivs.contains("*")) { + assertThat(result, isAllowed()); + } else { + assertThat(result, isForbidden(missingPrivileges(otherActions))); + } + } + + @Test + public void positive_hasExplicit_full() { + PrivilegesEvaluationContext ctx = ctx("test_role"); + PrivilegesEvaluatorResponse result = subject.hasExplicitIndexPrivilege(ctx, requiredActions, resolved("index_a11")); + + if (actionSpec.givenPrivs.contains("*")) { + // The * is forbidden for explicit privileges + assertThat(result, isForbidden(missingPrivileges(requiredActions))); + } else if (!requiredActions.contains("indices:data/read/search")) { + // For test purposes, we have designated "indices:data/read/search" as an action 
requiring explicit privileges + // Other actions are not covered here + assertThat(result, isForbidden(missingPrivileges(requiredActions))); + } else { + assertThat(result, isAllowed()); + } + } + + private boolean covers(PrivilegesEvaluationContext ctx, String... indices) { + for (String index : indices) { + if (!indexSpec.covers(ctx.getUser(), index)) { + return false; + } + } + return true; + } + + @Parameterized.Parameters(name = "{0}; actions: {1}; {2}") + public static Collection params() { + List result = new ArrayList<>(); + + for (IndexSpec indexSpec : Arrays.asList( + new IndexSpec().givenIndexPrivs("*"), // + new IndexSpec().givenIndexPrivs("index_*"), // + new IndexSpec().givenIndexPrivs("index_a11"), // + new IndexSpec().givenIndexPrivs("index_a1*"), // + new IndexSpec().givenIndexPrivs("index_${attrs.dept_no}"), // + new IndexSpec().givenIndexPrivs("alias_a1*") // + )) { + for (ActionSpec actionSpec : Arrays.asList( + new ActionSpec("wildcard")// + .givenPrivs("*") + .requiredPrivs("indices:data/read/search"), // + new ActionSpec("constant, well known")// + .givenPrivs("indices:data/read/search") + .requiredPrivs("indices:data/read/search"), // + new ActionSpec("pattern, well known")// + .givenPrivs("indices:data/read/*") + .requiredPrivs("indices:data/read/search"), // + new ActionSpec("pattern, well known, two required privs")// + .givenPrivs("indices:data/read/*") + .requiredPrivs("indices:data/read/search", "indices:data/read/get"), // + new ActionSpec("constant, non well known")// + .givenPrivs("indices:unknown/unwell") + .requiredPrivs("indices:unknown/unwell"), // + new ActionSpec("pattern, non well known")// + .givenPrivs("indices:unknown/*") + .requiredPrivs("indices:unknown/unwell"), // + new ActionSpec("pattern, non well known, two required privs")// + .givenPrivs("indices:unknown/*") + .requiredPrivs("indices:unknown/unwell", "indices:unknown/notatall")// + + )) { + for (Statefulness statefulness : Statefulness.values()) { + result.add(new Object[] { indexSpec, actionSpec, statefulness }); + } + } + } + return result; + } + + public IndicesAndAliases(IndexSpec indexSpec, ActionSpec actionSpec, Statefulness statefulness) throws Exception { + this.indexSpec = indexSpec; + this.actionSpec = actionSpec; + this.roles = indexSpec.toRolesConfig(actionSpec); + + this.primaryAction = actionSpec.primaryAction; + this.requiredActions = actionSpec.requiredPrivs; + + this.otherActions = actionSpec.wellKnownActions + ? 
ImmutableSet.of("indices:data/write/update") + : ImmutableSet.of("indices:foobar/unknown"); + this.indexSpec.indexMetadata = INDEX_METADATA; + + Settings settings = Settings.EMPTY; + if (statefulness == Statefulness.STATEFUL_LIMITED) { + settings = Settings.builder() + .put(ActionPrivileges.PRECOMPUTED_PRIVILEGES_MAX_HEAP_SIZE.getKey(), new ByteSizeValue(10, ByteSizeUnit.BYTES)) + .build(); + } + + this.subject = new ActionPrivileges( + roles, + FlattenedActionGroups.EMPTY, + () -> INDEX_METADATA, + settings, + WellKnownActions.CLUSTER_ACTIONS, + WellKnownActions.INDEX_ACTIONS, + WellKnownActions.INDEX_ACTIONS + ); + + if (statefulness == Statefulness.STATEFUL || statefulness == Statefulness.STATEFUL_LIMITED) { + this.subject.updateStatefulIndexPrivileges(INDEX_METADATA, 1); + } + } + + final static Map INDEX_METADATA = // + indices("index_a11", "index_a12", "index_a21", "index_a22", "index_b1", "index_b2")// + .alias("alias_a") + .of("index_a11", "index_a12", "index_a21", "index_a22")// + .alias("alias_a1") + .of("index_a11", "index_a12")// + .alias("alias_a2") + .of("index_a21", "index_a22")// + .alias("alias_b") + .of("index_b1", "index_b2")// + .build() + .getIndicesLookup(); + + static IndexResolverReplacer.Resolved resolved(String... indices) { + return new IndexResolverReplacer.Resolved( + ImmutableSet.of(), + ImmutableSet.copyOf(indices), + ImmutableSet.copyOf(indices), + ImmutableSet.of(), + IndicesOptions.LENIENT_EXPAND_OPEN + ); + } + } + + @RunWith(Parameterized.class) + public static class DataStreams { + final ActionSpec actionSpec; + final IndexSpec indexSpec; + final SecurityDynamicConfiguration roles; + final String primaryAction; + final ImmutableSet requiredActions; + final ImmutableSet otherActions; + final ActionPrivileges subject; + + @Test + public void positive_full() throws Exception { + PrivilegesEvaluationContext ctx = ctx("test_role"); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege(ctx, requiredActions, resolved("data_stream_a11")); + if (covers(ctx, "data_stream_a11")) { + assertThat(result, isAllowed()); + } else if (covers(ctx, ".ds-data_stream_a11-000001")) { + assertThat( + result, + isPartiallyOk(".ds-data_stream_a11-000001", ".ds-data_stream_a11-000002", ".ds-data_stream_a11-000003") + ); + } else { + assertThat(result, isForbidden(missingPrivileges(requiredActions))); + } + } + + @Test + public void positive_partial() throws Exception { + PrivilegesEvaluationContext ctx = ctx("test_role"); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege( + ctx, + requiredActions, + resolved("data_stream_a11", "data_stream_a12") + ); + + if (covers(ctx, "data_stream_a11", "data_stream_a12")) { + assertThat(result, isAllowed()); + } else if (covers(ctx, "data_stream_a11")) { + assertThat( + result, + isPartiallyOk( + "data_stream_a11", + ".ds-data_stream_a11-000001", + ".ds-data_stream_a11-000002", + ".ds-data_stream_a11-000003" + ) + ); + } else if (covers(ctx, ".ds-data_stream_a11-000001")) { + assertThat( + result, + isPartiallyOk(".ds-data_stream_a11-000001", ".ds-data_stream_a11-000002", ".ds-data_stream_a11-000003") + ); + } else { + assertThat(result, isForbidden(missingPrivileges(requiredActions))); + } + } + + @Test + public void negative_wrongRole() throws Exception { + PrivilegesEvaluationContext ctx = ctx("other_role"); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege(ctx, requiredActions, resolved("data_stream_a11")); + assertThat(result, isForbidden(missingPrivileges(requiredActions))); + } + + @Test + public 
void negative_wrongAction() throws Exception { + PrivilegesEvaluationContext ctx = ctx("test_role"); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege(ctx, otherActions, resolved("data_stream_a11")); + assertThat(result, isForbidden(missingPrivileges(otherActions))); + } + + private boolean covers(PrivilegesEvaluationContext ctx, String... indices) { + for (String index : indices) { + if (!indexSpec.covers(ctx.getUser(), index)) { + return false; + } + } + return true; + } + + @Parameterized.Parameters(name = "{0}; actions: {1}; {2}") + public static Collection params() { + List result = new ArrayList<>(); + + for (IndexSpec indexSpec : Arrays.asList( + new IndexSpec().givenIndexPrivs("*"), // + new IndexSpec().givenIndexPrivs("data_stream_*"), // + new IndexSpec().givenIndexPrivs("data_stream_a11"), // + new IndexSpec().givenIndexPrivs("data_stream_a1*"), // + new IndexSpec().givenIndexPrivs("data_stream_${attrs.dept_no}"), // + new IndexSpec().givenIndexPrivs(".ds-data_stream_a11*") // + )) { + for (ActionSpec actionSpec : Arrays.asList( + new ActionSpec("constant, well known")// + .givenPrivs("indices:data/read/search") + .requiredPrivs("indices:data/read/search"), // + new ActionSpec("pattern, well known")// + .givenPrivs("indices:data/read/*") + .requiredPrivs("indices:data/read/search"), // + new ActionSpec("pattern, well known, two required privs")// + .givenPrivs("indices:data/read/*") + .requiredPrivs("indices:data/read/search", "indices:data/read/get"), // + new ActionSpec("constant, non well known")// + .givenPrivs("indices:unknown/unwell") + .requiredPrivs("indices:unknown/unwell"), // + new ActionSpec("pattern, non well known")// + .givenPrivs("indices:unknown/*") + .requiredPrivs("indices:unknown/unwell"), // + new ActionSpec("pattern, non well known, two required privs")// + .givenPrivs("indices:unknown/*") + .requiredPrivs("indices:unknown/unwell", "indices:unknown/notatall")// + + )) { + for (Statefulness statefulness : Statefulness.values()) { + result.add(new Object[] { indexSpec, actionSpec, statefulness }); + } + } + } + return result; + } + + public DataStreams(IndexSpec indexSpec, ActionSpec actionSpec, Statefulness statefulness) throws Exception { + this.indexSpec = indexSpec; + this.actionSpec = actionSpec; + this.roles = indexSpec.toRolesConfig(actionSpec); + + this.primaryAction = actionSpec.primaryAction; + this.requiredActions = actionSpec.requiredPrivs; + + this.otherActions = actionSpec.wellKnownActions + ? ImmutableSet.of("indices:data/write/update") + : ImmutableSet.of("indices:foobar/unknown"); + this.indexSpec.indexMetadata = INDEX_METADATA; + + Settings settings = Settings.EMPTY; + if (statefulness == Statefulness.STATEFUL_LIMITED) { + settings = Settings.builder() + .put(ActionPrivileges.PRECOMPUTED_PRIVILEGES_MAX_HEAP_SIZE.getKey(), new ByteSizeValue(10, ByteSizeUnit.BYTES)) + .build(); + } + + this.subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, () -> INDEX_METADATA, settings); + + if (statefulness == Statefulness.STATEFUL || statefulness == Statefulness.STATEFUL_LIMITED) { + this.subject.updateStatefulIndexPrivileges(INDEX_METADATA, 1); + } + } + + final static Map INDEX_METADATA = // + dataStreams("data_stream_a11", "data_stream_a12", "data_stream_a21", "data_stream_a22", "data_stream_b1", "data_stream_b2") + .build() + .getIndicesLookup(); + + static IndexResolverReplacer.Resolved resolved(String... 
indices) { + ImmutableSet.Builder allIndices = ImmutableSet.builder(); + + for (String index : indices) { + IndexAbstraction indexAbstraction = INDEX_METADATA.get(index); + + if (indexAbstraction instanceof IndexAbstraction.DataStream) { + allIndices.addAll( + indexAbstraction.getIndices().stream().map(i -> i.getIndex().getName()).collect(Collectors.toList()) + ); + } + + allIndices.add(index); + } + + return new IndexResolverReplacer.Resolved( + ImmutableSet.of(), + allIndices.build(), + ImmutableSet.copyOf(indices), + ImmutableSet.of(), + IndicesOptions.LENIENT_EXPAND_OPEN + ); + } + } + + static class IndexSpec { + ImmutableList givenIndexPrivs = ImmutableList.of(); + boolean wildcardPrivs; + Map indexMetadata; + + IndexSpec() {} + + IndexSpec givenIndexPrivs(String... indexPatterns) { + this.givenIndexPrivs = ImmutableList.copyOf(indexPatterns); + this.wildcardPrivs = this.givenIndexPrivs.contains("*"); + return this; + } + + boolean covers(User user, String index) { + if (this.wildcardPrivs) { + return true; + } + + for (String givenIndexPriv : this.givenIndexPrivs) { + if (givenIndexPriv.contains("${")) { + for (Map.Entry entry : user.getCustomAttributesMap().entrySet()) { + givenIndexPriv = givenIndexPriv.replace("${" + entry.getKey() + "}", entry.getValue()); + } + } + + if (givenIndexPriv.endsWith("*")) { + if (index.startsWith(givenIndexPriv.substring(0, givenIndexPriv.length() - 1))) { + return true; + } + + for (IndexAbstraction indexAbstraction : indexMetadata.values()) { + if ((indexAbstraction instanceof IndexAbstraction.Alias + || indexAbstraction instanceof IndexAbstraction.DataStream) + && indexAbstraction.getName().startsWith(givenIndexPriv.substring(0, givenIndexPriv.length() - 1))) { + if (indexAbstraction.getIndices().stream().anyMatch(i -> i.getIndex().getName().equals(index))) { + return true; + } + } + } + } else if (givenIndexPrivs.contains("*")) { + // For simplicity, we only allow a sub-set of patterns. We assume here that the WildcardMatcher + // class fulfills all other cases correctly as per its contract + throw new RuntimeException("The tests only support index patterns with * at the end"); + } else { + if (index.equals(givenIndexPriv)) { + return true; + } + + IndexAbstraction indexAbstraction = indexMetadata.get(index); + + if (indexAbstraction instanceof IndexAbstraction.Alias || indexAbstraction instanceof IndexAbstraction.DataStream) { + if (indexAbstraction.getIndices().stream().anyMatch(i -> i.getIndex().getName().equals(index))) { + return true; + } + } + } + } + + return false; + } + + SecurityDynamicConfiguration toRolesConfig(ActionSpec actionSpec) { + try { + return SecurityDynamicConfiguration.fromMap( + ImmutableMap.of( + "test_role", + ImmutableMap.of( + "index_permissions", + Arrays.asList( + ImmutableMap.of("index_patterns", this.givenIndexPrivs, "allowed_actions", actionSpec.givenPrivs) + ) + ) + ), + CType.ROLES + ); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + + @Override + public String toString() { + return this.givenIndexPrivs.stream().collect(Collectors.joining(",")); + } + } + + static class ActionSpec { + String name; + ImmutableList givenPrivs; + ImmutableSet requiredPrivs; + String primaryAction; + boolean wellKnownActions; + + ActionSpec(String name) { + super(); + this.name = name; + } + + ActionSpec givenPrivs(String... actions) { + this.givenPrivs = ImmutableList.copyOf(actions); + return this; + } + + ActionSpec requiredPrivs(String... 
requiredPrivs) { + this.requiredPrivs = ImmutableSet.copyOf(requiredPrivs); + this.primaryAction = requiredPrivs[0]; + this.wellKnownActions = this.requiredPrivs.stream().anyMatch(a -> WellKnownActions.INDEX_ACTIONS.contains(a)); + return this; + } + + @Override + public String toString() { + return name; + } + } + + enum Statefulness { + STATEFUL, + STATEFUL_LIMITED, + NON_STATEFUL + } + } + + public static class Misc { + @Test + public void relevantOnly_identity() throws Exception { + Map metadata = // + indices("index_a11", "index_a12", "index_b")// + .alias("alias_a") + .of("index_a11", "index_a12")// + .build() + .getIndicesLookup(); + + assertTrue( + "relevantOnly() returned identical object", + ActionPrivileges.StatefulIndexPrivileges.relevantOnly(metadata) == metadata + ); + } + + @Test + public void relevantOnly_closed() throws Exception { + Map metadata = indices("index_open_1", "index_open_2")// + .index("index_closed", IndexMetadata.State.CLOSE) + .build() + .getIndicesLookup(); + + assertNotNull("Original metadata contains index_open_1", metadata.get("index_open_1")); + assertNotNull("Original metadata contains index_closed", metadata.get("index_closed")); + + Map filteredMetadata = ActionPrivileges.StatefulIndexPrivileges.relevantOnly(metadata); + + assertNotNull("Filtered metadata contains index_open_1", filteredMetadata.get("index_open_1")); + assertNull("Filtered metadata does not contain index_closed", filteredMetadata.get("index_closed")); + } + + @Test + public void relevantOnly_dataStreamBackingIndices() throws Exception { + Map metadata = dataStreams("data_stream_1").build().getIndicesLookup(); + + assertNotNull("Original metadata contains backing index", metadata.get(".ds-data_stream_1-000001")); + assertNotNull("Original metadata contains data stream", metadata.get("data_stream_1")); + + Map filteredMetadata = ActionPrivileges.StatefulIndexPrivileges.relevantOnly(metadata); + + assertNull("Filtered metadata does not contain backing index", filteredMetadata.get(".ds-data_stream_1-000001")); + assertNotNull("Filtered metadata contains data stream", filteredMetadata.get("data_stream_1")); + } + + @Test + public void backingIndexToDataStream() { + Map metadata = indices("index").dataStream("data_stream").build().getIndicesLookup(); + + assertEquals("index", ActionPrivileges.StatefulIndexPrivileges.backingIndexToDataStream("index", metadata)); + assertEquals( + "data_stream", + ActionPrivileges.StatefulIndexPrivileges.backingIndexToDataStream(".ds-data_stream-000001", metadata) + ); + assertEquals("non_existing", ActionPrivileges.StatefulIndexPrivileges.backingIndexToDataStream("non_existing", metadata)); + } + + @Test + public void hasIndexPrivilege_errors() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml( + "role_with_errors:\n" + + " index_permissions:\n" + + " - index_patterns: ['/invalid_regex_with_attr${user.name}\\/']\n" + + " allowed_actions: ['indices:some_action*', 'indices:data/write/index']", + CType.ROLES + ); + + ActionPrivileges subject = new ActionPrivileges( + roles, + FlattenedActionGroups.EMPTY, + () -> Collections.emptyMap(), + Settings.EMPTY + ); + + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege( + ctx("role_with_errors"), + Set.of("indices:some_action", "indices:data/write/index"), + IndexResolverReplacer.Resolved.ofIndex("any_index") + ); + assertThat(result, isForbidden()); + assertTrue(result.hasEvaluationExceptions()); + assertTrue( + "Result mentions role_with_errors: " + 
result.getEvaluationExceptionInfo(), + result.getEvaluationExceptionInfo() + .startsWith("Exceptions encountered during privilege evaluation:\n" + "Error while evaluating role role_with_errors") + ); + } + + @Test + public void hasExplicitIndexPrivilege_errors() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml( + "role_with_errors:\n" + + " index_permissions:\n" + + " - index_patterns: ['/invalid_regex_with_attr${user.name}\\/']\n" + + " allowed_actions: ['system:admin/system_index*']", + CType.ROLES + ); + + ActionPrivileges subject = new ActionPrivileges( + roles, + FlattenedActionGroups.EMPTY, + () -> Collections.emptyMap(), + Settings.EMPTY + ); + + PrivilegesEvaluatorResponse result = subject.hasExplicitIndexPrivilege( + ctx("role_with_errors"), + Set.of("system:admin/system_index"), + IndexResolverReplacer.Resolved.ofIndex("any_index") + ); + assertThat(result, isForbidden()); + assertTrue(result.hasEvaluationExceptions()); + assertTrue( + "Result mentions role_with_errors: " + result.getEvaluationExceptionInfo(), + result.getEvaluationExceptionInfo() + .startsWith("Exceptions encountered during privilege evaluation:\n" + "Error while evaluating role role_with_errors") + ); + } + } + + /** + * Verifies that the heap size used by StatefulIndexPrivileges stays within expected bounds. + */ + @RunWith(Parameterized.class) + public static class StatefulIndexPrivilegesHeapSize { + + final Map indices; + final SecurityDynamicConfiguration roles; + final int expectedEstimatedNumberOfBytes; + + @Test + public void estimatedSize() throws Exception { + ActionPrivileges subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, () -> indices, Settings.EMPTY); + + subject.updateStatefulIndexPrivileges(indices, 1); + + int lowerBound = (int) (expectedEstimatedNumberOfBytes * 0.9); + int upperBound = (int) (expectedEstimatedNumberOfBytes * 1.1); + + int actualEstimatedNumberOfBytes = subject.getEstimatedStatefulIndexByteSize(); + + assertTrue( + "estimatedNumberOfBytes: " + lowerBound + " < " + actualEstimatedNumberOfBytes + " < " + upperBound, + lowerBound < actualEstimatedNumberOfBytes && actualEstimatedNumberOfBytes < upperBound + ); + } + + public StatefulIndexPrivilegesHeapSize(int numberOfIndices, int numberOfRoles, int expectedEstimatedNumberOfBytes) { + this.indices = createIndices(numberOfIndices); + this.roles = createRoles(numberOfRoles, numberOfIndices); + this.expectedEstimatedNumberOfBytes = expectedEstimatedNumberOfBytes; + } + + @Parameterized.Parameters(name = "{0} indices; {1} roles; estimated number of bytes: {2}") + public static Collection params() { + List result = new ArrayList<>(); + + // indices; roles; expected number of bytes + result.add(new Object[] { 100, 10, 10_000 }); + result.add(new Object[] { 100, 100, 13_000 }); + result.add(new Object[] { 100, 1000, 26_000 }); + + result.add(new Object[] { 1000, 10, 92_000 }); + result.add(new Object[] { 1000, 100, 94_000 }); + result.add(new Object[] { 1000, 1000, 112_000 }); + + result.add(new Object[] { 10_000, 10, 890_000 }); + result.add(new Object[] { 10_000, 100, 930_000 }); + + return result; + } + + static Map createIndices(int numberOfIndices) { + String[] names = new String[numberOfIndices]; + + for (int i = 0; i < numberOfIndices; i++) { + names[i] = "index_" + i; + } + + return MockIndexMetadataBuilder.indices(names).build().getIndicesLookup(); + } + + static SecurityDynamicConfiguration createRoles(int numberOfRoles, int numberOfIndices) { + try { + Random 
random = new Random(1); + Map rolesDocument = new HashMap<>(); + List allowedActions = Arrays.asList( + "indices:data/read*", + "indices:admin/mappings/fields/get*", + "indices:admin/resolve/index", + "indices:data/write*", + "indices:admin/mapping/put" + ); + + for (int i = 0; i < numberOfRoles; i++) { + List indexPatterns = new ArrayList<>(); + int numberOfIndexPatterns = Math.min( + (int) ((Math.abs(random.nextGaussian() + 0.3)) * 0.5 * numberOfIndices), + numberOfIndices + ); + + int numberOfIndexPatterns10th = numberOfIndexPatterns / 10; + + if (numberOfIndexPatterns10th > 0) { + for (int k = 0; k < numberOfIndexPatterns10th; k++) { + indexPatterns.add("index_" + random.nextInt(numberOfIndices / 10) + "*"); + } + } else { + for (int k = 0; k < numberOfIndexPatterns; k++) { + indexPatterns.add("index_" + random.nextInt(numberOfIndices)); + } + } + + Map roleDocument = ImmutableMap.of( + "index_permissions", + Arrays.asList(ImmutableMap.of("index_patterns", indexPatterns, "allowed_actions", allowedActions)) + ); + + rolesDocument.put("role_" + i, roleDocument); + } + + return SecurityDynamicConfiguration.fromMap(rolesDocument, CType.ROLES); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + } + + static PrivilegesEvaluationContext ctx(String... roles) { + User user = new User("test_user"); + user.addAttributes(ImmutableMap.of("attrs.dept_no", "a11")); + return new PrivilegesEvaluationContext( + user, + ImmutableSet.copyOf(roles), + null, + null, + null, + null, + new IndexNameExpressionResolver(new ThreadContext(Settings.EMPTY)), + null + ); + } +} diff --git a/src/integrationTest/java/org/opensearch/security/privileges/ClusterStateMetadataDependentPrivilegesTest.java b/src/integrationTest/java/org/opensearch/security/privileges/ClusterStateMetadataDependentPrivilegesTest.java new file mode 100644 index 0000000000..118d5358f6 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/ClusterStateMetadataDependentPrivilegesTest.java @@ -0,0 +1,117 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges; + +import java.util.concurrent.atomic.AtomicReference; + +import org.awaitility.Awaitility; +import org.junit.Test; + +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.Settings; +import org.opensearch.node.Node; +import org.opensearch.threadpool.ThreadPool; + +import org.mockito.Mockito; +import org.mockito.stubbing.Answer; + +public class ClusterStateMetadataDependentPrivilegesTest { + + @Test + public void simpleUpdate() { + ThreadPool threadPool = threadPool(); + try { + ConcreteTestSubject subject = new ConcreteTestSubject(); + ClusterState clusterState = clusterState(metadata(1)); + ClusterService clusterService = Mockito.mock(ClusterService.class); + Mockito.when(clusterService.state()).thenReturn(clusterState); + + subject.updateClusterStateMetadataAsync(clusterService, threadPool); + Awaitility.await().until(() -> subject.getCurrentlyUsedMetadataVersion() == 1); + subject.shutdown(); + } finally { + threadPool.shutdown(); + } + } + + @Test + public void frequentUpdates() throws Exception { + ThreadPool threadPool = threadPool(); + try { + ConcreteTestSubject subject = new ConcreteTestSubject(); + AtomicReference clusterStateReference = new AtomicReference<>(clusterState(metadata(1))); + ClusterService clusterService = Mockito.mock(ClusterService.class); + Mockito.when(clusterService.state()).thenAnswer((Answer) invocationOnMock -> clusterStateReference.get()); + subject.updateClusterStateMetadataAsync(clusterService, threadPool); + subject.updateClusterStateMetadataAsync(clusterService, threadPool); + + for (int i = 2; i <= 100; i++) { + clusterStateReference.set(clusterState(metadata(i))); + subject.updateClusterStateMetadataAsync(clusterService, threadPool); + Thread.sleep(10); + } + + Awaitility.await().until(() -> subject.getCurrentlyUsedMetadataVersion() == 100); + subject.shutdown(); + } finally { + threadPool.shutdown(); + } + } + + @Test + public void shutdown() { + ThreadPool threadPool = threadPool(); + try { + ConcreteTestSubject subject = new ConcreteTestSubject(); + ClusterState clusterState = clusterState(metadata(1)); + ClusterService clusterService = Mockito.mock(ClusterService.class); + Mockito.when(clusterService.state()).thenReturn(clusterState); + subject.updateClusterStateMetadataAsync(clusterService, threadPool); + subject.shutdown(); + } finally { + threadPool.shutdown(); + } + } + + static Metadata metadata(long version) { + return Metadata.builder().version(version).build(); + } + + static ClusterState clusterState(Metadata metadata) { + return ClusterState.builder(ClusterState.EMPTY_STATE).metadata(metadata).build(); + } + + static ThreadPool threadPool() { + return new ThreadPool(Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), "name").build()); + } + + static class ConcreteTestSubject extends ClusterStateMetadataDependentPrivileges { + + private long currentMetadataVersion; + + @Override + protected void updateClusterStateMetadata(Metadata metadata) { + // We need to be slow with updates to test the debounce-functionality + try { + Thread.sleep(100); + } catch (InterruptedException e) {} + + this.currentMetadataVersion = metadata.version(); + } + + @Override + protected long getCurrentlyUsedMetadataVersion() { + return this.currentMetadataVersion; + } + } +} diff --git a/src/integrationTest/java/org/opensearch/security/privileges/IndexPatternTest.java 
b/src/integrationTest/java/org/opensearch/security/privileges/IndexPatternTest.java new file mode 100644 index 0000000000..e098a605e5 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/IndexPatternTest.java @@ -0,0 +1,252 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges; + +import java.time.ZonedDateTime; +import java.time.temporal.ChronoField; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import org.junit.Test; + +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.security.resolver.IndexResolverReplacer; +import org.opensearch.security.support.WildcardMatcher; +import org.opensearch.security.user.User; + +import static org.opensearch.security.util.MockIndexMetadataBuilder.indices; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertTrue; + +public class IndexPatternTest { + final static int CURRENT_YEAR = ZonedDateTime.now().get(ChronoField.YEAR); + final static int NEXT_YEAR = CURRENT_YEAR + 1; + + final static Metadata INDEX_METADATA = // + indices("index_a11", "index_a12", "index_a21", "index_a22", "index_b1", "index_b2")// + .alias("alias_a") + .of("index_a11", "index_a12", "index_a21", "index_a22")// + .alias("alias_b") + .of("index_b1", "index_b2")// + .dataStream("data_stream_a1")// + .dataStream("data_stream_b1")// + .index("index_year_" + CURRENT_YEAR)// + .index("index_year_" + NEXT_YEAR)// + .alias("alias_year_" + CURRENT_YEAR) + .of("index_current_year")// + .alias("alias_year_" + NEXT_YEAR) + .of("index_next_year")// + .build(); + final static ClusterState CLUSTER_STATE = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(INDEX_METADATA).build(); + + @Test + public void constantIndex() throws Exception { + IndexPattern indexPattern = IndexPattern.from("index_a11"); + assertTrue(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + assertFalse(indexPattern.isEmpty()); + assertTrue(indexPattern.dynamicOnly().isEmpty()); + assertEquals("index_a11", indexPattern.toString()); + + assertTrue(indexPattern.matches("index_a11", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("index_a12", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test + public void constantAlias() throws Exception { + IndexPattern indexPattern = IndexPattern.from("alias_a"); + assertTrue(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + + assertTrue(indexPattern.matches("alias_a", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("alias_a1", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test + public void constantAlias_onIndex() throws Exception { + IndexPattern indexPattern = IndexPattern.from("alias_a"); + assertTrue(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + + 
assertTrue(indexPattern.matches("index_a11", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("index_b1", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test + public void constantDataStream_onIndex() throws Exception { + IndexPattern indexPattern = IndexPattern.from("data_stream_a1"); + assertTrue(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + + assertTrue(indexPattern.matches(".ds-data_stream_a1-000001", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches(".ds-data_stream_a2-000001", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test + public void patternIndex() throws Exception { + IndexPattern indexPattern = IndexPattern.from("index_a1*"); + assertTrue(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + + assertTrue(indexPattern.matches("index_a11", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("index_a21", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test + public void patternAlias() throws Exception { + IndexPattern indexPattern = IndexPattern.from("alias_a*"); + assertTrue(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + + assertTrue(indexPattern.matches("alias_a", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("alias_b", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test + public void patternAlias_onIndex() throws Exception { + IndexPattern indexPattern = IndexPattern.from("alias_a*"); + assertTrue(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + + assertTrue(indexPattern.matches("index_a11", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("index_b1", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test + public void patternDataStream_onIndex() throws Exception { + IndexPattern indexPattern = IndexPattern.from("data_stream_a*"); + assertTrue(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + + assertTrue(indexPattern.matches(".ds-data_stream_a1-000001", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches(".ds-data_stream_b1-000001", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + /** + * Static invalid regular expressions are just ignored + */ + @Test + public void regex_invalid() throws Exception { + IndexPattern indexPattern = IndexPattern.from("/index_x\\/"); + assertFalse(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + assertTrue(indexPattern.isEmpty()); + } + + @Test + public void dateMathIndex() throws Exception { + IndexPattern indexPattern = IndexPattern.from(""); + assertFalse(indexPattern.hasStaticPattern()); + assertTrue(indexPattern.hasDynamicPattern()); + assertEquals("", indexPattern.toString()); + + assertTrue(indexPattern.matches("index_year_" + CURRENT_YEAR, ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("index_year_" + NEXT_YEAR, ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test + public void dateMathAlias_onIndex() throws Exception { + IndexPattern indexPattern = IndexPattern.from(""); + assertFalse(indexPattern.hasStaticPattern()); + assertTrue(indexPattern.hasDynamicPattern()); + assertFalse(indexPattern.isEmpty()); + + assertTrue(indexPattern.matches("index_current_year", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("index_next_year", ctx(), 
INDEX_METADATA.getIndicesLookup())); + } + + @Test(expected = PrivilegesEvaluationException.class) + public void dateMathIndex_invalid() throws Exception { + IndexPattern indexPattern = IndexPattern.from(""); + indexPattern.matches("index_year_" + CURRENT_YEAR, ctx(), INDEX_METADATA.getIndicesLookup()); + } + + @Test + public void templatedIndex() throws Exception { + IndexPattern indexPattern = IndexPattern.from("index_${attrs.a11}"); + assertFalse(indexPattern.hasStaticPattern()); + assertTrue(indexPattern.hasDynamicPattern()); + assertEquals(indexPattern, indexPattern.dynamicOnly()); + + assertTrue(indexPattern.matches("index_a11", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("index_a12", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test(expected = PrivilegesEvaluationException.class) + public void templatedIndex_invalid() throws Exception { + IndexPattern indexPattern = IndexPattern.from("/index_${attrs.a11}\\/"); + assertFalse(indexPattern.hasStaticPattern()); + assertTrue(indexPattern.hasDynamicPattern()); + + indexPattern.matches("whatever", ctx(), INDEX_METADATA.getIndicesLookup()); + } + + @Test + public void mixed() throws Exception { + IndexPattern indexPattern = IndexPattern.from("index_${attrs.a11}", "index_a12"); + assertTrue(indexPattern.hasStaticPattern()); + assertTrue(indexPattern.hasDynamicPattern()); + assertFalse(indexPattern.isEmpty()); + + assertEquals(WildcardMatcher.from("index_a12"), indexPattern.getStaticPattern()); + assertEquals(IndexPattern.from("index_${attrs.a11}"), indexPattern.dynamicOnly()); + assertEquals("index_a12 index_${attrs.a11}", indexPattern.toString()); + } + + @Test + public void mixed2() throws Exception { + IndexPattern indexPattern = IndexPattern.from("", "index_a12"); + assertTrue(indexPattern.hasStaticPattern()); + assertTrue(indexPattern.hasDynamicPattern()); + assertFalse(indexPattern.isEmpty()); + + assertEquals(WildcardMatcher.from("index_a12"), indexPattern.getStaticPattern()); + assertEquals(IndexPattern.from(""), indexPattern.dynamicOnly()); + assertEquals("index_a12 ", indexPattern.toString()); + } + + @Test + public void equals() { + IndexPattern a1 = IndexPattern.from("data_stream_a*"); + IndexPattern a2 = IndexPattern.from("data_stream_a*"); + IndexPattern b = IndexPattern.from("", "data_stream_a*"); + + assertEquals(a1, a1); + assertEquals(a1, a2); + assertNotEquals(a1, b); + assertFalse(a1.equals(a1.toString())); + } + + private static PrivilegesEvaluationContext ctx() { + IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(new ThreadContext(Settings.EMPTY)); + IndexResolverReplacer indexResolverReplacer = new IndexResolverReplacer(indexNameExpressionResolver, () -> CLUSTER_STATE, null); + User user = new User("test_user"); + user.addAttributes(ImmutableMap.of("attrs.a11", "a11")); + user.addAttributes(ImmutableMap.of("attrs.year", "year")); + + return new PrivilegesEvaluationContext( + user, + ImmutableSet.of(), + "indices:action/test", + null, + null, + indexResolverReplacer, + indexNameExpressionResolver, + () -> CLUSTER_STATE + ); + } +} diff --git a/src/integrationTest/java/org/opensearch/security/privileges/PrivilegeEvaluatorResponseMatcher.java b/src/integrationTest/java/org/opensearch/security/privileges/PrivilegeEvaluatorResponseMatcher.java new file mode 100644 index 0000000000..dfaa065605 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/PrivilegeEvaluatorResponseMatcher.java @@ -0,0 +1,182 @@ +/* + * 
SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges; + +import java.util.Arrays; +import java.util.Set; + +import com.google.common.collect.ImmutableSet; +import org.hamcrest.Description; +import org.hamcrest.DiagnosingMatcher; + +/** + * Provides hamcrest matchers for PrivilegesEvaluatorResponse instances, which can be used with assertThat() calls. + */ +public abstract class PrivilegeEvaluatorResponseMatcher extends DiagnosingMatcher { + + /** + * Asserts that the status of the PrivilegesEvaluatorResponse is "allowed". + */ + public static PrivilegeEvaluatorResponseMatcher isAllowed() { + return new PrivilegeEvaluatorResponseMatcher() { + @Override + public void describeTo(Description description) { + description.appendText("Request is fully allowed; isAllowed() returns true"); + } + + @Override + protected boolean matches(PrivilegesEvaluatorResponse response, Description mismatchDescription) { + if (!response.isAllowed()) { + mismatchDescription.appendText("isAllowed() is false"); + return false; + } + + if (response.isPartiallyOk()) { + mismatchDescription.appendText("isPartiallyOk() must be false if isAllowed() is true"); + return false; + } + + if (!response.getMissingPrivileges().isEmpty()) { + mismatchDescription.appendText("getMissingPrivileges() must be empty if isAllowed() is true"); + return false; + } + + return true; + } + }; + } + + /** + * Asserts that the status of the PrivilegesEvaluatorResponse is neither "allowed" or "partially allowed". You can + * add missingPrivileges sub-matchers to verify the actually missing privileges. + */ + public static PrivilegeEvaluatorResponseMatcher isForbidden(PrivilegeEvaluatorResponseMatcher... subMatchers) { + return new PrivilegeEvaluatorResponseMatcher() { + @Override + public void describeTo(Description description) { + description.appendText("Request is fully forbidden; isAllowed() returns false; isPartiallyOk() returns false"); + + for (PrivilegeEvaluatorResponseMatcher subMatcher : subMatchers) { + description.appendText("; "); + subMatcher.describeTo(description); + } + } + + @Override + protected boolean matches(PrivilegesEvaluatorResponse response, Description mismatchDescription) { + if (response.isAllowed()) { + mismatchDescription.appendText("isAllowed() is true"); + return false; + } + + if (response.isPartiallyOk()) { + mismatchDescription.appendText("isPartiallyOk() is true"); + return false; + } + + for (PrivilegeEvaluatorResponseMatcher subMatcher : subMatchers) { + if (!subMatcher.matches(response, mismatchDescription)) { + return false; + } + } + + return true; + } + }; + } + + /** + * Asserts that the status of the PrivilegesEvaluatorResponse is "partially ok". You can specify the available + * indices are parameter. + */ + public static PrivilegeEvaluatorResponseMatcher isPartiallyOk(String... 
availableIndices) { + return new PrivilegeEvaluatorResponseMatcher() { + @Override + public void describeTo(Description description) { + description.appendText( + "Request is allowed for a subset of indices; isPartiallyOk() returns true; getAvailableIndices() returns " + ).appendValue(Arrays.asList(availableIndices)); + } + + @Override + protected boolean matches(PrivilegesEvaluatorResponse response, Description mismatchDescription) { + if (!response.isPartiallyOk()) { + mismatchDescription.appendText("isPartiallyOk() is false"); + return false; + } + + if (response.isAllowed()) { + mismatchDescription.appendText("isAllowed() must be false if isPartiallyOk() is true"); + return false; + } + + if (!response.getAvailableIndices().equals(ImmutableSet.copyOf(availableIndices))) { + mismatchDescription.appendText("getAvailableIndices() is ").appendValue(Arrays.asList(response.getAvailableIndices())); + return false; + } + + return true; + } + }; + } + + /** + * Asserts that the missingPrivileges property of a PrivilegesEvaluatorResponse instance equals to the given parameters. + * Should be used as a sub-matcher for isForbidden(). + */ + public static PrivilegeEvaluatorResponseMatcher missingPrivileges(String... missingPrivileges) { + return missingPrivileges(ImmutableSet.copyOf(missingPrivileges)); + } + + /** + * Asserts that the missingPrivileges property of a PrivilegesEvaluatorResponse instance equals to the given parameters. + * Should be used as a sub-matcher for isForbidden(). + */ + public static PrivilegeEvaluatorResponseMatcher missingPrivileges(Set missingPrivileges) { + return new PrivilegeEvaluatorResponseMatcher() { + @Override + public void describeTo(Description description) { + description.appendText("Missing privileges are "); + description.appendValue(missingPrivileges); + } + + @Override + protected boolean matches(PrivilegesEvaluatorResponse response, Description mismatchDescription) { + if (!response.getMissingPrivileges().equals(missingPrivileges)) { + mismatchDescription.appendText("getMissingPrivileges() returns ").appendValue(response.getMissingPrivileges()); + return false; + } + + return true; + } + }; + } + + @Override + protected boolean matches(Object o, Description mismatchDescription) { + if (!(o instanceof PrivilegesEvaluatorResponse)) { + mismatchDescription.appendText("The object is not an instance of PrivilegesEvaluatorResponse: ").appendValue(o); + } + + PrivilegesEvaluatorResponse response = (PrivilegesEvaluatorResponse) o; + + if (matches(response, mismatchDescription)) { + return true; + } else { + mismatchDescription.appendText("\n"); + mismatchDescription.appendText(response.toString()); + return false; + } + } + + protected abstract boolean matches(PrivilegesEvaluatorResponse response, Description mismatchDescription); + +} diff --git a/src/integrationTest/java/org/opensearch/security/privileges/PrivilegesEvaluatorTest.java b/src/integrationTest/java/org/opensearch/security/privileges/PrivilegesEvaluatorTest.java index eb8c61e679..538f03ee37 100644 --- a/src/integrationTest/java/org/opensearch/security/privileges/PrivilegesEvaluatorTest.java +++ b/src/integrationTest/java/org/opensearch/security/privileges/PrivilegesEvaluatorTest.java @@ -19,6 +19,7 @@ import org.opensearch.script.mustache.MustachePlugin; import org.opensearch.script.mustache.RenderSearchTemplateAction; +import org.opensearch.test.framework.TestIndex; import org.opensearch.test.framework.TestSecurityConfig; import org.opensearch.test.framework.TestSecurityConfig.Role; import 
org.opensearch.test.framework.cluster.ClusterManager; @@ -65,11 +66,18 @@ public class PrivilegesEvaluatorTest { private String TEST_RENDER_SEARCH_TEMPLATE_QUERY = "{\"params\":{\"status\":[\"pending\",\"published\"]},\"source\":\"{\\\"query\\\": {\\\"terms\\\": {\\\"status\\\": [\\\"{{#status}}\\\",\\\"{{.}}\\\",\\\"{{/status}}\\\"]}}}\"}"; + final static TestIndex R = TestIndex.name("r").build(); + /** + * This is necessary so that the testNegativeLookaheadPattern test has an forbidden index to match against + */ + final static TestIndex T = TestIndex.name("t").build(); + @ClassRule public static LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.THREE_CLUSTER_MANAGERS) .authc(AUTHC_HTTPBASIC_INTERNAL) .users(NEGATIVE_LOOKAHEAD, NEGATED_REGEX, SEARCH_TEMPLATE, RENDER_SEARCH_TEMPLATE, TestSecurityConfig.User.USER_ADMIN) .plugin(MustachePlugin.class) + .indices(R, T) .build(); @Test diff --git a/src/test/java/org/opensearch/security/securityconf/SecurityRolesPermissionsTest.java b/src/integrationTest/java/org/opensearch/security/privileges/RestEndpointPermissionTests.java similarity index 69% rename from src/test/java/org/opensearch/security/securityconf/SecurityRolesPermissionsTest.java rename to src/integrationTest/java/org/opensearch/security/privileges/RestEndpointPermissionTests.java index f21a3e98a2..1e61aa0206 100644 --- a/src/test/java/org/opensearch/security/securityconf/SecurityRolesPermissionsTest.java +++ b/src/integrationTest/java/org/opensearch/security/privileges/RestEndpointPermissionTests.java @@ -24,7 +24,7 @@ * GitHub history for details. */ -package org.opensearch.security.securityconf; +package org.opensearch.security.privileges; import java.io.IOException; import java.util.AbstractMap.SimpleEntry; @@ -46,21 +46,23 @@ import org.opensearch.security.DefaultObjectMapper; import org.opensearch.security.dlic.rest.api.Endpoint; import org.opensearch.security.dlic.rest.api.RestApiAdminPrivilegesEvaluator.PermissionBuilder; +import org.opensearch.security.securityconf.FlattenedActionGroups; import org.opensearch.security.securityconf.impl.CType; import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; -import org.opensearch.security.securityconf.impl.v7.ActionGroupsV7; -import org.opensearch.security.securityconf.impl.v7.RoleMappingsV7; import org.opensearch.security.securityconf.impl.v7.RoleV7; -import org.opensearch.security.securityconf.impl.v7.TenantV7; - -import org.mockito.Mockito; +import org.opensearch.security.user.User; import static org.opensearch.security.dlic.rest.api.RestApiAdminPrivilegesEvaluator.CERTS_INFO_ACTION; import static org.opensearch.security.dlic.rest.api.RestApiAdminPrivilegesEvaluator.ENDPOINTS_WITH_PERMISSIONS; import static org.opensearch.security.dlic.rest.api.RestApiAdminPrivilegesEvaluator.RELOAD_CERTS_ACTION; import static org.opensearch.security.dlic.rest.api.RestApiAdminPrivilegesEvaluator.SECURITY_CONFIG_UPDATE; -public class SecurityRolesPermissionsTest { +/** + * Moved from https://github.com/opensearch-project/security/blob/54361468f5c4b3a57f3ecffaf1bbe8dccee562be/src/test/java/org/opensearch/security/securityconf/SecurityRolesPermissionsTest.java + * + * See https://github.com/opensearch-project/security/pull/2411 + */ +public class RestEndpointPermissionTests { static final Map NO_REST_ADMIN_PERMISSIONS_ROLES = ImmutableMap.builder() .put("all_access", role("*")) @@ -111,44 +113,36 @@ static String[] allRestApiPermissions() { }).toArray(String[]::new); } - final ConfigModel 
configModel; + final ActionPrivileges actionPrivileges; - public SecurityRolesPermissionsTest() throws IOException { - this.configModel = new ConfigModelV7( - createRolesConfig(), - createRoleMappingsConfig(), - createActionGroupsConfig(), - createTenantsConfig(), - Mockito.mock(DynamicConfigModel.class), - Settings.EMPTY - ); + public RestEndpointPermissionTests() throws IOException { + this.actionPrivileges = new ActionPrivileges(createRolesConfig(), FlattenedActionGroups.EMPTY, null, Settings.EMPTY); } @Test public void hasNoExplicitClusterPermissionPermissionForRestAdmin() { for (final String role : NO_REST_ADMIN_PERMISSIONS_ROLES.keySet()) { - final SecurityRoles securityRolesForRole = configModel.getSecurityRoles().filter(ImmutableSet.of(role)); for (final Map.Entry entry : ENDPOINTS_WITH_PERMISSIONS.entrySet()) { final Endpoint endpoint = entry.getKey(); final PermissionBuilder permissionBuilder = entry.getValue(); if (endpoint == Endpoint.SSL) { Assert.assertFalse( endpoint.name(), - securityRolesForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(CERTS_INFO_ACTION)) + actionPrivileges.hasExplicitClusterPrivilege(ctx(role), permissionBuilder.build(CERTS_INFO_ACTION)).isAllowed() ); Assert.assertFalse( endpoint.name(), - securityRolesForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(RELOAD_CERTS_ACTION)) + actionPrivileges.hasExplicitClusterPrivilege(ctx(role), permissionBuilder.build(RELOAD_CERTS_ACTION)).isAllowed() ); } else if (endpoint == Endpoint.CONFIG) { Assert.assertFalse( endpoint.name(), - securityRolesForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(SECURITY_CONFIG_UPDATE)) + actionPrivileges.hasExplicitClusterPrivilege(ctx(role), permissionBuilder.build(SECURITY_CONFIG_UPDATE)).isAllowed() ); } else { Assert.assertFalse( endpoint.name(), - securityRolesForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build()) + actionPrivileges.hasExplicitClusterPrivilege(ctx(role), permissionBuilder.build()).isAllowed() ); } } @@ -158,28 +152,27 @@ public void hasNoExplicitClusterPermissionPermissionForRestAdmin() { @Test public void hasExplicitClusterPermissionPermissionForRestAdminWitFullAccess() { for (final String role : REST_ADMIN_PERMISSIONS_FULL_ACCESS_ROLES.keySet()) { - final SecurityRoles securityRolesForRole = configModel.getSecurityRoles().filter(ImmutableSet.of(role)); for (final Map.Entry entry : ENDPOINTS_WITH_PERMISSIONS.entrySet()) { final Endpoint endpoint = entry.getKey(); final PermissionBuilder permissionBuilder = entry.getValue(); if (endpoint == Endpoint.SSL) { Assert.assertTrue( endpoint.name() + "/" + CERTS_INFO_ACTION, - securityRolesForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(CERTS_INFO_ACTION)) + actionPrivileges.hasExplicitClusterPrivilege(ctx(role), permissionBuilder.build(CERTS_INFO_ACTION)).isAllowed() ); Assert.assertTrue( endpoint.name() + "/" + CERTS_INFO_ACTION, - securityRolesForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(RELOAD_CERTS_ACTION)) + actionPrivileges.hasExplicitClusterPrivilege(ctx(role), permissionBuilder.build(RELOAD_CERTS_ACTION)).isAllowed() ); } else if (endpoint == Endpoint.CONFIG) { Assert.assertTrue( endpoint.name() + "/" + SECURITY_CONFIG_UPDATE, - securityRolesForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(SECURITY_CONFIG_UPDATE)) + actionPrivileges.hasExplicitClusterPrivilege(ctx(role), permissionBuilder.build(SECURITY_CONFIG_UPDATE)).isAllowed() ); } else { Assert.assertTrue( 
endpoint.name(), - securityRolesForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build()) + actionPrivileges.hasExplicitClusterPrivilege(ctx(role), permissionBuilder.build()).isAllowed() ); } } @@ -195,33 +188,31 @@ public void hasExplicitClusterPermissionPermissionForRestAdmin() { .collect(Collectors.toList()); for (final Endpoint endpoint : noSslEndpoints) { final String permission = ENDPOINTS_WITH_PERMISSIONS.get(endpoint).build(); - final SecurityRoles allowOnePermissionRole = configModel.getSecurityRoles() - .filter(ImmutableSet.of(restAdminApiRoleName(endpoint.name().toLowerCase(Locale.ROOT)))); - Assert.assertTrue(endpoint.name(), allowOnePermissionRole.hasExplicitClusterPermissionPermission(permission)); - assertHasNoPermissionsForRestApiAdminOnePermissionRole(endpoint, allowOnePermissionRole); + final PrivilegesEvaluationContext ctx = ctx(restAdminApiRoleName(endpoint.name().toLowerCase(Locale.ROOT))); + Assert.assertTrue(endpoint.name(), actionPrivileges.hasExplicitClusterPrivilege(ctx, permission).isAllowed()); + assertHasNoPermissionsForRestApiAdminOnePermissionRole(endpoint, ctx); } // verify SSL endpoint with 2 actions for (final String sslAction : ImmutableSet.of(CERTS_INFO_ACTION, RELOAD_CERTS_ACTION)) { - final SecurityRoles sslAllowRole = configModel.getSecurityRoles().filter(ImmutableSet.of(restAdminApiRoleName(sslAction))); + final PrivilegesEvaluationContext ctx = ctx(restAdminApiRoleName(sslAction)); final PermissionBuilder permissionBuilder = ENDPOINTS_WITH_PERMISSIONS.get(Endpoint.SSL); Assert.assertTrue( Endpoint.SSL + "/" + sslAction, - sslAllowRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(sslAction)) + actionPrivileges.hasExplicitClusterPrivilege(ctx, permissionBuilder.build(sslAction)).isAllowed() ); - assertHasNoPermissionsForRestApiAdminOnePermissionRole(Endpoint.SSL, sslAllowRole); + assertHasNoPermissionsForRestApiAdminOnePermissionRole(Endpoint.SSL, ctx); } // verify CONFIG endpoint with 1 action - final SecurityRoles securityConfigAllowRole = configModel.getSecurityRoles() - .filter(ImmutableSet.of(restAdminApiRoleName(SECURITY_CONFIG_UPDATE))); + final PrivilegesEvaluationContext ctx = ctx(restAdminApiRoleName(SECURITY_CONFIG_UPDATE)); final PermissionBuilder permissionBuilder = ENDPOINTS_WITH_PERMISSIONS.get(Endpoint.CONFIG); Assert.assertTrue( Endpoint.SSL + "/" + SECURITY_CONFIG_UPDATE, - securityConfigAllowRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(SECURITY_CONFIG_UPDATE)) + actionPrivileges.hasExplicitClusterPrivilege(ctx, permissionBuilder.build(SECURITY_CONFIG_UPDATE)).isAllowed() ); - assertHasNoPermissionsForRestApiAdminOnePermissionRole(Endpoint.CONFIG, securityConfigAllowRole); + assertHasNoPermissionsForRestApiAdminOnePermissionRole(Endpoint.CONFIG, ctx); } - void assertHasNoPermissionsForRestApiAdminOnePermissionRole(final Endpoint allowEndpoint, final SecurityRoles allowOnlyRoleForRole) { + void assertHasNoPermissionsForRestApiAdminOnePermissionRole(final Endpoint allowEndpoint, final PrivilegesEvaluationContext ctx) { final Collection noPermissionEndpoints = ENDPOINTS_WITH_PERMISSIONS.keySet() .stream() .filter(e -> e != allowEndpoint) @@ -231,14 +222,17 @@ void assertHasNoPermissionsForRestApiAdminOnePermissionRole(final Endpoint allow if (endpoint == Endpoint.SSL) { Assert.assertFalse( endpoint.name(), - allowOnlyRoleForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(CERTS_INFO_ACTION)) + actionPrivileges.hasExplicitClusterPrivilege(ctx, 
permissionBuilder.build(CERTS_INFO_ACTION)).isAllowed() ); Assert.assertFalse( endpoint.name(), - allowOnlyRoleForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(RELOAD_CERTS_ACTION)) + actionPrivileges.hasExplicitClusterPrivilege(ctx, permissionBuilder.build(RELOAD_CERTS_ACTION)).isAllowed() ); } else { - Assert.assertFalse(endpoint.name(), allowOnlyRoleForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build())); + Assert.assertFalse( + endpoint.name(), + actionPrivileges.hasExplicitClusterPrivilege(ctx, permissionBuilder.build()).isAllowed() + ); } } } @@ -256,22 +250,8 @@ static SecurityDynamicConfiguration createRolesConfig() throws IOExcepti return SecurityDynamicConfiguration.fromNode(rolesNode, CType.ROLES, 2, 0, 0); } - static SecurityDynamicConfiguration createRoleMappingsConfig() throws IOException { - final ObjectNode metaNode = DefaultObjectMapper.objectMapper.createObjectNode(); - metaNode.set("_meta", meta("rolesmapping")); - return SecurityDynamicConfiguration.fromNode(metaNode, CType.ROLESMAPPING, 2, 0, 0); - } - - static SecurityDynamicConfiguration createActionGroupsConfig() throws IOException { - final ObjectNode metaNode = DefaultObjectMapper.objectMapper.createObjectNode(); - metaNode.set("_meta", meta("actiongroups")); - return SecurityDynamicConfiguration.fromNode(metaNode, CType.ACTIONGROUPS, 2, 0, 0); - } - - static SecurityDynamicConfiguration createTenantsConfig() throws IOException { - final ObjectNode metaNode = DefaultObjectMapper.objectMapper.createObjectNode(); - metaNode.set("_meta", meta("tenants")); - return SecurityDynamicConfiguration.fromNode(metaNode, CType.TENANTS, 2, 0, 0); + static PrivilegesEvaluationContext ctx(String... roles) { + return new PrivilegesEvaluationContext(new User("test_user"), ImmutableSet.copyOf(roles), null, null, null, null, null, null); } } diff --git a/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/DlsFlsLegacyHeadersTest.java b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/DlsFlsLegacyHeadersTest.java new file mode 100644 index 0000000000..47503fa870 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/DlsFlsLegacyHeadersTest.java @@ -0,0 +1,395 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges.dlsfls; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import org.junit.Test; + +import org.opensearch.Version; +import org.opensearch.action.admin.cluster.shards.ClusterSearchShardsRequest; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.CheckedFunction; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.core.ParseField; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.index.query.MatchQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.RangeQueryBuilder; +import org.opensearch.index.query.TermQueryBuilder; +import org.opensearch.search.internal.ShardSearchRequest; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.support.Base64Helper; +import org.opensearch.security.support.ConfigConstants; +import org.opensearch.security.user.User; +import org.opensearch.security.util.MockIndexMetadataBuilder; +import org.opensearch.test.framework.TestSecurityConfig; +import org.opensearch.transport.Transport; + +import org.mockito.Mockito; + +import static org.opensearch.security.Song.ARTIST_STRING; +import static org.opensearch.security.Song.ARTIST_TWINS; +import static org.opensearch.security.Song.FIELD_ARTIST; +import static org.opensearch.security.Song.FIELD_STARS; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +public class DlsFlsLegacyHeadersTest { + static NamedXContentRegistry xContentRegistry = new NamedXContentRegistry( + ImmutableList.of( + new NamedXContentRegistry.Entry( + QueryBuilder.class, + new ParseField(TermQueryBuilder.NAME), + (CheckedFunction) (p) -> TermQueryBuilder.fromXContent(p) + ), + new NamedXContentRegistry.Entry( + QueryBuilder.class, + new ParseField(MatchQueryBuilder.NAME), + (CheckedFunction) (p) -> MatchQueryBuilder.fromXContent(p) + ), + new NamedXContentRegistry.Entry( + QueryBuilder.class, + new ParseField(RangeQueryBuilder.NAME), + (CheckedFunction) (p) -> RangeQueryBuilder.fromXContent(p) + ) + ) + ); + + /** + * Basic test that the DLS header matches the one produced in previous versions. + *
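+ * The legacy DLS header is the Base64-encoded Java serialization of a map from index name to the set of effective DLS queries for that index; the assertion below compares the deserialized objects rather than the raw Base64 strings.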

+ * Test configuration corresponds to DlsIntegrationTests.testShouldSearchI1_S2I2_S3() + */ + @Test + public void dls_simple() throws Exception { + SecurityDynamicConfiguration rolesConfig = TestSecurityConfig.Role.toRolesConfiguration( + new TestSecurityConfig.Role("read_where_field_artist_matches_artist_string").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .dls(String.format("{\"match\":{\"%s\":\"%s\"}}", FIELD_ARTIST, ARTIST_STRING)) + .on("*") + ); + + Metadata metadata = MockIndexMetadataBuilder.indices("first-test-index", "second-test-index", "my_index1").build(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = dlsFlsProcessedConfig(rolesConfig, metadata); + String header = new DlsFlsLegacyHeaders( + ctx(metadata, "read_where_field_artist_matches_artist_string"), + dlsFlsProcessedConfig, + metadata, + false + ).getDlsHeader(); + + // Created with DlsIntegrationTests.testShouldSearchI1_S2I2_S3() on an earlier OpenSearch version + String expectedHeader = + "rO0ABXNyACVqYXZhLnV0aWwuQ29sbGVjdGlvbnMkVW5tb2RpZmlhYmxlTWFw8aWo/nT1B0ICAAFMAAFtdAAPTGphdmEvdXRpbC9NYXA7eHBzcgARamF2YS51dGlsLkhhc2hNYXAFB9rBwxZg0QMAAkYACmxvYWRGYWN0b3JJAAl0aHJlc2hvbGR4cD9AAAAAAAAMdwgAAAAQAAAAA3QAEGZpcnN0LXRlc3QtaW5kZXhzcgARamF2YS51dGlsLkhhc2hTZXS6RIWVlri3NAMAAHhwdwwAAAAQP0AAAAAAAAF0AB17Im1hdGNoIjp7ImFydGlzdCI6IlN0cmluZyJ9fXh0AAlteV9pbmRleDFzcQB+AAZ3DAAAABA/QAAAAAAAAXEAfgAIeHQAEXNlY29uZC10ZXN0LWluZGV4c3EAfgAGdwwAAAAQP0AAAAAAAAFxAH4ACHh4"; + + assertEquals(Base64Helper.deserializeObject(expectedHeader), Base64Helper.deserializeObject(header)); + } + + /** + * Test that the DLS header matches the one produced in previous versions. In this case, two roles need to be considered. + *
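+ * Here, both DLS queries (the match query and the range query) appear in the per-index query set of the serialized header.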

+ * Test configuration corresponds to DlsIntegrationTests.testShouldSearchI1_S3I1_S6I2_S2() + */ + @Test + public void dls_twoRoles() throws Exception { + SecurityDynamicConfiguration rolesConfig = TestSecurityConfig.Role.toRolesConfiguration( + new TestSecurityConfig.Role("read_where_field_artist_matches_artist_twins").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .dls(String.format("{\"match\":{\"%s\":\"%s\"}}", FIELD_ARTIST, ARTIST_TWINS)) + .on("*"), + new TestSecurityConfig.Role("read_where_field_stars_greater_than_five").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .dls(String.format("{\"range\":{\"%s\":{\"gt\":%d}}}", FIELD_STARS, 5)) + .on("*") + ); + + Metadata metadata = MockIndexMetadataBuilder.indices("first-test-index", "second-test-index", "my_index1").build(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = dlsFlsProcessedConfig(rolesConfig, metadata); + String header = new DlsFlsLegacyHeaders( + ctx(metadata, "read_where_field_artist_matches_artist_twins", "read_where_field_stars_greater_than_five"), + dlsFlsProcessedConfig, + metadata, + false + ).getDlsHeader(); + + // Created with DlsIntegrationTests.testShouldSearchI1_S3I1_S6I2_S2() on an earlier OpenSearch version + String expectedHeader = + "rO0ABXNyACVqYXZhLnV0aWwuQ29sbGVjdGlvbnMkVW5tb2RpZmlhYmxlTWFw8aWo/nT1B0ICAAFMAAFtdAAPTGphdmEvdXRpbC9NYXA7eHBzcgARamF2YS51dGlsLkhhc2hNYXAFB9rBwxZg0QMAAkYACmxvYWRGYWN0b3JJAAl0aHJlc2hvbGR4cD9AAAAAAAAMdwgAAAAQAAAAA3QAEGZpcnN0LXRlc3QtaW5kZXhzcgARamF2YS51dGlsLkhhc2hTZXS6RIWVlri3NAMAAHhwdwwAAAAQP0AAAAAAAAJ0ABx7Im1hdGNoIjp7ImFydGlzdCI6IlR3aW5zIn19dAAceyJyYW5nZSI6eyJzdGFycyI6eyJndCI6NX19fXh0AAlteV9pbmRleDFzcQB+AAZ3DAAAABA/QAAAAAAAAnEAfgAIcQB+AAl4dAARc2Vjb25kLXRlc3QtaW5kZXhzcQB+AAZ3DAAAABA/QAAAAAAAAnEAfgAIcQB+AAl4eA=="; + + assertEquals(Base64Helper.deserializeObject(expectedHeader), Base64Helper.deserializeObject(header)); + } + + @Test + public void dls_none() throws Exception { + SecurityDynamicConfiguration rolesConfig = TestSecurityConfig.Role.toRolesConfiguration( + new TestSecurityConfig.Role("role").clusterPermissions("cluster_composite_ops_ro").indexPermissions("read").on("*") + ); + + Metadata metadata = MockIndexMetadataBuilder.indices("first-test-index", "second-test-index", "my_index1").build(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = dlsFlsProcessedConfig(rolesConfig, metadata); + String header = new DlsFlsLegacyHeaders(ctx(metadata, "role"), dlsFlsProcessedConfig, metadata, false).getDlsHeader(); + + assertNull(header); + } + + /** + * Basic test that the FLS header matches the one produced in previous versions. + *
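+ * For FLS, the serialized header maps each index name to the set of configured field patterns; the leading '~' in '~stars' marks the stars field as excluded.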

+ * Test configuration corresponds to FlsAndFieldMaskingTests.flsEnabledFieldsAreHiddenForNormalUsers() + */ + @Test + public void fls_simple() throws Exception { + SecurityDynamicConfiguration rolesConfig = TestSecurityConfig.Role.toRolesConfiguration( + new TestSecurityConfig.Role("fls_exclude_stars_reader").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .fls("~stars") + .on("*") + ); + + Metadata metadata = MockIndexMetadataBuilder.indices("first-test-index", "second-test-index", "fls_index").build(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = dlsFlsProcessedConfig(rolesConfig, metadata); + String header = new DlsFlsLegacyHeaders(ctx(metadata, "fls_exclude_stars_reader"), dlsFlsProcessedConfig, metadata, false) + .getFlsHeader(); + + // Created with FlsAndFieldMaskingTests.flsEnabledFieldsAreHiddenForNormalUsers() on an earlier OpenSearch version + String expectedHeader = + "rO0ABXNyACVqYXZhLnV0aWwuQ29sbGVjdGlvbnMkVW5tb2RpZmlhYmxlTWFw8aWo/nT1B0ICAAFMAAFtdAAPTGphdmEvdXRpbC9NYXA7eHBzcgARamF2YS51dGlsLkhhc2hNYXAFB9rBwxZg0QMAAkYACmxvYWRGYWN0b3JJAAl0aHJlc2hvbGR4cD9AAAAAAAAMdwgAAAAQAAAAA3QAEGZpcnN0LXRlc3QtaW5kZXhzcgARamF2YS51dGlsLkhhc2hTZXS6RIWVlri3NAMAAHhwdwwAAAAQP0AAAAAAAAF0AAZ+c3RhcnN4dAAJZmxzX2luZGV4c3EAfgAGdwwAAAAQP0AAAAAAAAFxAH4ACHh0ABFzZWNvbmQtdGVzdC1pbmRleHNxAH4ABncMAAAAED9AAAAAAAABcQB+AAh4eA=="; + + assertEquals(Base64Helper.deserializeObject(expectedHeader), Base64Helper.deserializeObject(header)); + } + + /** + * Test that the FLS header matches the one produced in previous versions. In this case, inclusion and exclusion is mixed + * and contradicts itself. + *

+ * Test configuration corresponds to FlsAndFieldMaskingTests.testGetDocumentWithNoTitleFieldAndOnlyTitleFieldFLSRestrictions() + */ + @Test + public void fls_mixedContradiction() throws Exception { + SecurityDynamicConfiguration rolesConfig = TestSecurityConfig.Role.toRolesConfiguration( + new TestSecurityConfig.Role("example_inclusive_fls").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .fls("title") + .on("first-test-index"), + new TestSecurityConfig.Role("example_exclusive_fls").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .fls(String.format("~title")) + .on("first-test-index") + ); + + Metadata metadata = MockIndexMetadataBuilder.indices("first-test-index", "second-test-index", "fls_index").build(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = dlsFlsProcessedConfig(rolesConfig, metadata); + String header = new DlsFlsLegacyHeaders( + ctx(metadata, "example_inclusive_fls", "example_exclusive_fls"), + dlsFlsProcessedConfig, + metadata, + false + ).getFlsHeader(); + + // Created with FlsAndFieldMaskingTests.testGetDocumentWithNoTitleFieldAndOnlyTitleFieldFLSRestrictions() on an earlier OpenSearch + // version + String expectedHeader = + "rO0ABXNyACVqYXZhLnV0aWwuQ29sbGVjdGlvbnMkVW5tb2RpZmlhYmxlTWFw8aWo/nT1B0ICAAFMAAFtdAAPTGphdmEvdXRpbC9NYXA7eHBzcgARamF2YS51dGlsLkhhc2hNYXAFB9rBwxZg0QMAAkYACmxvYWRGYWN0b3JJAAl0aHJlc2hvbGR4cD9AAAAAAAAMdwgAAAAQAAAAAXQAEGZpcnN0LXRlc3QtaW5kZXhzcgARamF2YS51dGlsLkhhc2hTZXS6RIWVlri3NAMAAHhwdwwAAAAQP0AAAAAAAAJ0AAV0aXRsZXQABn50aXRsZXh4"; + + assertEquals(Base64Helper.deserializeObject(expectedHeader), Base64Helper.deserializeObject(header)); + } + + /** + * Basic test that the field masking header matches the one produced in previous versions. + *
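+ * The masked field expression 'lyrics::/(?<=.{1})./::*' keeps the first character of the field value and masks the rest with '*'; the serialized header again maps each index name to the set of these expressions.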

+ * Test configuration corresponds to FlsAndFieldMaskingTests.searchForDocuments() + */ + @Test + public void fieldMasking_simple() throws Exception { + SecurityDynamicConfiguration rolesConfig = TestSecurityConfig.Role.toRolesConfiguration( + new TestSecurityConfig.Role("masked_title_artist_lyrics_reader").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .maskedFields("artist::/(?<=.{1})./::*", "lyrics::/(?<=.{1})./::*") + .on("first-test-index"), + new TestSecurityConfig.Role("masked_lyrics_reader").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .maskedFields("lyrics::/(?<=.{1})./::*") + .on("second-test-index") + ); + + Metadata metadata = MockIndexMetadataBuilder.indices("first-test-index", "second-test-index").build(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = dlsFlsProcessedConfig(rolesConfig, metadata); + String header = new DlsFlsLegacyHeaders( + ctx(metadata, "masked_title_artist_lyrics_reader", "masked_lyrics_reader"), + dlsFlsProcessedConfig, + metadata, + false + ).getFmHeader(); + + // Created with FlsAndFieldMaskingTests.flsEnabledFieldsAreHiddenForNormalUsers() on an earlier OpenSearch version + String expectedHeader = + "rO0ABXNyACVqYXZhLnV0aWwuQ29sbGVjdGlvbnMkVW5tb2RpZmlhYmxlTWFw8aWo/nT1B0ICAAFMAAFtdAAPTGphdmEvdXRpbC9NYXA7eHBzcgARamF2YS51dGlsLkhhc2hNYXAFB9rBwxZg0QMAAkYACmxvYWRGYWN0b3JJAAl0aHJlc2hvbGR4cD9AAAAAAAAMdwgAAAAQAAAAAnQAEGZpcnN0LXRlc3QtaW5kZXhzcgARamF2YS51dGlsLkhhc2hTZXS6RIWVlri3NAMAAHhwdwwAAAAQP0AAAAAAAAJ0ABdhcnRpc3Q6Oi8oPzw9LnsxfSkuLzo6KnQAF2x5cmljczo6Lyg/PD0uezF9KS4vOjoqeHQAEXNlY29uZC10ZXN0LWluZGV4c3EAfgAGdwwAAAAQP0AAAAAAAAF0ABdseXJpY3M6Oi8oPzw9LnsxfSkuLzo6Knh4"; + + assertEquals(Base64Helper.deserializeObject(expectedHeader), Base64Helper.deserializeObject(header)); + } + + @Test + public void performHeaderDecoration_oldNode() throws Exception { + Metadata metadata = exampleMetadata(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = dlsFlsProcessedConfig(exampleRolesConfig(), metadata); + + Transport.Connection connection = Mockito.mock(Transport.Connection.class); + Mockito.when(connection.getVersion()).thenReturn(Version.V_2_0_0); + + // ShardSearchRequest does not extend ActionRequest, thus the headers must be set + ShardSearchRequest request = Mockito.mock(ShardSearchRequest.class); + + Map headerSink = new HashMap<>(); + + DlsFlsLegacyHeaders subject = new DlsFlsLegacyHeaders(ctx(metadata, "test_role"), dlsFlsProcessedConfig, metadata, false); + + subject.performHeaderDecoration(connection, request, headerSink); + + assertEquals(subject.getDlsHeader(), headerSink.get(ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER)); + assertEquals(subject.getFlsHeader(), headerSink.get(ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER)); + assertEquals(subject.getFmHeader(), headerSink.get(ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER)); + } + + @Test + public void performHeaderDecoration_actionRequest() throws Exception { + Metadata metadata = exampleMetadata(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = dlsFlsProcessedConfig(exampleRolesConfig(), metadata); + + Transport.Connection connection = Mockito.mock(Transport.Connection.class); + Mockito.when(connection.getVersion()).thenReturn(Version.V_2_0_0); + + // SearchRequest does extend ActionRequest, thus the headers must not be set + SearchRequest request = new SearchRequest(); + + Map headerSink = new HashMap<>(); + + DlsFlsLegacyHeaders subject = new DlsFlsLegacyHeaders(ctx(metadata, "test_role"), dlsFlsProcessedConfig, metadata, 
false); + + subject.performHeaderDecoration(connection, request, headerSink); + assertEquals(0, headerSink.size()); + } + + @Test + public void performHeaderDecoration_newNode() throws Exception { + Metadata metadata = exampleMetadata(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = dlsFlsProcessedConfig(exampleRolesConfig(), metadata); + + Transport.Connection connection = Mockito.mock(Transport.Connection.class); + Mockito.when(connection.getVersion()).thenReturn(Version.CURRENT); + + // ShardSearchRequest does not extend ActionRequest, thus the headers must be set + ShardSearchRequest request = Mockito.mock(ShardSearchRequest.class); + + Map headerSink = new HashMap<>(); + + DlsFlsLegacyHeaders subject = new DlsFlsLegacyHeaders(ctx(metadata, "test_role"), dlsFlsProcessedConfig, metadata, false); + + subject.performHeaderDecoration(connection, request, headerSink); + assertEquals(0, headerSink.size()); + } + + @Test + public void prepare() throws Exception { + Metadata metadata = exampleMetadata(); + + ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + + DlsFlsLegacyHeaders.prepare( + threadContext, + ctx(metadata, "test_role"), + dlsFlsProcessedConfig(exampleRolesConfig(), metadata), + metadata, + false + ); + DlsFlsLegacyHeaders instance = threadContext.getTransient(DlsFlsLegacyHeaders.TRANSIENT_HEADER); + + assertNotNull(instance); + } + + @Test + public void prepare_ccs() throws Exception { + Metadata metadata = exampleMetadata(); + + ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + threadContext.putTransient(ConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_TRUSTED_CLUSTER_REQUEST, true); + User user = new User("test_user"); + ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(metadata).build(); + + PrivilegesEvaluationContext ctx = new PrivilegesEvaluationContext( + user, + ImmutableSet.of("test_role"), + null, + new ClusterSearchShardsRequest(), + null, + null, + new IndexNameExpressionResolver(new ThreadContext(Settings.EMPTY)), + () -> clusterState + ); + + DlsFlsLegacyHeaders.prepare(threadContext, ctx, dlsFlsProcessedConfig(exampleRolesConfig(), metadata), metadata, false); + assertTrue(threadContext.getResponseHeaders().containsKey(ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER)); + } + + static PrivilegesEvaluationContext ctx(Metadata metadata, String... 
roles) { + User user = new User("test_user"); + ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(metadata).build(); + + return new PrivilegesEvaluationContext( + user, + ImmutableSet.copyOf(roles), + null, + null, + null, + null, + new IndexNameExpressionResolver(new ThreadContext(Settings.EMPTY)), + () -> clusterState + ); + } + + static DlsFlsProcessedConfig dlsFlsProcessedConfig(SecurityDynamicConfiguration rolesConfig, Metadata metadata) { + return new DlsFlsProcessedConfig( + rolesConfig, + metadata.getIndicesLookup(), + xContentRegistry, + Settings.EMPTY, + FieldMasking.Config.DEFAULT + ); + } + + static SecurityDynamicConfiguration exampleRolesConfig() { + return TestSecurityConfig.Role.toRolesConfiguration( + new TestSecurityConfig.Role("test_role").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .dls("{\"match\":{\"artist\":\"foo\"}}") + .fls("~stars") + .maskedFields("foo") + .on("*") + ); + } + + static Metadata exampleMetadata() { + return MockIndexMetadataBuilder.indices("first-test-index", "second-test-index").build(); + } +} diff --git a/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/DocumentPrivilegesTest.java b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/DocumentPrivilegesTest.java new file mode 100644 index 0000000000..97a0ddb69e --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/DocumentPrivilegesTest.java @@ -0,0 +1,1397 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges.dlsfls; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import org.hamcrest.BaseMatcher; +import org.hamcrest.Description; +import org.hamcrest.DiagnosingMatcher; +import org.hamcrest.Matcher; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Suite; + +import org.opensearch.action.IndicesRequest; +import org.opensearch.action.support.IndicesOptions; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.CheckedFunction; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.core.ParseField; +import org.opensearch.core.common.Strings; +import org.opensearch.core.xcontent.MediaTypeRegistry; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.index.query.BaseTermQueryBuilder; +import org.opensearch.index.query.MatchNoneQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.index.query.TermQueryBuilder; +import org.opensearch.security.privileges.PrivilegesConfigurationValidationException; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; +import org.opensearch.security.resolver.IndexResolverReplacer; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.user.User; +import org.opensearch.test.framework.TestSecurityConfig; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.opensearch.security.util.MockIndexMetadataBuilder.dataStreams; +import static org.opensearch.security.util.MockIndexMetadataBuilder.indices; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +/** + * Unit tests for the DocumentPrivileges class and the underlying AbstractRuleBasedPrivileges class. As these classes + * provide a number of different code paths for checking privileges, the inner test classes use parameterized tests + * to define test matrices to make sure all the code paths are covered. The dimensions of the matrices are: + *

    + *
  • Different user configurations: With user attrs, without user attrs, with single role, with mixed roles + *
  • Statefulness: The AbstractRuleBasedPrivileges.StatefulRules class may or may not cover a particular index; + * this parameter simulates both cases. This is necessary because the AbstractRuleBasedPrivileges.StatefulRules class + * is updated asynchronously and thus might only start covering an index at a later point in time. + *
  • DfmEmptyOverridesAll: The state of the "plugins.security.dfm_empty_overrides_all" setting. + *
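+ * For example, one generated test instance combines the user spec "dls_role_1 and non_dls_role, attributes" (roles dls_role_1 and non_dls_role, attribute attr.attr_a=a) with the index index_a1, Statefulness.STATEFUL and dfm_empty_overrides_all set to true.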
+ * Note: The individual check these parameters and choose the correct assertions based on these parameters. + * This creates quite complex conditions, which might take a while to get an overview over - I am not too happy + * about this. The alternative would be a test oracle, which however will much more complex. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + DocumentPrivilegesTest.IndicesAndAliases_getRestriction.class, + DocumentPrivilegesTest.IndicesAndAliases_isUnrestricted.class, + DocumentPrivilegesTest.DataStreams_getRestriction.class, + DocumentPrivilegesTest.DlsQuery.class }) +public class DocumentPrivilegesTest { + + static NamedXContentRegistry xContentRegistry = new NamedXContentRegistry( + ImmutableList.of( + new NamedXContentRegistry.Entry( + QueryBuilder.class, + new ParseField(TermQueryBuilder.NAME), + (CheckedFunction) (p) -> TermQueryBuilder.fromXContent(p) + ) + ) + ); + + @RunWith(Parameterized.class) + public static class IndicesAndAliases_getRestriction { + final static Metadata INDEX_METADATA = // + indices("index_a1", "index_a2", "index_b1", "index_b2")// + .alias("alias_a") + .of("index_a1", "index_a2")// + .build(); + + final static ClusterState CLUSTER_STATE = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(INDEX_METADATA).build(); + + final static IndexAbstraction.Index index_a1 = (IndexAbstraction.Index) INDEX_METADATA.getIndicesLookup().get("index_a1"); + final static IndexAbstraction.Index index_a2 = (IndexAbstraction.Index) INDEX_METADATA.getIndicesLookup().get("index_a2"); + final static IndexAbstraction.Index index_b1 = (IndexAbstraction.Index) INDEX_METADATA.getIndicesLookup().get("index_b1"); + + final Statefulness statefulness; + final UserSpec userSpec; + final User user; + final IndexSpec indexSpec; + final IndexAbstraction.Index index; + final PrivilegesEvaluationContext context; + final boolean dfmEmptyOverridesAll; + + @Test + public void wildcard() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*").dls(QueryBuilders.termQuery("dept", "dept_r1")).on("*"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*").dls(QueryBuilders.termQuery("dept", "dept_r2")).on("*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("*") + ); + DocumentPrivileges subject = createSubject(roleConfig); + + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + + if (userSpec.roles.contains("non_dls_role") && dfmEmptyOverridesAll) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.contains("dls_role_1") && userSpec.roles.contains("dls_role_2")) { + // If we have two DLS roles, we get the union of queries as restriction + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"), termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("dls_role_1")) { + // Only one role: Check that the restriction matches the role definition above. + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else if (userSpec.roles.contains("dls_role_2")) { + // Only one role: Check that the restriction matches the role definition above. 
+ assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + // If dfmEmptyOverridesAll == false, roles with restrictions take precedence over roles without restrictions + // Thus, this check comes after the checks for the cases with present DLS roles + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.isEmpty()) { + // Users without any roles do not have any privileges to access anything + assertThat(dlsRestriction, isFullyRestricted()); + } else { + fail("Missing case for " + this); + } + + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + + IndexToRuleMap restrictionMap = subject.getRestrictions(context, Collections.singleton(index.getName())); + if (dlsRestriction.isUnrestricted()) { + assertTrue("restrictionMap should be unrestricted according to " + dlsRestriction, restrictionMap.isUnrestricted()); + } else { + assertEquals( + "restrictiobMap should contain " + dlsRestriction, + dlsRestriction.getQueries(), + restrictionMap.getIndexMap().get(index.getName()).getQueries() + ); + } + } + + @Test + public void indexPattern() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("index_a*"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_b*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("*") + ); + DocumentPrivileges subject = createSubject(roleConfig); + + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + + if (userSpec.roles.contains("non_dls_role") && dfmEmptyOverridesAll) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.contains("dls_role_1") || userSpec.roles.contains("dls_role_2")) { + // As the roles use index patterns, we have to check the requested index in order to know the effective restrictions + if (index == index_a1 || index == index_a2) { + // Only dls_role_1 and non_dls_role match index_a1 or index_a2. We need to check the effective roles. + if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } else if (index == index_b1) { + // Only dls_role_2 and non_dls_role match index_b1. We need to check the effective roles. 
+ if (userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + // If dfmEmptyOverridesAll == false, roles with restrictions take precedence over roles without restrictions + // Thus, this check comes after the checks for the cases with present DLS roles + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.isEmpty()) { + // Users without any roles do not have any privileges to access anything + assertThat(dlsRestriction, isFullyRestricted()); + } else { + fail("Missing case for " + this); + } + + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + } + + @Test + public void indexPatternTemplate() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("index_${attr.attr_a}1"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_${attr.attr_a}*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("index_${attr.attr_a}*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + + if (index == index_b1) { + // This test case never grants privileges to index_b1 + assertThat(dlsRestriction, isFullyRestricted()); + } else if (userSpec.attributes.isEmpty()) { + // As all the roles in our roleConfig (see above) use user attributes, these won't work with + // users without attributes. Then, access should be also restricted + assertThat(dlsRestriction, isFullyRestricted()); + } else if (userSpec.roles.contains("non_dls_role") && dfmEmptyOverridesAll) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.contains("dls_role_1") || userSpec.roles.contains("dls_role_2")) { + // As the roles use index patterns, we have to check the requested index in order to know the effective restrictions + if (index == index_a1) { + // dls_role_1, dls_role_2 and non_dls_role match index_a1. 
+ if (userSpec.roles.contains("dls_role_1") && userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"), termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else { + assertThat(dlsRestriction, isUnrestricted()); + } + } else if (index == index_a2) { + // only dls_role_2 and non_dls_role match index_a2 + if (userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.isEmpty()) { + // Users without any roles do not have any privileges to access anything + assertThat(dlsRestriction, isFullyRestricted()); + } else { + fail("Missing case for " + this); + } + + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + } + + @Test + public void indexPatternTemplate_invalid() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("/index_${attr.attr_a}1\\/"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + + if (userSpec.roles.contains("dls_role_1") && !(userSpec.roles.contains("non_dls_role") && dfmEmptyOverridesAll)) { + // dls_role_1 will yield an invalid regex pattern. As we also have user attributes, this will + // lead to an exception being thrown at evaluation time + + try { + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + fail("getRestriction() should have thrown an exception. However, it returned: " + dlsRestriction); + } catch (PrivilegesEvaluationException e) { + assertEquals("Error while evaluating index pattern of role dls_role_1", e.getMessage()); + } + + if (!dfmEmptyOverridesAll) { + // For the isUnrestricted(), we will only get an error if dfmEmptyOverridesAll == false. + // This is because for dfmEmptyOverridesAll == true, we just look for roles which give us + // unrestricted access + + try { + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + fail("isUnrestricted() should have thrown an exception. 
However, it returned: " + isUnrestricted); + } catch (PrivilegesEvaluationException e) { + assertEquals("Error while evaluating index pattern of role dls_role_1", e.getMessage()); + } + } else { + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + assertFalse("isUnrestricted() should return false, as there is no role which gives privileges", isUnrestricted); + } + + } else { + // Here, we just assert that no exception is being thrown + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + } + } + + @Test + public void queryPatternTemplate() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls("{\"term\": {\"dept\": \"${attr.attr_a}1\"}}") + .on("index_a1"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls("{\"term\": {\"dept\": \"${attr.attr_a}2\"}}") + .on("index_a*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("index_a*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + + if (index == index_b1) { + // This test case never grants privileges to index_b1 + assertThat(dlsRestriction, isFullyRestricted()); + } else if (userSpec.attributes.isEmpty()) { + // If a role uses undefined user attributes for DLS queries, the attribute templates + // remain unchanged in the resulting query. This is a property of the current attribute handling code. + // It would be probably better if an error would be raised in that case. + if (index == index_a1) { + if (userSpec.roles.contains("dls_role_1") && userSpec.roles.contains("dls_role_2")) { + assertThat( + dlsRestriction, + isRestricted(termQuery("dept", "${attr.attr_a}1"), termQuery("dept", "${attr.attr_a}2")) + ); + } + } + } else if (userSpec.roles.contains("non_dls_role") && dfmEmptyOverridesAll) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.contains("dls_role_1") || userSpec.roles.contains("dls_role_2")) { + // As the roles use index patterns, we have to check the requested index in order to know the effective restrictions + if (index == index_a1) { + // dls_role_1, dls_role_2 and non_dls_role match index_a1. 
+ if (userSpec.roles.contains("dls_role_1") && userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "a1"), termQuery("dept", "a2"))); + } else if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "a1"))); + } else { + assertThat(dlsRestriction, isUnrestricted()); + } + } else if (index == index_a2) { + // only dls_role_2 and non_dls_role match index_a2 + if (userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "a2"))); + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.isEmpty()) { + // Users without any roles do not have any privileges to access anything + assertThat(dlsRestriction, isFullyRestricted()); + } else { + fail("Missing case for " + this); + } + + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + } + + @Test + public void alias() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("alias_a"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_a2"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("alias_a") + ); + DocumentPrivileges subject = createSubject(roleConfig); + + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + + if (userSpec.roles.isEmpty()) { + // Users without any roles do not have any privileges to access anything + assertThat(dlsRestriction, isFullyRestricted()); + } else if (index == index_a1) { + if (userSpec.roles.contains("non_dls_role") && dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } else if (index == index_a2) { + if (userSpec.roles.contains("non_dls_role") && dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.contains("dls_role_1") && userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"), termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else if (userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } else if (index == index_b1) { + // 
index_b1 is not member of alias_a. Thus, the role defintion does not give any privileges. + assertThat(dlsRestriction, isFullyRestricted()); + } else { + fail("Missing case for " + this); + } + + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + } + + @Parameterized.Parameters(name = "{0}; {1}; {2}; {3}") + public static Collection params() { + List result = new ArrayList<>(); + + for (UserSpec userSpec : Arrays.asList( + new UserSpec("non_dls_role", "non_dls_role"), // + new UserSpec("dls_role_1", "dls_role_1"), // + new UserSpec("dls_role_1 and dls_role_2", "dls_role_1", "dls_role_2"), // + new UserSpec("dls_role_1 and non_dls_role", "dls_role_1", "non_dls_role"), // + new UserSpec("non_dls_role, attributes", "non_dls_role").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1, attributes", "dls_role_1").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1 and dls_role_2, attributes", "dls_role_1", "dls_role_2").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1 and non_dls_role, attributes", "dls_role_1", "non_dls_role").attribute("attr.attr_a", "a"), // + new UserSpec("no roles")// + )) { + for (IndexSpec indexSpec : Arrays.asList( + new IndexSpec("index_a1"), // + new IndexSpec("index_a2"), // + new IndexSpec("index_b1") + )) { + for (Statefulness statefulness : Statefulness.values()) { + for (DfmEmptyOverridesAll dfmEmptyOverridesAll : DfmEmptyOverridesAll.values()) { + result.add(new Object[] { userSpec, indexSpec, statefulness, dfmEmptyOverridesAll }); + } + } + } + } + return result; + } + + public IndicesAndAliases_getRestriction( + UserSpec userSpec, + IndexSpec indexSpec, + Statefulness statefulness, + DfmEmptyOverridesAll dfmEmptyOverridesAll + ) { + this.userSpec = userSpec; + this.indexSpec = indexSpec; + this.user = userSpec.buildUser(); + this.index = (IndexAbstraction.Index) INDEX_METADATA.getIndicesLookup().get(indexSpec.index); + this.context = new PrivilegesEvaluationContext( + this.user, + ImmutableSet.copyOf(userSpec.roles), + null, + null, + null, + null, + null, + () -> CLUSTER_STATE + ); + this.statefulness = statefulness; + this.dfmEmptyOverridesAll = dfmEmptyOverridesAll == DfmEmptyOverridesAll.DFM_EMPTY_OVERRIDES_ALL_TRUE; + } + + private DocumentPrivileges createSubject(SecurityDynamicConfiguration roleConfig) { + return new DocumentPrivileges( + roleConfig, + statefulness == Statefulness.STATEFUL ? 
INDEX_METADATA.getIndicesLookup() : Map.of(), + xContentRegistry, + Settings.builder().put("plugins.security.dfm_empty_overrides_all", this.dfmEmptyOverridesAll).build() + ); + } + } + + @RunWith(Parameterized.class) + public static class IndicesAndAliases_isUnrestricted { + final static Metadata INDEX_METADATA = // + indices("index_a1", "index_a2", "index_b1", "index_b2")// + .alias("alias_a") + .of("index_a1", "index_a2")// + .build(); + + final static ClusterState CLUSTER_STATE = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(INDEX_METADATA).build(); + + final static IndexNameExpressionResolver INDEX_NAME_EXPRESSION_RESOLVER = new IndexNameExpressionResolver( + new ThreadContext(Settings.EMPTY) + ); + final static IndexResolverReplacer RESOLVER_REPLACER = new IndexResolverReplacer( + INDEX_NAME_EXPRESSION_RESOLVER, + () -> CLUSTER_STATE, + null + ); + + final Statefulness statefulness; + final UserSpec userSpec; + final User user; + final IndicesSpec indicesSpec; + final IndexResolverReplacer.Resolved resolvedIndices; + final PrivilegesEvaluationContext context; + final boolean dfmEmptyOverridesAll; + + @Test + public void wildcard() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*").dls(QueryBuilders.termQuery("dept", "dept_r1")).on("*"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*").dls(QueryBuilders.termQuery("dept", "dept_r2")).on("*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + boolean result = subject.isUnrestricted(context, resolvedIndices); + + if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result. + assertTrue(result); + } else if (!dfmEmptyOverridesAll && userSpec.roles.equals(ImmutableList.of("non_dls_role"))) { + // For dfmEmptyOverridesAll == false, only non_dls_role must be there for an unrestricted result. + assertTrue(result); + } else { + assertFalse(result); + } + } + + @Test + public void indexPattern() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("index_a*"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_b*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + boolean result = subject.isUnrestricted(context, resolvedIndices); + + if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + assertTrue(result); + } else if (!dfmEmptyOverridesAll && userSpec.roles.equals(ImmutableList.of("non_dls_role"))) { + // For dfmEmptyOverridesAll == false, only non_dls_role must be there for an unrestricted result. 
+ assertTrue(result); + } else if (!dfmEmptyOverridesAll + && userSpec.roles.equals(ImmutableList.of("dls_role_1", "non_dls_role")) + && indicesSpec.indices.equals(ImmutableList.of("index_b1"))) { + // index_b1 is only covered by non_dls_role, so we are also unrestricted here + assertTrue(result); + } else { + assertFalse(result); + } + } + + @Test + public void template() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("index_${attr.attr_a}1"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_${attr.attr_a}*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("index_${attr.attr_a}*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + boolean result = subject.isUnrestricted(context, resolvedIndices); + + if (indicesSpec.indices.contains("index_b1")) { + // None of the roles above cover index_b1, so full restrictions should be assumed + assertFalse(result); + } else if (userSpec.attributes.isEmpty()) { + // All roles defined above use attributes. If there are no user attributes, we must get a restricted result. + assertFalse(result); + } else if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result. + assertTrue(result); + } else if (!dfmEmptyOverridesAll && userSpec.roles.equals(ImmutableList.of("non_dls_role"))) { + // For dfmEmptyOverridesAll == false, only non_dls_role must be there for an unrestricted result. + assertTrue(result); + } else if (!dfmEmptyOverridesAll + && userSpec.roles.equals(ImmutableList.of("dls_role_1", "non_dls_role")) + && indicesSpec.indices.equals(ImmutableList.of("index_a2"))) { + // index_a2 is not covered by this configuration + assertTrue(result); + } else { + assertFalse(result); + } + } + + @Test + public void alias_static() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("alias_a"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_a2"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("alias_a") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + boolean result = subject.isUnrestricted(context, resolvedIndices); + + if (resolvedIndices.getAllIndices().contains("index_b1")) { + // index_b1 is not covered by any of the above roles, so there should be always a restriction + assertFalse(result); + } else if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + assertTrue(result); + } else if (!dfmEmptyOverridesAll && userSpec.roles.equals(ImmutableList.of("non_dls_role"))) { + assertTrue(result); + } else { + assertFalse(result); + } + } + + @Test + public void alias_static_wildcardNonDls() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("alias_a"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_a2"), + new 
TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + boolean result = subject.isUnrestricted(context, resolvedIndices); + + if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + assertTrue(result); + } else if (!dfmEmptyOverridesAll && userSpec.roles.equals(ImmutableList.of("non_dls_role"))) { + assertTrue(result); + } else if (!dfmEmptyOverridesAll + && userSpec.roles.contains("non_dls_role") + && indicesSpec.indices.equals(ImmutableList.of("index_b1"))) { + // index_b1 is covered neither by dls_role_1 nor dls_role_2, so it is unrestricted when non_dls_role is present + assertTrue(result); + } else { + assertFalse(result); + } + } + + @Test + public void alias_wildcard() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("alias_a*"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_a2"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("alias_a*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + boolean result = subject.isUnrestricted(context, resolvedIndices); + + if (resolvedIndices.getAllIndices().contains("index_b1")) { + // index_b1 is not covered by any of the above roles, so there should be always a restriction + assertFalse(result); + } else if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + assertTrue(result); + } else if (!dfmEmptyOverridesAll && userSpec.roles.equals(ImmutableList.of("non_dls_role"))) { + assertTrue(result); + } else { + assertFalse(result); + } + } + + @Test + public void alias_template() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("alias_${attr.attr_a}"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_${attr.attr_a}2"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("alias_${attr.attr_a}") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + boolean result = subject.isUnrestricted(context, resolvedIndices); + + if (userSpec.attributes.isEmpty()) { + // All roles defined above use attributes. If there are no user attributes, we must get a restricted result. + assertFalse(result); + } else if (resolvedIndices.getAllIndices().contains("index_b1")) { + // index_b1 is not covered by any of the above roles, so there should be always a restriction + assertFalse(result); + } else if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result. + assertTrue(result); + } else if (!dfmEmptyOverridesAll && userSpec.roles.equals(ImmutableList.of("non_dls_role"))) { + // For dfmEmptyOverridesAll == false, the presence only non_dls_role must be there for an unrestricted result. 
+ assertTrue(result); + } else { + assertFalse(result); + } + } + + @Parameterized.Parameters(name = "{0}; {1}; {2}; {3}") + public static Collection params() { + List result = new ArrayList<>(); + + for (UserSpec userSpec : Arrays.asList( + new UserSpec("non_dls_role", "non_dls_role"), // + new UserSpec("dls_role_1", "dls_role_1"), // + new UserSpec("dls_role_1 and dls_role_2", "dls_role_1", "dls_role_2"), // + new UserSpec("dls_role_1 and non_dls_role", "dls_role_1", "non_dls_role"), // + new UserSpec("non_dls_role, attributes", "non_dls_role").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1, attributes", "dls_role_1").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1 and dls_role_2, attributes", "dls_role_1", "dls_role_2").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1 and non_dls_role, attributes", "dls_role_1", "non_dls_role").attribute("attr.attr_a", "a"), // + new UserSpec("no roles")// + )) { + for (IndicesSpec indicesSpec : Arrays.asList( + new IndicesSpec("index_a1"), // + new IndicesSpec("index_a2"), // + new IndicesSpec("index_b1"), // + new IndicesSpec("alias_a"), // + new IndicesSpec("index_a1", "index_a2"), // + new IndicesSpec("index_a1", "index_b1"), // + new IndicesSpec("alias_a", "index_b1") + )) { + for (Statefulness statefulness : Statefulness.values()) { + for (DfmEmptyOverridesAll dfmEmptyOverridesAll : DfmEmptyOverridesAll.values()) { + result.add(new Object[] { userSpec, indicesSpec, statefulness, dfmEmptyOverridesAll }); + } + } + } + } + return result; + } + + public IndicesAndAliases_isUnrestricted( + UserSpec userSpec, + IndicesSpec indicesSpec, + Statefulness statefulness, + DfmEmptyOverridesAll dfmEmptyOverridesAll + ) { + this.userSpec = userSpec; + this.indicesSpec = indicesSpec; + this.user = userSpec.buildUser(); + this.resolvedIndices = RESOLVER_REPLACER.resolveRequest(new IndicesRequest.Replaceable() { + + @Override + public String[] indices() { + return indicesSpec.indices.toArray(new String[0]); + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED; + } + + @Override + public IndicesRequest indices(String... strings) { + return this; + } + }); + this.context = new PrivilegesEvaluationContext( + this.user, + ImmutableSet.copyOf(userSpec.roles), + null, + null, + null, + RESOLVER_REPLACER, + INDEX_NAME_EXPRESSION_RESOLVER, + () -> CLUSTER_STATE + ); + this.statefulness = statefulness; + this.dfmEmptyOverridesAll = dfmEmptyOverridesAll == DfmEmptyOverridesAll.DFM_EMPTY_OVERRIDES_ALL_TRUE; + } + + private DocumentPrivileges createSubject(SecurityDynamicConfiguration roleConfig) { + return new DocumentPrivileges( + roleConfig, + statefulness == Statefulness.STATEFUL ? 
INDEX_METADATA.getIndicesLookup() : Map.of(), + xContentRegistry, + Settings.builder().put("plugins.security.dfm_empty_overrides_all", this.dfmEmptyOverridesAll).build() + ); + } + } + + @RunWith(Parameterized.class) + public static class DataStreams_getRestriction { + final static Metadata INDEX_METADATA = dataStreams("datastream_a1", "datastream_a2", "datastream_b1", "datastream_b2").build(); + final static ClusterState CLUSTER_STATE = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(INDEX_METADATA).build(); + + final static IndexAbstraction.Index datastream_a1_backing = (IndexAbstraction.Index) INDEX_METADATA.getIndicesLookup() + .get(".ds-datastream_a1-000001"); + final static IndexAbstraction.Index datastream_a2_backing = (IndexAbstraction.Index) INDEX_METADATA.getIndicesLookup() + .get(".ds-datastream_a2-000001"); + final static IndexAbstraction.Index datastream_b1_backing = (IndexAbstraction.Index) INDEX_METADATA.getIndicesLookup() + .get(".ds-datastream_b1-000001"); + + final Statefulness statefulness; + final UserSpec userSpec; + final User user; + final IndexSpec indexSpec; + final IndexAbstraction.Index index; + final PrivilegesEvaluationContext context; + final boolean dfmEmptyOverridesAll; + + @Test + public void wildcard() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*").dls(QueryBuilders.termQuery("dept", "dept_r1")).on("*"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*").dls(QueryBuilders.termQuery("dept", "dept_r2")).on("*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + + if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result. 
+ assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.isEmpty()) { + assertThat(dlsRestriction, isFullyRestricted()); + } else if (userSpec.roles.contains("dls_role_1") && userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"), termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else if (userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r2"))); + } else if (!dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + fail("Unhandled case " + this); + } + + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + } + + @Test + public void indexPattern() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("datastream_a*"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("datastream_b*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("datastream_a*", "datastream_b*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + + if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result. 
+ assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.isEmpty()) { + assertThat(dlsRestriction, isFullyRestricted()); + } else if (userSpec.roles.contains("dls_role_1") || userSpec.roles.contains("dls_role_2")) { + if (index == datastream_a1_backing || index == datastream_a2_backing) { + if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } else if (index == datastream_b1_backing) { + if (userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("non_dls_role")) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } + } else if (!dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + fail("Unhandled case " + this); + } + + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + } + + @Test + public void indexPattern_nonDlsRoleOnWildcard() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("datastream_a*"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("datastream_b*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + + if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result. 
+ assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.isEmpty()) { + assertThat(dlsRestriction, isFullyRestricted()); + } else if (userSpec.roles.contains("dls_role_1") || userSpec.roles.contains("dls_role_2")) { + if (index == datastream_a1_backing || index == datastream_a2_backing) { + if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } else if (index == datastream_b1_backing) { + if (userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("non_dls_role")) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } + } else if (!dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + fail("Unhandled case " + this); + } + + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + } + + @Test + public void indexPatternTemplate() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("datastream_${attr.attr_a}1"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("datastream_${attr.attr_a}*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("datastream_${attr.attr_a}*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + + if (index == datastream_b1_backing) { + // This test case never grants privileges to datastream_b1 + assertThat(dlsRestriction, isFullyRestricted()); + } else if (userSpec.attributes.isEmpty()) { + // As all the roles in our roleConfig (see above) use user attributes, these won't work with + // users without attributes. 
In that case, access should also be restricted + assertThat(dlsRestriction, isFullyRestricted()); + } else if (userSpec.roles.isEmpty()) { + assertThat(dlsRestriction, isFullyRestricted()); + } else if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.contains("dls_role_1") || userSpec.roles.contains("dls_role_2")) { + if (index == datastream_a1_backing) { + if (userSpec.roles.contains("dls_role_1") && userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"), termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } else if (index == datastream_a2_backing) { + if (userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r2"))); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } + } else if (!dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + fail("Unhandled case " + this); + } + + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + } + + @Parameterized.Parameters(name = "{0}; {1}; {2}; {3}") + public static Collection params() { + List result = new ArrayList<>(); + + for (UserSpec userSpec : Arrays.asList( + new UserSpec("non_dls_role", "non_dls_role"), // + new UserSpec("dls_role_1", "dls_role_1"), // + new UserSpec("dls_role_1 and dls_role_2", "dls_role_1", "dls_role_2"), // + new UserSpec("dls_role_1 and non_dls_role", "dls_role_1", "non_dls_role"), // + new UserSpec("non_dls_role, attributes", "non_dls_role").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1, attributes", "dls_role_1").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1 and dls_role_2, attributes", "dls_role_1", "dls_role_2").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1 and non_dls_role, attributes", "dls_role_1", "non_dls_role").attribute("attr.attr_a", "a"), // + new UserSpec("no roles")// + )) { + for (IndexSpec indexSpec : Arrays.asList( + new IndexSpec(datastream_a1_backing.getName()), // + new IndexSpec(datastream_a2_backing.getName()), // + new IndexSpec(datastream_b1_backing.getName()) + )) { + for (Statefulness statefulness : Statefulness.values()) { + for (DfmEmptyOverridesAll dfmEmptyOverridesAll : DfmEmptyOverridesAll.values()) { + result.add(new Object[] { userSpec, indexSpec, statefulness, dfmEmptyOverridesAll }); + } + } + } + } + return result; + } + + private DocumentPrivileges createSubject(SecurityDynamicConfiguration roleConfig) { + return new DocumentPrivileges( + roleConfig, + statefulness == Statefulness.STATEFUL ?
INDEX_METADATA.getIndicesLookup() : Map.of(), + xContentRegistry, + Settings.builder().put("plugins.security.dfm_empty_overrides_all", this.dfmEmptyOverridesAll).build() + ); + } + + public DataStreams_getRestriction( + UserSpec userSpec, + IndexSpec indexSpec, + Statefulness statefulness, + DfmEmptyOverridesAll dfmEmptyOverridesAll + ) { + this.userSpec = userSpec; + this.indexSpec = indexSpec; + this.user = userSpec.buildUser(); + this.index = (IndexAbstraction.Index) INDEX_METADATA.getIndicesLookup().get(indexSpec.index); + this.context = new PrivilegesEvaluationContext( + this.user, + ImmutableSet.copyOf(userSpec.roles), + null, + null, + null, + null, + null, + () -> CLUSTER_STATE + ); + this.statefulness = statefulness; + this.dfmEmptyOverridesAll = dfmEmptyOverridesAll == DfmEmptyOverridesAll.DFM_EMPTY_OVERRIDES_ALL_TRUE; + } + + } + + /** + * Unit tests for the inner class DocumentPrivileges.DlsQuery + */ + public static class DlsQuery { + @Test(expected = PrivilegesConfigurationValidationException.class) + public void invalidQuery() throws Exception { + DocumentPrivileges.DlsQuery.create("{\"invalid\": \"totally\"}", xContentRegistry); + } + + @Test(expected = PrivilegesEvaluationException.class) + public void invalidTemplatedQuery() throws Exception { + DocumentPrivileges.DlsQuery.create("{\"invalid\": \"totally ${attr.foo}\"}", xContentRegistry) + .evaluate(new PrivilegesEvaluationContext(new User("test_user"), ImmutableSet.of(), null, null, null, null, null, null)); + } + + @Test + public void equals() throws Exception { + DocumentPrivileges.DlsQuery query1a = DocumentPrivileges.DlsQuery.create( + Strings.toString(MediaTypeRegistry.JSON, QueryBuilders.termQuery("foo", "1")), + xContentRegistry + ); + DocumentPrivileges.DlsQuery query1b = DocumentPrivileges.DlsQuery.create( + Strings.toString(MediaTypeRegistry.JSON, QueryBuilders.termQuery("foo", "1")), + xContentRegistry + ); + DocumentPrivileges.DlsQuery query2 = DocumentPrivileges.DlsQuery.create( + Strings.toString(MediaTypeRegistry.JSON, QueryBuilders.termQuery("foo", "2")), + xContentRegistry + ); + + assertEquals(query1a, query1a); + assertEquals(query1a, query1b); + assertNotEquals(query2, query1a); + assertFalse(query1a.equals(query1a.queryString)); + } + } + + static SecurityDynamicConfiguration roleConfig(TestSecurityConfig.Role... roles) { + return TestSecurityConfig.Role.toRolesConfiguration(roles); + } + + public static class UserSpec { + final List roles; + final String description; + final Map attributes = new HashMap<>(); + + UserSpec(String description, String... roles) { + this.description = description; + this.roles = Arrays.asList(roles); + } + + UserSpec attribute(String name, String value) { + this.attributes.put(name, value); + return this; + } + + User buildUser() { + User user = new User("test_user_" + description); + user.addAttributes(this.attributes); + return user; + } + + @Override + public String toString() { + return this.description; + } + } + + public static class IndexSpec { + final String index; + + IndexSpec(String index) { + this.index = index; + } + + @Override + public String toString() { + return this.index; + } + } + + public static class IndicesSpec { + final ImmutableList indices; + + IndicesSpec(String... indices) { + this.indices = ImmutableList.copyOf(indices); + } + + @Override + public String toString() { + return this.indices.toString(); + } + } + + /** + * Determines whether the stateful/denormalized data structure shall be created or not. 
+ */ + static enum Statefulness { + STATEFUL, + NON_STATEFUL + } + + /** + * Reflects the value of the setting plugins.security.dfm_empty_overrides_all + */ + static enum DfmEmptyOverridesAll { + DFM_EMPTY_OVERRIDES_ALL_TRUE, + DFM_EMPTY_OVERRIDES_ALL_FALSE + } + + static DiagnosingMatcher isUnrestricted() { + return new DiagnosingMatcher() { + + @Override + public void describeTo(Description description) { + description.appendText("A DlsRestriction object that has no restrictions"); + } + + @Override + protected boolean matches(Object item, Description mismatchDescription) { + if (!(item instanceof DlsRestriction)) { + mismatchDescription.appendValue(item).appendText(" is not a DlsRestriction object"); + return false; + } + + DlsRestriction dlsRestriction = (DlsRestriction) item; + + if (dlsRestriction.isUnrestricted()) { + return true; + } else { + mismatchDescription.appendText("The DlsRestriction object is not unrestricted:").appendValue(dlsRestriction); + return false; + } + } + + }; + + } + + @SafeVarargs + static DiagnosingMatcher isRestricted(Matcher... queries) { + return new DiagnosingMatcher() { + + @Override + public void describeTo(Description description) { + description.appendText("A DlsRestriction object that has the restrictions: ") + .appendList("", "", ", ", Arrays.asList(queries)); + } + + @Override + protected boolean matches(Object item, Description mismatchDescription) { + if (!(item instanceof DlsRestriction)) { + mismatchDescription.appendValue(item).appendText(" is not a DlsRestriction object"); + return false; + } + + DlsRestriction dlsRestriction = (DlsRestriction) item; + + if (dlsRestriction.isUnrestricted()) { + mismatchDescription.appendText("The DlsRestriction object is not restricted:").appendValue(dlsRestriction); + return false; + + } + + Set> subMatchers = new HashSet<>(Arrays.asList(queries)); + Set unmatchedQueries = new HashSet<>(dlsRestriction.getQueries()); + + for (DocumentPrivileges.RenderedDlsQuery query : dlsRestriction.getQueries()) { + for (Matcher subMatcher : subMatchers) { + if (subMatcher.matches(query.getQueryBuilder())) { + unmatchedQueries.remove(query); + subMatchers.remove(subMatcher); + break; + } + } + } + + if (unmatchedQueries.isEmpty() && subMatchers.isEmpty()) { + return true; + } + + if (!unmatchedQueries.isEmpty()) { + mismatchDescription.appendText("The DlsRestriction contains unexpected queries:") + .appendValue(unmatchedQueries) + .appendText("\n"); + } + + if (!subMatchers.isEmpty()) { + mismatchDescription.appendText("The DlsRestriction does not contain expected queries: ") + .appendValue(subMatchers) + .appendText("\n"); + } + + return false; + } + + }; + } + + static DiagnosingMatcher isFullyRestricted() { + return new DiagnosingMatcher() { + + @Override + public void describeTo(Description description) { + description.appendText("A DlsRestriction object that has full restrictions"); + } + + @Override + protected boolean matches(Object item, Description mismatchDescription) { + if (!(item instanceof DlsRestriction)) { + mismatchDescription.appendValue(item).appendText(" is not a DlsRestriction object"); + return false; + } + + DlsRestriction dlsRestriction = (DlsRestriction) item; + + if (dlsRestriction.getQueries().size() != 0) { + for (DocumentPrivileges.RenderedDlsQuery query : dlsRestriction.getQueries()) { + if (!query.getQueryBuilder().equals(new MatchNoneQueryBuilder())) { + mismatchDescription.appendText("The DlsRestriction object is not fully restricted:") + .appendValue(dlsRestriction); + return false; + } + 
} + + return true; + } else { + mismatchDescription.appendText("The DlsRestriction object is not fully restricted:").appendValue(dlsRestriction); + return false; + } + } + + }; + } + + static BaseMatcher termQuery(String field, Object value) { + return new BaseMatcher() { + + @Override + public void describeTo(Description description) { + description.appendText("A TermQueryBuilder object with ").appendValue(field).appendText("=").appendValue(value); + } + + @Override + public boolean matches(Object item) { + if (!(item instanceof BaseTermQueryBuilder)) { + return false; + } + + BaseTermQueryBuilder queryBuilder = (BaseTermQueryBuilder) item; + + if (queryBuilder.fieldName().equals(field) && queryBuilder.value().equals(value)) { + return true; + } else { + return false; + } + } + }; + } + +} diff --git a/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FieldMaskingTest.java b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FieldMaskingTest.java new file mode 100644 index 0000000000..7f4c5bacf2 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FieldMaskingTest.java @@ -0,0 +1,283 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges.dlsfls; + +import java.nio.charset.StandardCharsets; +import java.util.Arrays; + +import com.google.common.collect.ImmutableSet; +import org.apache.lucene.util.BytesRef; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.security.privileges.PrivilegesConfigurationValidationException; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.support.WildcardMatcher; +import org.opensearch.security.user.User; +import org.opensearch.test.framework.TestSecurityConfig; + +import static org.opensearch.security.util.MockIndexMetadataBuilder.indices; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +/** + * Unit tests on the FieldMasking class - top-level functionality is tested in FieldMaskingTest.Basic. The inner classes FieldMasking.Field + * and FieldMasking.FieldMaskingRule are tested in the correspondingly named inner test suites. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ FieldMaskingTest.Basic.class, FieldMaskingTest.Field.class, FieldMaskingTest.FieldMaskingRule.class }) +public class FieldMaskingTest { + + /** + * Top-level unit tests on the FieldMasking class. Note: This does not test the full functionality, as most of it + * is provided by the AbstractRuleBasedPrivileges super-class, which is already covered by DocumentPrivilegesTest.
+ */ + public static class Basic { + final static Metadata INDEX_METADATA = // + indices("index_a1", "index_a2", "index_b1", "index_b2")// + .alias("alias_a") + .of("index_a1", "index_a2")// + .build(); + + final static ClusterState CLUSTER_STATE = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(INDEX_METADATA).build(); + + @Test + public void indexPattern_simple() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("fm_role_1").indexPermissions("*").maskedFields("masked_field_a").on("index_a*") + ); + + FieldMasking subject = createSubject(roleConfig); + + FieldMasking.FieldMaskingRule rule = subject.getRestriction(ctx("fm_role_1"), "index_a1"); + + assertEquals(new FieldMasking.FieldMaskingExpression("masked_field_a"), rule.get("masked_field_a").getExpression()); + assertNull("other_field_should be unrestricted", rule.get("other_field")); + } + + @Test + public void indexPattern_joined() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("fm_role_1").indexPermissions("*").maskedFields("masked_field_a").on("index_a*"), + new TestSecurityConfig.Role("fm_role_2").indexPermissions("*").maskedFields("masked_field_a1_*").on("index_a1") + ); + + FieldMasking subject = createSubject(roleConfig); + + FieldMasking.FieldMaskingRule rule = subject.getRestriction(ctx("fm_role_1", "fm_role_2"), "index_a1"); + + assertEquals(new FieldMasking.FieldMaskingExpression("masked_field_a"), rule.get("masked_field_a").getExpression()); + assertEquals(new FieldMasking.FieldMaskingExpression("masked_field_a1_*"), rule.get("masked_field_a1_x").getExpression()); + + assertNull("other_field_should be unrestricted", rule.get("other_field")); + } + + @Test + public void indexPattern_unrestricted() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("fm_role_1").indexPermissions("*").maskedFields("masked_field_a").on("index_a*"), + new TestSecurityConfig.Role("non_fm_role").indexPermissions("*").on("*") + ); + + FieldMasking subject = createSubject(roleConfig); + + FieldMasking.FieldMaskingRule rule = subject.getRestriction(ctx("fm_role_1", "non_fm_role"), "index_a1"); + assertNull("masked_field_a be unrestricted", rule.get("masked_field_a")); + } + + static SecurityDynamicConfiguration roleConfig(TestSecurityConfig.Role... roles) { + return TestSecurityConfig.Role.toRolesConfiguration(roles); + } + + static FieldMasking createSubject(SecurityDynamicConfiguration roleConfig) { + return new FieldMasking( + roleConfig, + INDEX_METADATA.getIndicesLookup(), + FieldMasking.Config.DEFAULT, + Settings.builder().put("plugins.security.dfm_empty_overrides_all", true).build() + ); + } + + static PrivilegesEvaluationContext ctx(String... roles) { + return new PrivilegesEvaluationContext( + new User("test_user"), + ImmutableSet.copyOf(roles), + null, + null, + null, + null, + null, + () -> CLUSTER_STATE + ); + } + } + + /** + * Unit tests on the FieldMasking.FieldMaskingRule.Field class. 
+ */ + public static class Field { + @Test + public void simple() throws Exception { + FieldMasking.FieldMaskingExpression expression = new FieldMasking.FieldMaskingExpression("field_*"); + assertEquals("field_*", expression.getSource()); + assertEquals(WildcardMatcher.from("field_*"), expression.getPattern()); + assertNull(expression.getAlgoName()); + assertNull(expression.getRegexReplacements()); + + FieldMasking.FieldMaskingRule.Field field = new FieldMasking.FieldMaskingRule.Field(expression, FieldMasking.Config.DEFAULT); + assertEquals("96c8d1da7eb153db858d4f0585120319e17ed1162db9e94bee19fb10b6d19727", field.apply("foobar")); + } + + @Test + public void simple_deviatingDefaultAlgorithm() throws Exception { + FieldMasking.FieldMaskingExpression expression = new FieldMasking.FieldMaskingExpression("field_*"); + FieldMasking.FieldMaskingRule.Field field = new FieldMasking.FieldMaskingRule.Field( + expression, + FieldMasking.Config.fromSettings( + Settings.builder().put("plugins.security.masked_fields.algorithm.default", "SHA-256").build() + ) + ); + assertEquals("c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2", field.apply("foobar")); + } + + @Test + public void explicitAlgorithm() throws Exception { + FieldMasking.FieldMaskingExpression expression = new FieldMasking.FieldMaskingExpression("field_*::SHA-256"); + assertEquals(WildcardMatcher.from("field_*"), expression.getPattern()); + assertEquals("SHA-256", expression.getAlgoName()); + assertEquals("field_*::SHA-256", expression.getSource()); + assertNull(expression.getRegexReplacements()); + + FieldMasking.FieldMaskingRule.Field field = new FieldMasking.FieldMaskingRule.Field(expression, FieldMasking.Config.DEFAULT); + assertEquals("c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2", field.apply("foobar")); + } + + @Test(expected = PrivilegesConfigurationValidationException.class) + public void explicitAlgorithm_invalid() throws Exception { + new FieldMasking.FieldMaskingExpression("field_*::SHADY-777"); + } + + @Test + public void regex_single() throws Exception { + FieldMasking.FieldMaskingExpression expression = new FieldMasking.FieldMaskingExpression("field_*:://::+masked+"); + assertEquals(WildcardMatcher.from("field_*"), expression.getPattern()); + assertNull(expression.getAlgoName()); + assertEquals(1, expression.getRegexReplacements().size()); + assertEquals("", expression.getRegexReplacements().get(0).getRegex().toString()); + assertEquals("+masked+", expression.getRegexReplacements().get(0).getReplacement()); + assertEquals("field_*:://::+masked+", expression.getSource()); + assertEquals( + Arrays.asList(new FieldMasking.FieldMaskingExpression.RegexReplacement("//", "+masked+")), + expression.getRegexReplacements() + ); + + FieldMasking.FieldMaskingRule.Field field = new FieldMasking.FieldMaskingRule.Field(expression, FieldMasking.Config.DEFAULT); + assertEquals("foobar", field.apply("foobar")); + assertEquals("foo+masked+bar", field.apply("foobar")); + } + + @Test + public void regex_multi() throws Exception { + FieldMasking.FieldMaskingExpression expression = new FieldMasking.FieldMaskingExpression( + "field_*:://::+masked+::/\\d/::*" + ); + assertEquals(WildcardMatcher.from("field_*"), expression.getPattern()); + assertNull(expression.getAlgoName()); + assertEquals(2, expression.getRegexReplacements().size()); + assertEquals("", expression.getRegexReplacements().get(0).getRegex().toString()); + assertEquals("+masked+", expression.getRegexReplacements().get(0).getReplacement()); + 
assertEquals("\\d", expression.getRegexReplacements().get(1).getRegex().toString()); + assertEquals("*", expression.getRegexReplacements().get(1).getReplacement()); + assertEquals("field_*:://::+masked+::/\\d/::*", expression.getSource()); + + FieldMasking.FieldMaskingRule.Field field = new FieldMasking.FieldMaskingRule.Field(expression, FieldMasking.Config.DEFAULT); + assertEquals("foobar", field.apply("foobar")); + assertEquals("foo**bar", field.apply("foo42bar")); + assertEquals("foo+masked+bar**", field.apply("foobar42")); + } + + @Test(expected = PrivilegesConfigurationValidationException.class) + public void regex_oddParams() throws Exception { + new FieldMasking.FieldMaskingExpression("field_*::/a/::b::/c/"); + } + + @Test(expected = PrivilegesConfigurationValidationException.class) + public void regex_invalidRegex() throws Exception { + new FieldMasking.FieldMaskingExpression("field_*::/a\\/::b"); + } + + @Test(expected = PrivilegesConfigurationValidationException.class) + public void regex_missingSlashes() throws Exception { + new FieldMasking.FieldMaskingExpression("field_*::a::b"); + } + } + + /** + * Unit tests on the FieldMasking.FieldMaskingRule class. + */ + public static class FieldMaskingRule { + @Test + public void allowAll() { + assertTrue( + "FieldMasking.FieldMaskingRule.ALLOW_ALL identifies itself as such", + FieldMasking.FieldMaskingRule.ALLOW_ALL.isAllowAll() + ); + assertTrue( + "FieldMasking.FieldMaskingRule.ALLOW_ALL identifies itself as such", + FieldMasking.FieldMaskingRule.ALLOW_ALL.isUnrestricted() + ); + assertFalse("FieldMasking.FieldMaskingRule.ALLOW_ALL allows field", FieldMasking.FieldMaskingRule.ALLOW_ALL.isMasked("field")); + assertEquals("FM:[]", FieldMasking.FieldMaskingRule.ALLOW_ALL.toString()); + } + + @Test + public void allowAll_constructed() throws Exception { + FieldMasking.FieldMaskingRule rule = FieldMasking.FieldMaskingRule.of(FieldMasking.Config.DEFAULT); + assertTrue("FieldMasking.FieldMaskingRule without masked fields should return true for isAllowAll()", rule.isAllowAll()); + assertFalse("FieldMasking.FieldMaskingRule without masked fields allows field", rule.isMasked("field")); + assertEquals("FM:[]", rule.toString()); + } + + @Test + public void simple() throws Exception { + FieldMasking.FieldMaskingRule rule = FieldMasking.FieldMaskingRule.of(FieldMasking.Config.DEFAULT, "field_masked_*"); + assertFalse("FieldMasking.FieldMaskingRule should return false for isAllowAll()", rule.isAllowAll()); + assertTrue("Rule applies to field field_masked_1", rule.isMasked("field_masked_1")); + assertFalse("Rule does not apply to field field_other", rule.isMasked("field_other")); + assertEquals("96c8d1da7eb153db858d4f0585120319e17ed1162db9e94bee19fb10b6d19727", rule.get("field_masked_1").apply("foobar")); + assertEquals( + new BytesRef("96c8d1da7eb153db858d4f0585120319e17ed1162db9e94bee19fb10b6d19727".getBytes(StandardCharsets.UTF_8)), + rule.get("field_masked_1").apply(new BytesRef("foobar".getBytes(StandardCharsets.UTF_8))) + ); + assertEquals("FM:[field_masked_*]", rule.toString()); + } + + @Test + public void keyword() throws Exception { + FieldMasking.FieldMaskingRule rule = FieldMasking.FieldMaskingRule.of(FieldMasking.Config.DEFAULT, "field_masked"); + assertFalse("FieldMasking.FieldMaskingRule should return false for isAllowAll()", rule.isAllowAll()); + assertTrue("Rule applies to field field_masked_1", rule.isMasked("field_masked")); + assertTrue("Rule applies to field field_masked_1.keyword", rule.isMasked("field_masked.keyword")); + 
assertEquals( + "96c8d1da7eb153db858d4f0585120319e17ed1162db9e94bee19fb10b6d19727", + rule.get("field_masked.keyword").apply("foobar") + ); + } + } +} diff --git a/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FieldPrivilegesTest.java b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FieldPrivilegesTest.java new file mode 100644 index 0000000000..54a32e9972 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FieldPrivilegesTest.java @@ -0,0 +1,296 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges.dlsfls; + +import java.util.Arrays; +import java.util.Collections; + +import com.google.common.collect.ImmutableSet; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.security.privileges.PrivilegesConfigurationValidationException; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.support.WildcardMatcher; +import org.opensearch.security.user.User; +import org.opensearch.test.framework.TestSecurityConfig; + +import static org.opensearch.security.util.MockIndexMetadataBuilder.indices; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +/** + * Unit tests on the FieldPrivileges class - top-level functionality is tested in FieldPrivilegesTest.Basic. The inner classes FieldPrivileges.FlsRule + * and FieldPrivileges.FlsPattern are tested in the correspondingly named inner test suites.
+ */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ FieldPrivilegesTest.Basic.class, FieldPrivilegesTest.FlsRule.class, FieldPrivilegesTest.FlsPattern.class }) +public class FieldPrivilegesTest { + public static class Basic { + final static Metadata INDEX_METADATA = // + indices("index_a1", "index_a2", "index_b1", "index_b2")// + .alias("alias_a") + .of("index_a1", "index_a2")// + .build(); + + final static ClusterState CLUSTER_STATE = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(INDEX_METADATA).build(); + + @Test + public void indexPattern_simple_inclusive() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("fls_role_1").indexPermissions("*").fls("included_field_a").on("index_a*") + ); + + FieldPrivileges subject = createSubject(roleConfig); + + FieldPrivileges.FlsRule rule = subject.getRestriction(ctx("fls_role_1"), "index_a1"); + assertTrue("included_field_a should be allowed", rule.isAllowed("included_field_a")); + assertFalse("Fields other than included_field_a should be not allowed", rule.isAllowed("other_field")); + } + + @Test + public void indexPattern_simple_exclusive() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("fls_role_1").indexPermissions("*").fls("~excluded_field_a").on("index_a*") + ); + + FieldPrivileges subject = createSubject(roleConfig); + + FieldPrivileges.FlsRule rule = subject.getRestriction(ctx("fls_role_1"), "index_a1"); + assertFalse("excluded_field_a should be not allowed", rule.isAllowed("excluded_field_a")); + assertTrue("Fields other than included_field_a should be allowed", rule.isAllowed("other_field")); + } + + @Test + public void indexPattern_joined_inclusive() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("fls_role_1").indexPermissions("*").fls("included_field_a").on("index_a*"), + new TestSecurityConfig.Role("fls_role_2").indexPermissions("*").fls("included_field_a1_*").on("index_a1") + ); + + FieldPrivileges subject = createSubject(roleConfig); + + FieldPrivileges.FlsRule rule = subject.getRestriction(ctx("fls_role_1", "fls_role_2"), "index_a1"); + assertTrue("included_field_a should be allowed", rule.isAllowed("included_field_a")); + assertTrue("included_field_a1_foo should be allowed", rule.isAllowed("included_field_a1_foo")); + assertFalse( + "Fields other than included_field_a and included_field_a1_foo should be not allowed", + rule.isAllowed("other_field") + ); + } + + @Test + public void indexPattern_joined_exclusive() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("fls_role_1").indexPermissions("*").fls("~excluded_field_a").on("index_a*"), + new TestSecurityConfig.Role("fls_role_2").indexPermissions("*").fls("~excluded_field_a1_*").on("index_a1") + ); + + FieldPrivileges subject = createSubject(roleConfig); + + FieldPrivileges.FlsRule rule = subject.getRestriction(ctx("fls_role_1", "fls_role_2"), "index_a1"); + assertFalse("excluded_field_a should be not allowed", rule.isAllowed("excluded_field_a")); + assertFalse("excluded_field_a1_foo should be not allowed", rule.isAllowed("excluded_field_a1_foo")); + assertTrue("Fields other than included_field_a and included_field_a1_foo should be allowed", rule.isAllowed("other_field")); + } + + @Test + public void indexPattern_unrestricted_inclusive() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new 
TestSecurityConfig.Role("fls_role_1").indexPermissions("*").fls("included_field_a").on("index_a*"), + new TestSecurityConfig.Role("non_fls_role").indexPermissions("*").on("*") + ); + + FieldPrivileges subject = createSubject(roleConfig); + + FieldPrivileges.FlsRule rule = subject.getRestriction(ctx("fls_role_1", "non_fls_role"), "index_a1"); + assertTrue("included_field_a should be allowed", rule.isAllowed("included_field_a")); + assertTrue("other_field should be allowed", rule.isAllowed("other_field")); + } + + @Test + public void indexPattern_unrestricted_exclusive() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("fls_role_1").indexPermissions("*").fls("~excluded_field_a").on("index_a*"), + new TestSecurityConfig.Role("non_fls_role").indexPermissions("*").on("*") + ); + + FieldPrivileges subject = createSubject(roleConfig); + + FieldPrivileges.FlsRule rule = subject.getRestriction(ctx("fls_role_1", "non_fls_role"), "index_a1"); + assertTrue("excluded_field_a should be allowed", rule.isAllowed("excluded_field_a")); + assertTrue("other_field should be allowed", rule.isAllowed("other_field")); + } + + static SecurityDynamicConfiguration roleConfig(TestSecurityConfig.Role... roles) { + return TestSecurityConfig.Role.toRolesConfiguration(roles); + } + + static FieldPrivileges createSubject(SecurityDynamicConfiguration roleConfig) { + return new FieldPrivileges( + roleConfig, + INDEX_METADATA.getIndicesLookup(), + Settings.builder().put("plugins.security.dfm_empty_overrides_all", true).build() + ); + } + + static PrivilegesEvaluationContext ctx(String... roles) { + return new PrivilegesEvaluationContext( + new User("test_user"), + ImmutableSet.copyOf(roles), + null, + null, + null, + null, + null, + () -> CLUSTER_STATE + ); + } + } + + public static class FlsRule { + @Test + public void simple_inclusive() throws Exception { + FieldPrivileges.FlsRule flsRule = FieldPrivileges.FlsRule.of("field_inclusive"); + assertFalse("FLS rule field_inclusive should be restricted", flsRule.isUnrestricted()); + assertTrue("field_inclusive is allowed", flsRule.isAllowed("field_inclusive")); + assertFalse("other_field is not allowed", flsRule.isAllowed("other_field")); + assertEquals("FLS:[field_inclusive]", flsRule.toString()); + assertEquals(Arrays.asList("field_inclusive"), flsRule.getSource()); + } + + @Test + public void simple_exclusive() throws Exception { + FieldPrivileges.FlsRule flsRule = FieldPrivileges.FlsRule.of("~field_exclusive"); + assertFalse("FLS rule field_exclusive should be restricted", flsRule.isUnrestricted()); + assertFalse("field_exclusive is not allowed", flsRule.isAllowed("field_exclusive")); + assertTrue("other_field is allowed", flsRule.isAllowed("other_field")); + } + + @Test + public void multi_inclusive() throws Exception { + FieldPrivileges.FlsRule flsRule = FieldPrivileges.FlsRule.of("field_inclusive_1", "field_inclusive_2"); + assertFalse("FLS rule should be restricted", flsRule.isUnrestricted()); + assertTrue("field_inclusive_1 is allowed", flsRule.isAllowed("field_inclusive_1")); + assertTrue("field_inclusive_2 is allowed", flsRule.isAllowed("field_inclusive_2")); + assertFalse("other_field is not allowed", flsRule.isAllowed("other_field")); + } + + @Test + public void multi_exclusive() throws Exception { + FieldPrivileges.FlsRule flsRule = FieldPrivileges.FlsRule.of("~field_exclusive_1", "~field_exclusive_2"); + assertFalse("FLS rule should be restricted", flsRule.isUnrestricted()); + assertFalse("field_exclusive_1 
is not allowed", flsRule.isAllowed("field_exclusive_1")); + assertFalse("field_exclusive_1 is not allowed", flsRule.isAllowed("field_exclusive_2")); + assertTrue("other_field is allowed", flsRule.isAllowed("other_field")); + } + + @Test + public void multi_mixed() throws Exception { + FieldPrivileges.FlsRule flsRule = FieldPrivileges.FlsRule.of("field_inclusive_1", "~field_exclusive_1"); + // This is one of the weird parts. This just REPLICATES the old behavior for backwards compat. + // The behavior is undocumented - if there are exclusions and inclusions, only exclusions are regarded. + // It might make sense to re-think this behavior. + assertFalse("FLS rule should be restricted", flsRule.isUnrestricted()); + assertFalse("field_exclusive_1 is not allowed", flsRule.isAllowed("field_exclusive_1")); + assertTrue("other_field is allowed", flsRule.isAllowed("other_field")); + } + + @Test + public void nested_inclusive() throws Exception { + FieldPrivileges.FlsRule flsRule = FieldPrivileges.FlsRule.of("a.b.c"); + assertFalse("FLS rule should be restricted", flsRule.isUnrestricted()); + assertTrue("a.b.c is allowed", flsRule.isAllowed("a.b.c")); + assertFalse("a.b is not allowed for non-objects", flsRule.isAllowed("a.b")); + assertTrue("a.b is not allowed for objects", flsRule.isObjectAllowed("a.b")); + assertFalse("other_field is not allowed", flsRule.isAllowed("other_field")); + assertFalse("a.b.other_field is not allowed", flsRule.isAllowed("a.b.other_field")); + } + + @Test + public void nested_exclusive() throws Exception { + FieldPrivileges.FlsRule flsRule = FieldPrivileges.FlsRule.of("~a.b.c"); + assertFalse("FLS rule should be restricted", flsRule.isUnrestricted()); + assertFalse("a.b.c is not allowed", flsRule.isAllowed("a.b.c")); + assertTrue("a.b is allowed", flsRule.isAllowed("a.b")); + assertTrue("a.b is allowed for objects", flsRule.isObjectAllowed("a.b")); + } + + @Test + public void wildcard_inclusive() throws Exception { + FieldPrivileges.FlsRule flsRule = FieldPrivileges.FlsRule.of("*"); + assertTrue("FLS rule * is unrestricted", flsRule.isUnrestricted()); + assertTrue("anything is allowed", flsRule.isAllowed("anything")); + assertEquals("FLS:*", flsRule.toString()); + } + + } + + public static class FlsPattern { + @Test + public void simple_inclusive() throws Exception { + FieldPrivileges.FlsPattern flsPattern = new FieldPrivileges.FlsPattern("field_inclusive"); + assertFalse("field_inclusive should be not excluded", flsPattern.isExcluded()); + assertEquals(WildcardMatcher.from("field_inclusive"), flsPattern.getPattern()); + assertEquals("field_inclusive", flsPattern.getSource()); + assertEquals(Collections.emptyList(), flsPattern.getParentObjectPatterns()); + } + + @Test + public void simple_exclusive() throws Exception { + FieldPrivileges.FlsPattern flsPattern = new FieldPrivileges.FlsPattern("~field_exclusive"); + assertTrue("field_exclusive should be excluded", flsPattern.isExcluded()); + assertEquals(WildcardMatcher.from("field_exclusive"), flsPattern.getPattern()); + assertEquals("~field_exclusive", flsPattern.getSource()); + assertEquals(Collections.emptyList(), flsPattern.getParentObjectPatterns()); + } + + @Test + public void simple_exclusive2() throws Exception { + FieldPrivileges.FlsPattern flsPattern = new FieldPrivileges.FlsPattern("!field_exclusive"); + assertTrue("field_exclusive should be excluded", flsPattern.isExcluded()); + assertEquals(WildcardMatcher.from("field_exclusive"), flsPattern.getPattern()); + assertEquals("!field_exclusive", 
flsPattern.getSource()); + assertEquals(Collections.emptyList(), flsPattern.getParentObjectPatterns()); + } + + @Test + public void nested_inclusive() throws Exception { + FieldPrivileges.FlsPattern flsPattern = new FieldPrivileges.FlsPattern("a.b.c_inclusive"); + assertEquals(WildcardMatcher.from("a.b.c_inclusive"), flsPattern.getPattern()); + assertEquals( + Arrays.asList(new FieldPrivileges.FlsPattern("a"), new FieldPrivileges.FlsPattern("a.b")), + flsPattern.getParentObjectPatterns() + ); + } + + @Test + public void nested_exclusive() throws Exception { + FieldPrivileges.FlsPattern flsPattern = new FieldPrivileges.FlsPattern("~a.b.c_exclusive"); + assertTrue("a.b.c_exclusive should be excluded", flsPattern.isExcluded()); + assertEquals(WildcardMatcher.from("a.b.c_exclusive"), flsPattern.getPattern()); + // Exclusive patterns do not need an explicit inclusion of the parent objects. Thus, we get an empty list here + assertEquals(Collections.emptyList(), flsPattern.getParentObjectPatterns()); + } + + @Test(expected = PrivilegesConfigurationValidationException.class) + public void invalidRegex() throws Exception { + new FieldPrivileges.FlsPattern("/a\\/"); + } + } +} diff --git a/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FlsDocumentFilterTest.java b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FlsDocumentFilterTest.java new file mode 100644 index 0000000000..f2cef0bc4e --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FlsDocumentFilterTest.java @@ -0,0 +1,303 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges.dlsfls; + +import com.google.common.collect.ImmutableSet; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.junit.Assert; +import org.junit.Test; + +import static java.nio.charset.StandardCharsets.UTF_8; + +public class FlsDocumentFilterTest { + + @Test + public void identity() throws Exception { + String sourceDocument = "{\n" + + " \"big_integer\": 12345678901234567890123456789012345678901234567890,\n" + + " \"string\": \"x\",\n" + + " \"big_float\": 12345678901234567890123456789012345678901234567890.123456789,\n" + + " \"object\": {\n" + + " \"attribute\": \"x\",\n" + + " \"nested_object\": {\n" + + " \"x\": \"y\"\n" + + " },\n" + + " \"nested_array\": [1,2,3]\n" + + " },\n" + + " \"array\": [\n" + + " 1,\n" + + " \"x\",\n" + + " {\n" + + " \"foo\": \"bar\"\n" + + " },\n" + + " [1,2,3,4]\n" + + " ]\n" + + "}\n"; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.ALLOW_ALL, + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of() + ); + + assertJsonStructurallyEquivalent(sourceDocument, result); + } + + @Test + public void filterSimpleAttribute_exclusion() throws Exception { + String sourceDocument = "{\n" + " \"a\": 41,\n" + " \"b\": 42,\n" + " \"c\": 43\n" + "}\n"; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.of("~b"), + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of() + ); + + String expectedDocument = "{\n" + " \"a\": 41,\n" + " \"c\": 43\n" + "}\n"; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public void filterSimpleAttribute_inclusion() throws Exception { + String sourceDocument = "{\n" + " \"a\": 41,\n" + " \"b\": 42,\n" + " \"c\": 43\n" + "}\n"; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.of("b"), + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of() + ); + + String expectedDocument = "{\n" + " \"b\": 42\n" + "}\n"; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public void filterObject_exclusion() throws Exception { + String sourceDocument = "{\n" + + " \"a\": 41,\n" + + " \"b\": {\n" + + " \"x\": 123,\n" + + " \"y\": 456\n" + + " },\n" + + " \"c\": 43\n" + + "}\n"; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.of("~b"), + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of() + ); + + String expectedDocument = "{\n" + " \"a\": 41,\n" + " \"c\": 43\n" + "}\n"; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public void filterObjectAttribute_exclusion() throws Exception { + String sourceDocument = "{\n" + + " \"a\": 41,\n" + + " \"b\": {\n" + + " \"x\": 123,\n" + + " \"y\": 456\n" + + " },\n" + + " \"c\": 43\n" + + "}\n"; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.of("~b.x"), + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of() + ); + + String expectedDocument = "{\n" + + " \"a\": 41,\n" + + " \"b\": {\n" + + " \"y\": 456\n" + + " },\n" + + " \"c\": 43\n" + + "}\n"; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public void filterObjectAttribute_inclusion() throws Exception { + String sourceDocument = "{\n" + + " \"a\": 41,\n" + + " \"b\": {\n" + + " \"x\": 123,\n" + + " \"y\": 456\n" + + " },\n" + + 
" \"c\": 43,\n" + + " \"d\": {}\n" + + "}\n"; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.of("b.x", "c"), + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of() + ); + + String expectedDocument = "{\n" + + " \"b\": {\n" + + " \"x\": 123\n" + + " },\n" + + " \"c\": 43\n" + + " }"; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public void filterArrayContainingObject_exclusion() throws Exception { + String sourceDocument = "{\n" + + " \"a\": 41,\n" + + " \"b\": [\n" + + " {\"x\": 12, \"y\": 34},\n" + + " {\"x\": 56, \"y\": 78}\n" + + " ],\n" + + " \"c\": 43\n" + + "}\n"; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.of("~b.x"), + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of() + ); + + String expectedDocument = "{\n" + + " \"a\": 41,\n" + + " \"b\": [\n" + + " {\"y\": 34},\n" + + " {\"y\": 78}\n" + + " ],\n" + + " \"c\": 43\n" + + "}\n"; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public void filterArrayContainingObject_inclusion() throws Exception { + String sourceDocument = "{\n" + + " \"a\": 41,\n" + + " \"b\": [\n" + + " {\"x\": 12, \"y\": 34},\n" + + " {\"x\": 56, \"y\": 78}\n" + + " ],\n" + + " \"c\": 43\n" + + "}\n"; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.of("b.y", "c"), + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of() + ); + + String expectedDocument = "{\n" + + " \"b\": [\n" + + " {\"y\": 34},\n" + + " {\"y\": 78}\n" + + " ],\n" + + " \"c\": 43\n" + + "}\n"; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public void keepMetadata() throws Exception { + String sourceDocument = "{\n" + " \"a\": 41,\n" + " \"b\": 42,\n" + " \"c\": 43\n" + "}\n"; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.of("~b"), + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of("b") + ); + + String expectedDocument = "{\n" + " \"a\": 41,\n" + " \"b\": 42,\n" + " \"c\": 43\n" + "}\n"; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public void maskSimpleAttribute() throws Exception { + String sourceDocument = "{\n" + " \"a\": \"x\",\n" + " \"b\": \"y\",\n" + " \"c\": \"z\"\n" + "}\n"; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.ALLOW_ALL, + FieldMasking.FieldMaskingRule.of(FieldMasking.Config.DEFAULT, "b"), + ImmutableSet.of() + ); + + String expectedDocument = "{\n" + + " \"a\": \"x\",\n" + + " \"b\": \"1147ddc9246d856b1ce322f1dc9eeda895b56d545c324510c2eca47a9dcc5d3f\",\n" + + " \"c\": \"z\"\n" + + "}\n"; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public void maskObjectAttribute() throws Exception { + String sourceDocument = "{\n" + + " \"a\": \"x\",\n" + + " \"b\": {\n" + + " \"b1\": \"y1\",\n" + + " \"b2\": \"y2\"\n" + + " },\n" + + " \"c\": \"z\"\n" + + "}\n"; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.ALLOW_ALL, + FieldMasking.FieldMaskingRule.of(FieldMasking.Config.DEFAULT, "b.b1"), + ImmutableSet.of() + ); + + String expectedDocument = "{\n" + + " \"a\": \"x\",\n" + + " \"b\": {\n" + + " \"b1\": \"19937da9d0b0fb38c3ce369bed130b647fa547914d675e09a62ba260a6d7811b\",\n" + + " \"b2\": \"y2\"\n" + + " },\n" + + " \"c\": \"z\"\n" + + 
"}\n"; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + private static void assertJsonStructurallyEquivalent(String expected, byte[] actual) throws Exception { + ObjectMapper objectMapper = new ObjectMapper(); + + JsonNode expectedTree = objectMapper.readTree(expected); + JsonNode actualTree = objectMapper.readTree(actual); + + Assert.assertEquals("JSON is not structurally equivalent", expectedTree, actualTree); + } + +} diff --git a/src/integrationTest/java/org/opensearch/security/rest/WhoAmITests.java b/src/integrationTest/java/org/opensearch/security/rest/WhoAmITests.java index c41b5f4cda..ade540f85c 100644 --- a/src/integrationTest/java/org/opensearch/security/rest/WhoAmITests.java +++ b/src/integrationTest/java/org/opensearch/security/rest/WhoAmITests.java @@ -77,7 +77,7 @@ public class WhoAmITests { protected final String expectedAuthorizedBody = "{\"dn\":null,\"is_admin\":false,\"is_node_certificate_request\":false}"; protected final String expectedUnuauthorizedBody = - "no permissions for [security:whoamiprotected] and User [name=who_am_i_user_no_perm, backend_roles=[], requestedTenant=null]"; + "no permissions for [any of [cluster:admin/opendistro_security/whoamiprotected, security:whoamiprotected]] and User [name=who_am_i_user_no_perm, backend_roles=[], requestedTenant=null]"; public static final String WHOAMI_ENDPOINT = "_plugins/_security/whoami"; public static final String WHOAMI_PROTECTED_ENDPOINT = "_plugins/_security/whoamiprotected"; diff --git a/src/integrationTest/java/org/opensearch/security/util/MockIndexMetadataBuilder.java b/src/integrationTest/java/org/opensearch/security/util/MockIndexMetadataBuilder.java new file mode 100644 index 0000000000..cb0e4f32c4 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/util/MockIndexMetadataBuilder.java @@ -0,0 +1,134 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.util; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.opensearch.Version; +import org.opensearch.cluster.metadata.AliasMetadata; +import org.opensearch.cluster.metadata.DataStream; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.index.Index; + +/** + * Creates mocks of org.opensearch.cluster.metadata.IndexAbstraction maps. Useful for unit testing code which + * operates on index metadata. + */ +public class MockIndexMetadataBuilder { + + private static final Settings INDEX_SETTINGS = Settings.builder() + .put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), Version.CURRENT) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) + .build(); + + private Metadata.Builder delegate = new Metadata.Builder(); + private Map nameToIndexMetadataBuilderMap = new HashMap<>(); + + public static MockIndexMetadataBuilder indices(String... indices) { + MockIndexMetadataBuilder builder = new MockIndexMetadataBuilder(); + + for (String index : indices) { + builder.index(index); + } + + return builder; + } + + public static MockIndexMetadataBuilder dataStreams(String... 
dataStreams) { + MockIndexMetadataBuilder builder = new MockIndexMetadataBuilder(); + + for (String dataStream : dataStreams) { + builder.dataStream(dataStream); + } + + return builder; + } + + public Metadata build() { + for (IndexMetadata.Builder indexMetadataBuilder : nameToIndexMetadataBuilderMap.values()) { + this.delegate.put(indexMetadataBuilder); + } + + return this.delegate.build(); + } + + public MockIndexMetadataBuilder index(String indexName) { + return index(indexName, IndexMetadata.State.OPEN); + } + + public MockIndexMetadataBuilder index(String indexName, IndexMetadata.State state) { + getIndexMetadataBuilder(indexName, state); + return this; + } + + public AliasBuilder alias(String alias) { + return new AliasBuilder(alias); + } + + public MockIndexMetadataBuilder dataStream(String dataStream) { + return dataStream(dataStream, 3); + } + + public MockIndexMetadataBuilder dataStream(String dataStream, int generations) { + List backingIndices = new ArrayList<>(); + + for (int i = 1; i <= generations; i++) { + String backingIndexName = DataStream.getDefaultBackingIndexName(dataStream, i); + backingIndices.add(new Index(backingIndexName, backingIndexName)); + getIndexMetadataBuilder(backingIndexName, IndexMetadata.State.OPEN); + } + + DataStream dataStreamMetadata = new DataStream(dataStream, new DataStream.TimestampField("@timestamp"), backingIndices); + this.delegate.put(dataStreamMetadata); + + return this; + } + + private IndexMetadata.Builder getIndexMetadataBuilder(String indexName, IndexMetadata.State state) { + IndexMetadata.Builder result = this.nameToIndexMetadataBuilderMap.get(indexName); + + if (result != null) { + return result; + } + + result = new IndexMetadata.Builder(indexName).state(state) + .settings(Settings.builder().put(INDEX_SETTINGS).put(IndexMetadata.SETTING_INDEX_UUID, indexName).build()); + + this.nameToIndexMetadataBuilderMap.put(indexName, result); + + return result; + } + + public class AliasBuilder { + private String aliasName; + + private AliasBuilder(String alias) { + this.aliasName = alias; + } + + public MockIndexMetadataBuilder of(String... 
indices) { + AliasMetadata aliasMetadata = new AliasMetadata.Builder(aliasName).build(); + + for (String index : indices) { + IndexMetadata.Builder indexMetadataBuilder = getIndexMetadataBuilder(index, IndexMetadata.State.OPEN); + indexMetadataBuilder.putAlias(aliasMetadata); + } + + return MockIndexMetadataBuilder.this; + } + } +} diff --git a/src/integrationTest/java/org/opensearch/test/framework/TestSecurityConfig.java b/src/integrationTest/java/org/opensearch/test/framework/TestSecurityConfig.java index 9edf77f75c..38def260ed 100644 --- a/src/integrationTest/java/org/opensearch/test/framework/TestSecurityConfig.java +++ b/src/integrationTest/java/org/opensearch/test/framework/TestSecurityConfig.java @@ -42,6 +42,7 @@ import java.util.Set; import java.util.function.Supplier; import java.util.stream.Collectors; +import java.util.stream.Stream; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -57,11 +58,19 @@ import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.common.Strings; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.index.query.QueryBuilder; import org.opensearch.security.hasher.PasswordHasher; import org.opensearch.security.hasher.PasswordHasherFactory; import org.opensearch.security.securityconf.impl.CType; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.ActionGroupsV7; +import org.opensearch.security.securityconf.impl.v7.ConfigV7; +import org.opensearch.security.securityconf.impl.v7.InternalUserV7; +import org.opensearch.security.securityconf.impl.v7.RoleMappingsV7; +import org.opensearch.security.securityconf.impl.v7.RoleV7; import org.opensearch.security.support.ConfigConstants; import org.opensearch.test.framework.cluster.OpenSearchClientProvider.UserCredentialsHolder; @@ -160,6 +169,13 @@ public TestSecurityConfig user(User user) { return this; } + public TestSecurityConfig users(User... users) { + for (User user : users) { + this.user(user); + } + return this; + } + public TestSecurityConfig withRestAdminUser(final String name, final String... permissions) { if (!internalUsers.containsKey(name)) { user(new User(name, "REST Admin with permissions: " + Arrays.toString(permissions)).reserved(true)); @@ -517,6 +533,10 @@ public Object getAttribute(String attributeName) { return attributes.get(attributeName); } + public Map getAttributes() { + return this.attributes; + } + @Override public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException { xContentBuilder.startObject(); @@ -665,6 +685,22 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(name, clusterPermissions, indexPermissions, hidden, reserved, description); } + + public static SecurityDynamicConfiguration toRolesConfiguration( + TestSecurityConfig.Role... 
roles + ) { + try { + return SecurityDynamicConfiguration.fromJson( + configToJson(CType.ROLES, Stream.of(roles).collect(Collectors.toMap(r -> r.name, r -> r))), + CType.ROLES, + 2, + 0, + 0 + ); + } catch (IOException e) { + throw new RuntimeException(e); + } + } } public static class RoleMapping implements ToXContentObject { @@ -778,6 +814,11 @@ public IndexPermission dls(String dlsQuery) { return this; } + public IndexPermission dls(QueryBuilder dlsQuery) { + this.dlsQuery = Strings.toString(MediaTypeRegistry.JSON, dlsQuery); + return this; + } + public IndexPermission fls(String... fls) { this.fls = Arrays.asList(fls); return this; @@ -1023,6 +1064,52 @@ public void updateInternalUsersConfiguration(Client client, List users) { updateConfigInIndex(client, CType.INTERNALUSERS, userMap); } + public SecurityDynamicConfiguration getSecurityConfiguration() { + try { + return SecurityDynamicConfiguration.fromJson( + singleEntryConfigToJson(CType.CONFIG, CType.CONFIG.toLCString(), config), + CType.CONFIG, + 2, + 0, + 0 + ); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public SecurityDynamicConfiguration getInternalUserConfiguration() { + try { + return SecurityDynamicConfiguration.fromJson(configToJson(CType.INTERNALUSERS, internalUsers), CType.INTERNALUSERS, 2, 0, 0); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public SecurityDynamicConfiguration getRolesConfiguration() { + try { + return SecurityDynamicConfiguration.fromJson(configToJson(CType.ROLES, roles), CType.ROLES, 2, 0, 0); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public SecurityDynamicConfiguration getRoleMappingsConfiguration() { + try { + return SecurityDynamicConfiguration.fromJson(configToJson(CType.ROLESMAPPING, rolesMapping), CType.ROLESMAPPING, 2, 0, 0); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public SecurityDynamicConfiguration geActionGroupsConfiguration() { + try { + return SecurityDynamicConfiguration.fromJson(configToJson(CType.ACTIONGROUPS, actionGroups), CType.ACTIONGROUPS, 2, 0, 0); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + static String hashPassword(final String clearTextPassword) { return passwordHasher.hash(clearTextPassword.toCharArray()); } @@ -1095,25 +1182,30 @@ private static String configToJson(CType configType, Map configType, String configurationRoot, ToXContentObject config) + throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder(); + + builder.startObject(); + builder.startObject("_meta"); + builder.field("type", configType.toLCString()); + builder.field("config_version", 2); + builder.endObject(); + + builder.field(configurationRoot, config); + + builder.endObject(); + + return builder.toString(); + } + private void writeSingleEntryConfigToIndex(Client client, CType configType, ToXContentObject config) { writeSingleEntryConfigToIndex(client, configType, configType.toLCString(), config); } private void writeSingleEntryConfigToIndex(Client client, CType configType, String configurationRoot, ToXContentObject config) { try { - XContentBuilder builder = XContentFactory.jsonBuilder(); - - builder.startObject(); - builder.startObject("_meta"); - builder.field("type", configType.toLCString()); - builder.field("config_version", 2); - builder.endObject(); - - builder.field(configurationRoot, config); - - builder.endObject(); - - String json = builder.toString(); + String json = singleEntryConfigToJson(configType, configurationRoot, config); 
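+ // singleEntryConfigToJson() is also used by getSecurityConfiguration() above, so the serialization logic stays in one place.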
log.info("Writing security plugin configuration into index " + configType + ":\n" + json); diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalCluster.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalCluster.java index d2c53c1de7..5ae8c0b125 100644 --- a/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalCluster.java +++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalCluster.java @@ -31,6 +31,7 @@ import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -446,6 +447,14 @@ public Builder indices(TestIndex... indices) { return this; } + /** + * Specifies test indices that shall be created upon startup of the cluster. + */ + public Builder indices(Collection indices) { + this.testIndices.addAll(indices); + return this; + } + public Builder users(TestSecurityConfig.User... users) { for (TestSecurityConfig.User user : users) { testSecurityConfig.user(user); diff --git a/src/main/java/org/opensearch/security/OpenSearchSecurityPlugin.java b/src/main/java/org/opensearch/security/OpenSearchSecurityPlugin.java index 80df34cb83..206a356fab 100644 --- a/src/main/java/org/opensearch/security/OpenSearchSecurityPlugin.java +++ b/src/main/java/org/opensearch/security/OpenSearchSecurityPlugin.java @@ -41,7 +41,6 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; @@ -151,7 +150,6 @@ import org.opensearch.security.configuration.DlsFlsRequestValve; import org.opensearch.security.configuration.DlsFlsValveImpl; import org.opensearch.security.configuration.PrivilegesInterceptorImpl; -import org.opensearch.security.configuration.Salt; import org.opensearch.security.configuration.SecurityFlsDlsIndexSearcherWrapper; import org.opensearch.security.dlic.rest.api.Endpoint; import org.opensearch.security.dlic.rest.api.SecurityRestApiActions; @@ -166,9 +164,12 @@ import org.opensearch.security.http.XFFResolver; import org.opensearch.security.identity.NoopPluginSubject; import org.opensearch.security.identity.SecurityTokenManager; +import org.opensearch.security.privileges.ActionPrivileges; +import org.opensearch.security.privileges.PrivilegesEvaluationException; import org.opensearch.security.privileges.PrivilegesEvaluator; import org.opensearch.security.privileges.PrivilegesInterceptor; import org.opensearch.security.privileges.RestLayerPrivilegesEvaluator; +import org.opensearch.security.privileges.dlsfls.DlsFlsBaseContext; import org.opensearch.security.resolver.IndexResolverReplacer; import org.opensearch.security.rest.DashboardsInfoAction; import org.opensearch.security.rest.SecurityConfigUpdateAction; @@ -177,6 +178,7 @@ import org.opensearch.security.rest.SecurityWhoAmIAction; import org.opensearch.security.rest.TenantInfoAction; import org.opensearch.security.securityconf.DynamicConfigFactory; +import org.opensearch.security.securityconf.impl.CType; import org.opensearch.security.setting.OpensearchDynamicSetting; import org.opensearch.security.setting.TransportPassiveAuthSetting; import org.opensearch.security.ssl.ExternalSecurityKeyStore; @@ -193,8 +195,6 @@ import org.opensearch.security.support.ModuleInfo; import org.opensearch.security.support.ReflectionHelper; import org.opensearch.security.support.SecuritySettings; -import 
org.opensearch.security.support.SecurityUtils; -import org.opensearch.security.support.WildcardMatcher; import org.opensearch.security.transport.DefaultInterClusterRequestEvaluator; import org.opensearch.security.transport.InterClusterRequestEvaluator; import org.opensearch.security.transport.SecurityInterceptor; @@ -262,9 +262,9 @@ public final class OpenSearchSecurityPlugin extends OpenSearchSecuritySSLPlugin private volatile IndexResolverReplacer irr; private final AtomicReference namedXContentRegistry = new AtomicReference<>(NamedXContentRegistry.EMPTY);; private volatile DlsFlsRequestValve dlsFlsValve = null; - private volatile Salt salt; private volatile OpensearchDynamicSetting transportPassiveAuthSetting; private volatile PasswordHasher passwordHasher; + private volatile DlsFlsBaseContext dlsFlsBaseContext; public static boolean isActionTraceEnabled() { @@ -701,7 +701,8 @@ public void onIndexModule(IndexModule indexModule) { auditLog, ciol, evaluator, - salt + dlsFlsValve::getCurrentConfig, + dlsFlsBaseContext ) ); indexModule.forceQueryCacheProvider((indexSettings, nodeCache) -> new QueryCache() { @@ -723,28 +724,18 @@ public void clear(String reason) { @Override public Weight doCache(Weight weight, QueryCachingPolicy policy) { - @SuppressWarnings("unchecked") - final Map> allowedFlsFields = (Map>) HeaderHelper.deserializeSafeFromHeader( - threadPool.getThreadContext(), - ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER - ); - - if (SecurityUtils.evalMap(allowedFlsFields, index().getName()) != null) { - return weight; - } else { - @SuppressWarnings("unchecked") - final Map> maskedFieldsMap = (Map>) HeaderHelper.deserializeSafeFromHeader( - threadPool.getThreadContext(), - ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER - ); - - if (SecurityUtils.evalMap(maskedFieldsMap, index().getName()) != null) { + try { + if (dlsFlsValve.hasFlsOrFieldMasking(index().getName())) { + // Do not cache return weight; } else { return nodeCache.doCache(weight, policy); } + } catch (PrivilegesEvaluationException e) { + log.error("Error while evaluating FLS configuration", e); + // We fall back to no caching + return weight; } - } }); @@ -817,17 +808,16 @@ public void onQueryPhase(SearchContext searchContext, long tookInNanos) { return; } - @SuppressWarnings("unchecked") - final Map> maskedFieldsMap = (Map>) HeaderHelper.deserializeSafeFromHeader( - threadPool.getThreadContext(), - ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER - ); - final String maskedEval = SecurityUtils.evalMap(maskedFieldsMap, indexModule.getIndex().getName()); - if (maskedEval != null) { - final Set mf = maskedFieldsMap.get(maskedEval); - if (mf != null && !mf.isEmpty()) { + try { + if (dlsFlsValve.hasFieldMasking(indexModule.getIndex().getName())) { dlsFlsValve.onQueryPhase(queryResult); } + } catch (PrivilegesEvaluationException e) { + log.error("Error while evaluating field masking config", e); + // It is safe to call the code nevertheless, as this code does not enforce any privileges. + // Rather, it performs some fixes to keep aggregations happy after field masking has been + // applied. If no field masking has been applied, this should be a no-op. 
+ dlsFlsValve.onQueryPhase(queryResult); } } }.toListener()); @@ -1061,10 +1051,9 @@ public Collection createComponents( final ClusterInfoHolder cih = new ClusterInfoHolder(this.cs.getClusterName().value()); this.cs.addListener(cih); - this.salt = Salt.from(settings); final IndexNameExpressionResolver resolver = new IndexNameExpressionResolver(threadPool.getThreadContext()); - irr = new IndexResolverReplacer(resolver, clusterService, cih); + irr = new IndexResolverReplacer(resolver, clusterService::state, cih); final String DEFAULT_INTERCLUSTER_REQUEST_EVALUATOR_CLASS = DefaultInterClusterRequestEvaluator.class.getName(); InterClusterRequestEvaluator interClusterRequestEvaluator = new DefaultInterClusterRequestEvaluator(settings); @@ -1082,18 +1071,9 @@ public Collection createComponents( namedXContentRegistry.set(xContentRegistry); if (SSLConfig.isSslOnlyMode()) { - dlsFlsValve = new DlsFlsRequestValve.NoopDlsFlsRequestValve(); auditLog = new NullAuditLog(); privilegesInterceptor = new PrivilegesInterceptor(resolver, clusterService, localClient, threadPool); } else { - dlsFlsValve = new DlsFlsValveImpl( - settings, - localClient, - clusterService, - resolver, - xContentRegistry, - threadPool.getThreadContext() - ); auditLog = new AuditLogImpl(settings, configPath, localClient, threadPool, resolver, clusterService, environment); privilegesInterceptor = new PrivilegesInterceptorImpl(resolver, clusterService, localClient, threadPool); } @@ -1116,7 +1096,9 @@ public Collection createComponents( evaluator = new PrivilegesEvaluator( clusterService, + clusterService::state, threadPool, + threadPool.getThreadContext(), cr, resolver, auditLog, @@ -1127,6 +1109,23 @@ public Collection createComponents( namedXContentRegistry.get() ); + dlsFlsBaseContext = new DlsFlsBaseContext(evaluator, threadPool.getThreadContext(), adminDns); + + if (SSLConfig.isSslOnlyMode()) { + dlsFlsValve = new DlsFlsRequestValve.NoopDlsFlsRequestValve(); + } else { + dlsFlsValve = new DlsFlsValveImpl( + settings, + localClient, + clusterService, + resolver, + xContentRegistry, + threadPool, + dlsFlsBaseContext + ); + cr.subscribeOnChange(configMap -> { ((DlsFlsValveImpl) dlsFlsValve).updateConfiguration(cr.getConfiguration(CType.ROLES)); }); + } + sf = new SecurityFilter(settings, evaluator, adminDns, dlsFlsValve, auditLog, threadPool, cs, compatConfig, irr, xffResolver); final String principalExtractorClass = settings.get(SSLConfigConstants.SECURITY_SSL_TRANSPORT_PRINCIPAL_EXTRACTOR_CLASS, null); @@ -1137,7 +1136,7 @@ public Collection createComponents( principalExtractor = ReflectionHelper.instantiatePrincipalExtractor(principalExtractorClass); } - restLayerEvaluator = new RestLayerPrivilegesEvaluator(clusterService, threadPool); + restLayerEvaluator = new RestLayerPrivilegesEvaluator(evaluator); securityRestHandler = new SecurityRestFilter( backendRegistry, @@ -1156,16 +1155,12 @@ public Collection createComponents( dcf.registerDCFListener(irr); dcf.registerDCFListener(xffResolver); dcf.registerDCFListener(evaluator); - dcf.registerDCFListener(restLayerEvaluator); dcf.registerDCFListener(securityRestHandler); dcf.registerDCFListener(tokenManager); if (!(auditLog instanceof NullAuditLog)) { // Don't register if advanced modules is disabled in which case auditlog is instance of NullAuditLog dcf.registerDCFListener(auditLog); } - if (dlsFlsValve instanceof DlsFlsValveImpl) { - dcf.registerDCFListener(dlsFlsValve); - } cr.setDynamicConfigFactory(dcf); @@ -2042,6 +2037,9 @@ public List> getSettings() { Property.Filtered ) ); + 
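+ // Settings returned from getSettings() are registered with OpenSearch so that it accepts them as valid cluster settings.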
+ // Privileges evaluation + settings.add(ActionPrivileges.PRECOMPUTED_PRIVILEGES_MAX_HEAP_SIZE); } return settings; @@ -2088,43 +2086,18 @@ public Collection> getGuiceServiceClasses() @Override public Function> getFieldFilter() { return index -> { - if (threadPool == null) { + if (threadPool == null || dlsFlsValve == null) { return field -> true; } - @SuppressWarnings("unchecked") - final Map> allowedFlsFields = (Map>) HeaderHelper.deserializeSafeFromHeader( - threadPool.getThreadContext(), - ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER - ); - - final String eval = SecurityUtils.evalMap(allowedFlsFields, index); - - if (eval == null) { - return field -> true; - } else { - final Set includesExcludes = allowedFlsFields.get(eval); - final Set includesSet = new HashSet<>(includesExcludes.size()); - final Set excludesSet = new HashSet<>(includesExcludes.size()); - - for (final String incExc : includesExcludes) { - final char firstChar = incExc.charAt(0); - - if (firstChar == '!' || firstChar == '~') { - excludesSet.add(incExc.substring(1)); - } else { - includesSet.add(incExc); - } - } - - if (!excludesSet.isEmpty()) { - WildcardMatcher excludeMatcher = WildcardMatcher.from(excludesSet); - return field -> !excludeMatcher.test(handleKeyword(field)); - } else { - WildcardMatcher includeMatcher = WildcardMatcher.from(includesSet); - return field -> includeMatcher.test(handleKeyword(field)); + return field -> { + try { + return dlsFlsValve.isFieldAllowed(index, field); + } catch (PrivilegesEvaluationException e) { + log.error("Error while evaluating FLS for {}.{}", index, field, e); + return false; } - } + }; }; } @@ -2138,13 +2111,6 @@ public Collection getSystemIndexDescriptors(Settings sett return Collections.singletonList(systemIndexDescriptor); } - private static String handleKeyword(final String field) { - if (field != null && field.endsWith(KEYWORD)) { - return field.substring(0, field.length() - KEYWORD.length()); - } - return field; - } - @Override public Subject getCurrentSubject() { // Not supported diff --git a/src/main/java/org/opensearch/security/compliance/FieldReadCallback.java b/src/main/java/org/opensearch/security/compliance/FieldReadCallback.java index bcbf73ee09..60368afd8c 100644 --- a/src/main/java/org/opensearch/security/compliance/FieldReadCallback.java +++ b/src/main/java/org/opensearch/security/compliance/FieldReadCallback.java @@ -32,10 +32,10 @@ import org.opensearch.index.mapper.Uid; import org.opensearch.security.auditlog.AuditLog; import org.opensearch.security.dlic.rest.support.Utils; +import org.opensearch.security.privileges.dlsfls.FieldMasking; import org.opensearch.security.support.HeaderHelper; import org.opensearch.security.support.JsonFlattener; import org.opensearch.security.support.SourceFieldsContext; -import org.opensearch.security.support.WildcardMatcher; //TODO We need to deal with caching!! 
//Currently we disable caching (and realtime requests) when FLS or DLS is applied @@ -49,7 +49,7 @@ public final class FieldReadCallback { // private final ThreadContext threadContext; // private final ClusterService clusterService; private final Index index; - private final WildcardMatcher maskedFieldsMatcher; + private final FieldMasking.FieldMaskingRule fmRule; private final AuditLog auditLog; private Function, Map> filterFunction; private SourceFieldsContext sfc; @@ -61,7 +61,7 @@ public FieldReadCallback( final IndexService indexService, final ClusterService clusterService, final AuditLog auditLog, - final WildcardMatcher maskedFieldsMatcher, + final FieldMasking.FieldMaskingRule fmRule, ShardId shardId ) { super(); @@ -69,7 +69,7 @@ public FieldReadCallback( // this.clusterService = Objects.requireNonNull(clusterService); this.index = Objects.requireNonNull(indexService).index(); this.auditLog = auditLog; - this.maskedFieldsMatcher = maskedFieldsMatcher; + this.fmRule = fmRule; this.shardId = shardId; try { sfc = (SourceFieldsContext) HeaderHelper.deserializeSafeFromHeader(threadContext, "_opendistro_security_source_field_context"); @@ -88,7 +88,8 @@ public FieldReadCallback( } private boolean recordField(final String fieldName, boolean isStringField) { - return !(isStringField && maskedFieldsMatcher.test(fieldName)) + // We do not record fields in read history if they are masked. + return !(isStringField && fmRule.isMasked(fieldName)) && auditLog.getComplianceConfig().readHistoryEnabledForField(index.getName(), fieldName); } diff --git a/src/main/java/org/opensearch/security/configuration/ConfigurationChangeListener.java b/src/main/java/org/opensearch/security/configuration/ConfigurationChangeListener.java index cc410c0158..761cc989d1 100644 --- a/src/main/java/org/opensearch/security/configuration/ConfigurationChangeListener.java +++ b/src/main/java/org/opensearch/security/configuration/ConfigurationChangeListener.java @@ -29,6 +29,7 @@ /** * Callback function on change particular configuration */ +@FunctionalInterface public interface ConfigurationChangeListener { /** diff --git a/src/main/java/org/opensearch/security/configuration/ConfigurationLoaderSecurity7.java b/src/main/java/org/opensearch/security/configuration/ConfigurationLoaderSecurity7.java index 571bb802db..19a11a86cb 100644 --- a/src/main/java/org/opensearch/security/configuration/ConfigurationLoaderSecurity7.java +++ b/src/main/java/org/opensearch/security/configuration/ConfigurationLoaderSecurity7.java @@ -139,7 +139,8 @@ public void singleFailure(Failure failure) { "Failure {} retrieving configuration for {} (index={})", failure == null ? 
null : failure.getMessage(), Arrays.toString(events), - securityIndex + securityIndex, + failure.getFailure() ); } diff --git a/src/main/java/org/opensearch/security/configuration/ConfigurationRepository.java b/src/main/java/org/opensearch/security/configuration/ConfigurationRepository.java index f8fe70ca1a..84392018c1 100644 --- a/src/main/java/org/opensearch/security/configuration/ConfigurationRepository.java +++ b/src/main/java/org/opensearch/security/configuration/ConfigurationRepository.java @@ -567,7 +567,7 @@ public synchronized void subscribeOnChange(ConfigurationChangeListener listener) private synchronized void notifyAboutChanges(ConfigurationMap typeToConfig) { for (ConfigurationChangeListener listener : configurationChangedListener) { try { - LOGGER.debug("Notify {} listener about change configuration with type {}", listener); + LOGGER.debug("Notify {} listener about change configuration with type {}", listener, typeToConfig); listener.onChange(typeToConfig); } catch (Exception e) { LOGGER.error("{} listener errored: " + e, listener, e); diff --git a/src/main/java/org/opensearch/security/configuration/DlsFilterLevelActionHandler.java b/src/main/java/org/opensearch/security/configuration/DlsFilterLevelActionHandler.java index 59beae6861..08e06436aa 100644 --- a/src/main/java/org/opensearch/security/configuration/DlsFilterLevelActionHandler.java +++ b/src/main/java/org/opensearch/security/configuration/DlsFilterLevelActionHandler.java @@ -59,12 +59,13 @@ import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.security.privileges.DocumentAllowList; import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.dlsfls.DlsRestriction; +import org.opensearch.security.privileges.dlsfls.DocumentPrivileges; +import org.opensearch.security.privileges.dlsfls.IndexToRuleMap; import org.opensearch.security.queries.QueryBuilderTraverser; import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; -import org.opensearch.security.securityconf.EvaluatedDlsFlsConfig; import org.opensearch.security.support.ConfigConstants; import org.opensearch.security.support.ReflectiveAttributeAccessors; -import org.opensearch.security.support.SecurityUtils; public class DlsFilterLevelActionHandler { private static final Logger log = LogManager.getLogger(DlsFilterLevelActionHandler.class); @@ -76,13 +77,12 @@ public class DlsFilterLevelActionHandler { public static boolean handle( PrivilegesEvaluationContext context, - EvaluatedDlsFlsConfig evaluatedDlsFlsConfig, + IndexToRuleMap dlsRestrictionMap, ActionListener listener, Client nodeClient, ClusterService clusterService, IndicesService indicesService, IndexNameExpressionResolver resolver, - DlsQueryParser dlsQueryParser, ThreadContext threadContext ) { @@ -115,13 +115,12 @@ public static boolean handle( return new DlsFilterLevelActionHandler( context, - evaluatedDlsFlsConfig, + dlsRestrictionMap, listener, nodeClient, clusterService, indicesService, resolver, - dlsQueryParser, threadContext ).handle(); } @@ -129,11 +128,10 @@ public static boolean handle( private final String action; private final ActionRequest request; private final ActionListener listener; - private final EvaluatedDlsFlsConfig evaluatedDlsFlsConfig; + private final IndexToRuleMap dlsRestrictionMap; private final Resolved resolved; private final boolean requiresIndexScoping; private final Client nodeClient; - private final DlsQueryParser dlsQueryParser; private final ClusterService clusterService; private 
final IndicesService indicesService; private final ThreadContext threadContext; @@ -143,24 +141,22 @@ public static boolean handle( DlsFilterLevelActionHandler( PrivilegesEvaluationContext context, - EvaluatedDlsFlsConfig evaluatedDlsFlsConfig, + IndexToRuleMap dlsRestrictionMap, ActionListener listener, Client nodeClient, ClusterService clusterService, IndicesService indicesService, IndexNameExpressionResolver resolver, - DlsQueryParser dlsQueryParser, ThreadContext threadContext ) { this.action = context.getAction(); this.request = context.getRequest(); this.listener = listener; - this.evaluatedDlsFlsConfig = evaluatedDlsFlsConfig; + this.dlsRestrictionMap = dlsRestrictionMap; this.resolved = context.getResolvedRequest(); this.nodeClient = nodeClient; this.clusterService = clusterService; this.indicesService = indicesService; - this.dlsQueryParser = dlsQueryParser; this.threadContext = threadContext; this.resolver = resolver; @@ -174,7 +170,7 @@ private boolean handle() { threadContext.putHeader(ConfigConstants.OPENDISTRO_SECURITY_FILTER_LEVEL_DLS_DONE, request.toString()); try { - if (!createQueryExtension()) { + if (!modifyQuery()) { return true; } @@ -221,7 +217,7 @@ private boolean handle(SearchRequest searchRequest, StoredContext ctx) { if (localClusterAlias != null) { try { - createQueryExtension(localClusterAlias); + modifyQuery(localClusterAlias); } catch (Exception e) { log.error("Unable to handle filter level DLS", e); listener.onFailure(new OpenSearchSecurityException("Unable to handle filter level DLS", e)); @@ -459,12 +455,12 @@ private GetResult searchHitToGetResult(SearchHit hit) { ); } - private boolean createQueryExtension() throws IOException { - return createQueryExtension(null); + private boolean modifyQuery() throws IOException { + return modifyQuery(null); } - private boolean createQueryExtension(String localClusterAlias) throws IOException { - Map> filterLevelQueries = evaluatedDlsFlsConfig.getDlsQueriesByIndex(); + private boolean modifyQuery(String localClusterAlias) throws IOException { + Map filterLevelQueries = dlsRestrictionMap.getIndexMap(); BoolQueryBuilder dlsQueryBuilder = QueryBuilders.boolQuery().minimumShouldMatch(1); DocumentAllowList documentAllowlist = new DocumentAllowList(); @@ -474,8 +470,6 @@ private boolean createQueryExtension(String localClusterAlias) throws IOExceptio Set indices = resolved.getAllIndicesResolved(clusterService, resolver); for (String index : indices) { - String dlsEval = SecurityUtils.evalMap(filterLevelQueries, index); - String prefixedIndex; if (localClusterAlias != null) { @@ -484,18 +478,9 @@ private boolean createQueryExtension(String localClusterAlias) throws IOExceptio prefixedIndex = index; } - if (dlsEval == null) { - if (requiresIndexScoping) { - // This index has no DLS configured, thus it is unrestricted. - // To allow the index in a complex query, we need to add the query below to let the index pass. - dlsQueryBuilder.should(QueryBuilders.termQuery("_index", prefixedIndex)); - } - continue; - } - - Set unparsedDlsQueries = filterLevelQueries.get(dlsEval); + DlsRestriction dlsRestriction = filterLevelQueries.get(index); - if (unparsedDlsQueries == null || unparsedDlsQueries.isEmpty()) { + if (dlsRestriction == null || dlsRestriction.isUnrestricted()) { if (requiresIndexScoping) { // This index has no DLS configured, thus it is unrestricted. // To allow the index in a complex query, we need to add the query below to let the index pass. 
@@ -504,22 +489,22 @@ private boolean createQueryExtension(String localClusterAlias) throws IOExceptio continue; } - for (String unparsedDlsQuery : unparsedDlsQueries) { + for (DocumentPrivileges.RenderedDlsQuery parsedDlsQuery : dlsRestriction.getQueries()) { queryCount++; - QueryBuilder parsedDlsQuery = dlsQueryParser.parse(unparsedDlsQuery); - if (!requiresIndexScoping) { - dlsQueryBuilder.should(parsedDlsQuery); + dlsQueryBuilder.should(parsedDlsQuery.getQueryBuilder()); } else { // The original request referred to several indices. That's why we have to scope each query to the index it is meant for dlsQueryBuilder.should( - QueryBuilders.boolQuery().must(QueryBuilders.termQuery("_index", prefixedIndex)).must(parsedDlsQuery) + QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery("_index", prefixedIndex)) + .must(parsedDlsQuery.getQueryBuilder()) ); } Set queryBuilders = QueryBuilderTraverser.findAll( - parsedDlsQuery, + parsedDlsQuery.getQueryBuilder(), (q) -> (q instanceof TermsQueryBuilder) && ((TermsQueryBuilder) q).termsLookup() != null ); diff --git a/src/main/java/org/opensearch/security/configuration/DlsFlsFilterLeafReader.java b/src/main/java/org/opensearch/security/configuration/DlsFlsFilterLeafReader.java index b85542393b..bddf4731bb 100644 --- a/src/main/java/org/opensearch/security/configuration/DlsFlsFilterLeafReader.java +++ b/src/main/java/org/opensearch/security/configuration/DlsFlsFilterLeafReader.java @@ -17,18 +17,11 @@ //https://github.com/salyh/elasticsearch-security-plugin/blob/4b53974a43b270ae77ebe79d635e2484230c9d01/src/main/java/org/elasticsearch/plugins/security/filter/DlsWriteFilter.java import java.io.IOException; -import java.util.Collections; -import java.util.HashSet; +import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import java.util.ListIterator; -import java.util.Map; -import java.util.Optional; import java.util.Set; -import java.util.function.Function; -import com.google.common.base.Joiner; -import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterators; import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.index.BinaryDocValues; @@ -65,157 +58,73 @@ import org.opensearch.ExceptionsHelper; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.collect.Tuple; import org.opensearch.common.lucene.index.SequentialStoredFieldsLeafReader; import org.opensearch.common.util.concurrent.ThreadContext; -import org.opensearch.common.xcontent.XContentHelper; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.common.xcontent.support.XContentMapValues; -import org.opensearch.core.common.bytes.BytesArray; -import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.index.shard.ShardId; -import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.IndexService; import org.opensearch.security.auditlog.AuditLog; import org.opensearch.security.compliance.ComplianceConfig; import org.opensearch.security.compliance.FieldReadCallback; -import org.opensearch.security.dlic.rest.support.Utils; +import org.opensearch.security.privileges.dlsfls.FieldMasking; +import org.opensearch.security.privileges.dlsfls.FieldPrivileges; +import org.opensearch.security.privileges.dlsfls.FlsStoredFieldVisitor; import org.opensearch.security.support.ConfigConstants; -import org.opensearch.security.support.HeaderHelper; -import org.opensearch.security.support.MapUtils; -import 
org.opensearch.security.support.SecurityUtils; -import org.opensearch.security.support.WildcardMatcher; class DlsFlsFilterLeafReader extends SequentialStoredFieldsLeafReader { private static final String KEYWORD = ".keyword"; - private static final String[] EMPTY_STRING_ARRAY = new String[0]; - private final Set includesSet; - private final Set excludesSet; private final FieldInfos flsFieldInfos; - private final boolean flsEnabled; - private String[] includes; - private String[] excludes; - private boolean canOptimize = true; - private Function, Map> filterFunction; private final IndexService indexService; private final ThreadContext threadContext; private final ClusterService clusterService; private final AuditLog auditlog; - private final MaskedFieldsMap maskedFieldsMap; private final ShardId shardId; - private final boolean maskFields; - private final Salt salt; - private final String maskingAlgorithmDefault; + private final FieldPrivileges.FlsRule flsRule; + private final FieldMasking.FieldMaskingRule fmRule; + private final Set metaFields; private DlsGetEvaluator dge = null; DlsFlsFilterLeafReader( final LeafReader delegate, - final Set includesExcludes, + final FieldPrivileges.FlsRule flsRule, final Query dlsQuery, final IndexService indexService, final ThreadContext threadContext, final ClusterService clusterService, final AuditLog auditlog, - final Set maskedFields, + final FieldMasking.FieldMaskingRule fmRule, final ShardId shardId, - final Salt salt + final Set metaFields ) { super(delegate); - maskFields = (maskedFields != null && maskedFields.size() > 0); - this.indexService = indexService; this.threadContext = threadContext; this.clusterService = clusterService; this.auditlog = auditlog; - this.salt = salt; - this.maskingAlgorithmDefault = clusterService.getSettings().get(ConfigConstants.SECURITY_MASKED_FIELDS_ALGORITHM_DEFAULT); - this.maskedFieldsMap = MaskedFieldsMap.extractMaskedFields(maskFields, maskedFields, salt, maskingAlgorithmDefault); this.shardId = shardId; - flsEnabled = includesExcludes != null && !includesExcludes.isEmpty(); - - if (flsEnabled) { - - final FieldInfos infos = delegate.getFieldInfos(); - this.includesSet = new HashSet<>(includesExcludes.size()); - this.excludesSet = new HashSet<>(includesExcludes.size()); - - for (final String incExc : includesExcludes) { - if (canOptimize && (incExc.indexOf('.') > -1 || incExc.indexOf('*') > -1)) { - canOptimize = false; - } - - final char firstChar = incExc.charAt(0); - - if (firstChar == '!' 
|| firstChar == '~') { - excludesSet.add(incExc.substring(1)); - excludesSet.add(incExc.substring(1) + KEYWORD); - } else { - includesSet.add(incExc); - } - } - - int i = 0; - final FieldInfo[] fa = new FieldInfo[infos.size()]; - - if (canOptimize) { - if (!excludesSet.isEmpty()) { - for (final FieldInfo info : infos) { - if (!excludesSet.contains(info.name)) { - fa[i++] = info; - } - } - } else { - for (final String inc : includesSet) { - FieldInfo f; - if ((f = infos.fieldInfo(inc)) != null) { - fa[i++] = f; - } - } - } - } else { - if (!excludesSet.isEmpty()) { - WildcardMatcher matcher = WildcardMatcher.from(excludesSet); - for (final FieldInfo info : infos) { - if (!matcher.test(info.name)) { - fa[i++] = info; - } - } + this.flsRule = flsRule; + this.fmRule = fmRule; + this.metaFields = metaFields; - this.excludes = excludesSet.toArray(EMPTY_STRING_ARRAY); + try { + if (!flsRule.isAllowAll()) { + FieldInfos originalFieldInfos = delegate.getFieldInfos(); + List restrictedFieldInfos = new ArrayList<>(originalFieldInfos.size()); - } else { - WildcardMatcher matcher = WildcardMatcher.from(includesSet); - for (final FieldInfo info : infos) { - if (matcher.test(info.name)) { - fa[i++] = info; - } + for (FieldInfo fieldInfo : originalFieldInfos) { + if (metaFields.contains(fieldInfo.name) || flsRule.isAllowed(fieldInfo.name)) { + restrictedFieldInfos.add(fieldInfo); } - - this.includes = includesSet.toArray(EMPTY_STRING_ARRAY); } - if (!excludesSet.isEmpty()) { - filterFunction = XContentMapValues.filter(null, excludes); - } else { - filterFunction = XContentMapValues.filter(includes, null); - } + this.flsFieldInfos = new FieldInfos(restrictedFieldInfos.toArray(new FieldInfo[restrictedFieldInfos.size()])); + } else { + this.flsFieldInfos = delegate.getFieldInfos(); } - final FieldInfo[] tmp = new FieldInfo[i]; - System.arraycopy(fa, 0, tmp, 0, i); - this.flsFieldInfos = new FieldInfos(tmp); - - } else { - this.includesSet = null; - this.excludesSet = null; - this.flsFieldInfos = null; - } - - try { dge = new DlsGetEvaluator(dlsQuery, in, applyDlsHere()); } catch (IOException e) { throw ExceptionsHelper.convertToOpenSearchException(e); @@ -287,91 +196,53 @@ public boolean hasDeletions() { } } - private static class MaskedFieldsMap { - private final Map maskedFieldsMap; - - private MaskedFieldsMap(Map maskedFieldsMap) { - this.maskedFieldsMap = maskedFieldsMap; - } - - public static MaskedFieldsMap extractMaskedFields( - boolean maskFields, - Set maskedFields, - final Salt salt, - String algorithmDefault - ) { - if (maskFields) { - return new MaskedFieldsMap( - maskedFields.stream() - .map(mf -> new MaskedField(mf, salt, algorithmDefault)) - .collect(ImmutableMap.toImmutableMap(mf -> WildcardMatcher.from(mf.getName()), Function.identity())) - ); - } else { - return new MaskedFieldsMap(Collections.emptyMap()); - } - } - - public Optional getMaskedField(String fieldName) { - return maskedFieldsMap.entrySet().stream().filter(entry -> entry.getKey().test(fieldName)).map(Map.Entry::getValue).findFirst(); - } - - public boolean anyMatch(String fieldName) { - return maskedFieldsMap.keySet().stream().anyMatch(m -> m.test(fieldName)); - } - - public WildcardMatcher getMatcher() { - return WildcardMatcher.from(maskedFieldsMap.keySet()); - } - - } - private static class DlsFlsSubReaderWrapper extends FilterDirectoryReader.SubReaderWrapper { - private final Set includes; + private final FieldPrivileges.FlsRule flsRule; private final Query dlsQuery; private final IndexService indexService; private final 
ThreadContext threadContext; private final ClusterService clusterService; private final AuditLog auditlog; - private final Set maskedFields; + private final FieldMasking.FieldMaskingRule fmRule; private final ShardId shardId; - private final Salt salt; + private final Set metaFields; public DlsFlsSubReaderWrapper( - final Set includes, + final FieldPrivileges.FlsRule flsRule, final Query dlsQuery, final IndexService indexService, final ThreadContext threadContext, final ClusterService clusterService, final AuditLog auditlog, - final Set maskedFields, + final FieldMasking.FieldMaskingRule fmRule, ShardId shardId, - final Salt salt + final Set metaFields ) { - this.includes = includes; + this.flsRule = flsRule; this.dlsQuery = dlsQuery; this.indexService = indexService; this.threadContext = threadContext; this.clusterService = clusterService; this.auditlog = auditlog; - this.maskedFields = maskedFields; + this.fmRule = fmRule; this.shardId = shardId; - this.salt = salt; + this.metaFields = metaFields; } @Override public LeafReader wrap(final LeafReader reader) { return new DlsFlsFilterLeafReader( reader, - includes, + flsRule, dlsQuery, indexService, threadContext, clusterService, auditlog, - maskedFields, + fmRule, shardId, - salt + metaFields ); } @@ -379,66 +250,66 @@ public LeafReader wrap(final LeafReader reader) { static class DlsFlsDirectoryReader extends FilterDirectoryReader { - private final Set includes; + private final FieldPrivileges.FlsRule flsRule; private final Query dlsQuery; private final IndexService indexService; private final ThreadContext threadContext; private final ClusterService clusterService; private final AuditLog auditlog; - private final Set maskedFields; + private final FieldMasking.FieldMaskingRule fmRule; private final ShardId shardId; - private final Salt salt; + private final Set metaFields; public DlsFlsDirectoryReader( final DirectoryReader in, - final Set includes, + final FieldPrivileges.FlsRule flsRule, final Query dlsQuery, final IndexService indexService, final ThreadContext threadContext, final ClusterService clusterService, final AuditLog auditlog, - final Set maskedFields, + final FieldMasking.FieldMaskingRule fmRule, ShardId shardId, - final Salt salt + final Set metaFields ) throws IOException { super( in, new DlsFlsSubReaderWrapper( - includes, + flsRule, dlsQuery, indexService, threadContext, clusterService, auditlog, - maskedFields, + fmRule, shardId, - salt + metaFields ) ); - this.includes = includes; + this.flsRule = flsRule; this.dlsQuery = dlsQuery; this.indexService = indexService; this.threadContext = threadContext; this.clusterService = clusterService; this.auditlog = auditlog; - this.maskedFields = maskedFields; + this.fmRule = fmRule; this.shardId = shardId; - this.salt = salt; + this.metaFields = metaFields; } @Override protected DirectoryReader doWrapDirectoryReader(final DirectoryReader in) throws IOException { return new DlsFlsDirectoryReader( in, - includes, + flsRule, dlsQuery, indexService, threadContext, clusterService, auditlog, - maskedFields, + fmRule, shardId, - salt + metaFields ); } @@ -509,21 +380,15 @@ private StoredFieldVisitor getDlsFlsVisitor(StoredFieldVisitor visitor) { if (complianceConfig != null && complianceConfig.readHistoryEnabledForIndex(indexService.index().getName())) { visitor = new ComplianceAwareStoredFieldVisitor(visitor); } - if (maskFields) { - visitor = new HashingStoredFieldVisitor(visitor); - } - if (flsEnabled) { - visitor = new FlsStoredFieldVisitor(visitor); + if (!flsRule.isAllowAll() || 
!fmRule.isAllowAll()) { + visitor = new FlsStoredFieldVisitor(visitor, flsRule, fmRule, metaFields); } return visitor; } private void finishVisitor(StoredFieldVisitor visitor) { if (visitor instanceof FlsStoredFieldVisitor) { - visitor = ((FlsStoredFieldVisitor) visitor).delegate; - } - if (visitor instanceof HashingStoredFieldVisitor) { - visitor = ((HashingStoredFieldVisitor) visitor).delegate; + visitor = ((FlsStoredFieldVisitor) visitor).delegate(); } if (visitor instanceof ComplianceAwareStoredFieldVisitor) { ((ComplianceAwareStoredFieldVisitor) visitor).finished(); @@ -540,23 +405,18 @@ public void document(final int docID, StoredFieldVisitor visitor) throws IOExcep } } - private boolean isFls(final BytesRef termAsFiledName) { - return isFls(termAsFiledName.utf8ToString()); + private boolean isAllowed(BytesRef term) { + return isAllowed(term.utf8ToString()); } - private boolean isFls(final String name) { - - if (!flsEnabled) { - return true; - } - - return flsFieldInfos.fieldInfo(name) != null; + private boolean isAllowed(String fieldName) { + return this.metaFields.contains(fieldName) || flsRule.isAllowed(fieldName); } @Override public FieldInfos getFieldInfos() { - if (!flsEnabled) { + if (flsRule.isAllowAll()) { return in.getFieldInfos(); } @@ -571,7 +431,7 @@ private class ComplianceAwareStoredFieldVisitor extends StoredFieldVisitor { indexService, clusterService, auditlog, - maskedFieldsMap.getMatcher(), + fmRule, shardId ); @@ -637,207 +497,11 @@ public void finished() { } - private class FlsStoredFieldVisitor extends StoredFieldVisitor { - - private final StoredFieldVisitor delegate; - - public FlsStoredFieldVisitor(final StoredFieldVisitor delegate) { - super(); - this.delegate = delegate; - } - - @Override - public void binaryField(final FieldInfo fieldInfo, final byte[] value) throws IOException { - - if (fieldInfo.name.equals("_source")) { - Map filteredSource = Utils.byteArrayToMutableJsonMap(value); - - if (!canOptimize) { - filteredSource = filterFunction.apply(filteredSource); - } else { - if (!excludesSet.isEmpty()) { - filteredSource.keySet().removeAll(excludesSet); - } else { - filteredSource.keySet().retainAll(includesSet); - } - } - - delegate.binaryField(fieldInfo, Utils.jsonMapToByteArray(filteredSource)); - } else if (shouldInclude(fieldInfo.name)) { - delegate.binaryField(fieldInfo, value); - } - } - - private boolean shouldInclude(String field) { - if (excludesSet != null && !excludesSet.isEmpty()) { - return !excludesSet.contains(field); - } else if (includesSet != null && !includesSet.isEmpty()) { - return includesSet.contains(field); - } - return true; - } - - @Override - public Status needsField(final FieldInfo fieldInfo) throws IOException { - return isFls(fieldInfo.name) ? 
delegate.needsField(fieldInfo) : Status.NO; - } - - @Override - public int hashCode() { - return delegate.hashCode(); - } - - @Override - public void intField(final FieldInfo fieldInfo, final int value) throws IOException { - delegate.intField(fieldInfo, value); - } - - @Override - public void longField(final FieldInfo fieldInfo, final long value) throws IOException { - delegate.longField(fieldInfo, value); - } - - @Override - public void floatField(final FieldInfo fieldInfo, final float value) throws IOException { - delegate.floatField(fieldInfo, value); - } - - @Override - public void doubleField(final FieldInfo fieldInfo, final double value) throws IOException { - delegate.doubleField(fieldInfo, value); - } - - @Override - public boolean equals(final Object obj) { - return delegate.equals(obj); - } - - @Override - public String toString() { - return delegate.toString(); - } - } - - private class HashingStoredFieldVisitor extends StoredFieldVisitor { - - private final StoredFieldVisitor delegate; - - public HashingStoredFieldVisitor(final StoredFieldVisitor delegate) { - super(); - this.delegate = delegate; - } - - @Override - public void binaryField(final FieldInfo fieldInfo, final byte[] value) throws IOException { - - if (fieldInfo.name.equals("_source")) { - final BytesReference bytesRef = new BytesArray(value); - final Tuple> bytesRefTuple = XContentHelper.convertToMap( - bytesRef, - false, - XContentType.JSON - ); - Map filteredSource = bytesRefTuple.v2(); - MapUtils.deepTraverseMap(filteredSource, HASH_CB); - final XContentBuilder xBuilder = XContentBuilder.builder(bytesRefTuple.v1().xContent()).map(filteredSource); - delegate.binaryField(fieldInfo, BytesReference.toBytes(BytesReference.bytes(xBuilder))); - } else { - final MaskedField mf = maskedFieldsMap.getMaskedField(fieldInfo.name).orElse(null); - if (mf != null) { - delegate.binaryField(fieldInfo, mf.mask(value)); - } else { - delegate.binaryField(fieldInfo, value); - } - } - } - - @Override - public Status needsField(final FieldInfo fieldInfo) throws IOException { - return delegate.needsField(fieldInfo); - } - - @Override - public int hashCode() { - return delegate.hashCode(); - } - - @Override - public void intField(final FieldInfo fieldInfo, final int value) throws IOException { - delegate.intField(fieldInfo, value); - } - - @Override - public void longField(final FieldInfo fieldInfo, final long value) throws IOException { - delegate.longField(fieldInfo, value); - } - - @Override - public void floatField(final FieldInfo fieldInfo, final float value) throws IOException { - delegate.floatField(fieldInfo, value); - } - - @Override - public void doubleField(final FieldInfo fieldInfo, final double value) throws IOException { - delegate.doubleField(fieldInfo, value); - } - - @Override - public boolean equals(final Object obj) { - return delegate.equals(obj); - } - - @Override - public String toString() { - return delegate.toString(); - } - } - - private final MapUtils.Callback HASH_CB = new HashingCallback(); - - private class HashingCallback implements MapUtils.Callback { - @SuppressWarnings({ "rawtypes", "unchecked" }) - @Override - public void call(String key, Map map, List stack) { - Object v = map.get(key); - - if (v instanceof List) { - final String field = stack.isEmpty() ? key : Joiner.on('.').join(stack) + "." 
+ key; - final MaskedField mf = maskedFieldsMap.getMaskedField(field).orElse(null); - if (mf != null) { - final List listField = (List) v; - for (ListIterator iterator = listField.listIterator(); iterator.hasNext();) { - final Object listFieldItem = iterator.next(); - - if (listFieldItem instanceof String) { - iterator.set(mf.mask(((String) listFieldItem))); - } else if (listFieldItem instanceof byte[]) { - iterator.set(mf.mask(((byte[]) listFieldItem))); - } - } - } - } - - if (v != null && (v instanceof String || v instanceof byte[])) { - - final String field = stack.isEmpty() ? key : Joiner.on('.').join(stack) + "." + key; - final MaskedField mf = maskedFieldsMap.getMaskedField(field).orElse(null); - if (mf != null) { - if (v instanceof String) { - map.replace(key, mf.mask(((String) v))); - } else { - map.replace(key, mf.mask(((byte[]) v))); - } - } - } - } - - } - @Override public Fields getTermVectors(final int docID) throws IOException { final Fields fields = in.getTermVectors(docID); - if (!flsEnabled || fields == null) { + if (flsRule.isAllowAll() || fields == null) { return fields; } @@ -845,13 +509,13 @@ public Fields getTermVectors(final int docID) throws IOException { @Override public Iterator iterator() { - return Iterators.filter(fields.iterator(), input -> isFls(input)); + return Iterators.filter(fields.iterator(), input -> isAllowed(input)); } @Override public Terms terms(final String field) throws IOException { - if (!isFls(field)) { + if (!isAllowed(field)) { return null; } @@ -869,232 +533,223 @@ public int size() { @Override public NumericDocValues getNumericDocValues(final String field) throws IOException { - return isFls(field) ? in.getNumericDocValues(field) : null; + return isAllowed(field) ? in.getNumericDocValues(field) : null; } @Override public BinaryDocValues getBinaryDocValues(final String field) throws IOException { - return isFls(field) ? wrapBinaryDocValues(field, in.getBinaryDocValues(field)) : null; + return isAllowed(field) ? 
wrapBinaryDocValues(field, in.getBinaryDocValues(field)) : null; } private BinaryDocValues wrapBinaryDocValues(final String field, final BinaryDocValues binaryDocValues) { + FieldMasking.FieldMaskingRule.Field fmRuleField = fmRule.get(field); - final MaskedFieldsMap maskedFieldsMap; - - if (binaryDocValues != null && ((maskedFieldsMap = getRuntimeMaskedFieldInfo()) != null)) { - final MaskedField mf = maskedFieldsMap.getMaskedField(handleKeyword(field)).orElse(null); + if (binaryDocValues == null || fmRuleField == null) { + return binaryDocValues; + } - if (mf != null) { - return new BinaryDocValues() { + return new BinaryDocValues() { - @Override - public int nextDoc() throws IOException { - return binaryDocValues.nextDoc(); - } + @Override + public int nextDoc() throws IOException { + return binaryDocValues.nextDoc(); + } - @Override - public int docID() { - return binaryDocValues.docID(); - } + @Override + public int docID() { + return binaryDocValues.docID(); + } - @Override - public long cost() { - return binaryDocValues.cost(); - } + @Override + public long cost() { + return binaryDocValues.cost(); + } - @Override - public int advance(int target) throws IOException { - return binaryDocValues.advance(target); - } + @Override + public int advance(int target) throws IOException { + return binaryDocValues.advance(target); + } - @Override - public boolean advanceExact(int target) throws IOException { - return binaryDocValues.advanceExact(target); - } + @Override + public boolean advanceExact(int target) throws IOException { + return binaryDocValues.advanceExact(target); + } - @Override - public BytesRef binaryValue() throws IOException { - return mf.mask(binaryDocValues.binaryValue()); - } - }; + @Override + public BytesRef binaryValue() throws IOException { + return fmRuleField.apply(binaryDocValues.binaryValue()); } - } - return binaryDocValues; + }; + } @Override public SortedDocValues getSortedDocValues(final String field) throws IOException { - return isFls(field) ? wrapSortedDocValues(field, in.getSortedDocValues(field)) : null; + return isAllowed(field) ? 
wrapSortedDocValues(field, in.getSortedDocValues(field)) : null; } private SortedDocValues wrapSortedDocValues(final String field, final SortedDocValues sortedDocValues) { + FieldMasking.FieldMaskingRule.Field fmRuleField = fmRule.get(field); - final MaskedFieldsMap maskedFieldsMap; - - if (sortedDocValues != null && (maskedFieldsMap = getRuntimeMaskedFieldInfo()) != null) { - final MaskedField mf = maskedFieldsMap.getMaskedField(handleKeyword(field)).orElse(null); + if (sortedDocValues == null || fmRuleField == null) { + return sortedDocValues; + } - if (mf != null) { - return new SortedDocValues() { + return new SortedDocValues() { - @Override - public int lookupTerm(BytesRef key) throws IOException { - return sortedDocValues.lookupTerm(key); - } + @Override + public int lookupTerm(BytesRef key) throws IOException { + return sortedDocValues.lookupTerm(key); + } - @Override - public TermsEnum termsEnum() throws IOException { - return new MaskedTermsEnum(sortedDocValues.termsEnum(), mf); - } + @Override + public TermsEnum termsEnum() throws IOException { + return new MaskedTermsEnum(sortedDocValues.termsEnum(), fmRuleField); + } - @Override - public TermsEnum intersect(CompiledAutomaton automaton) throws IOException { - return new MaskedTermsEnum(sortedDocValues.intersect(automaton), mf); - } + @Override + public TermsEnum intersect(CompiledAutomaton automaton) throws IOException { + return new MaskedTermsEnum(sortedDocValues.intersect(automaton), fmRuleField); + } - @Override - public int nextDoc() throws IOException { - return sortedDocValues.nextDoc(); - } + @Override + public int nextDoc() throws IOException { + return sortedDocValues.nextDoc(); + } - @Override - public int docID() { - return sortedDocValues.docID(); - } + @Override + public int docID() { + return sortedDocValues.docID(); + } - @Override - public long cost() { - return sortedDocValues.cost(); - } + @Override + public long cost() { + return sortedDocValues.cost(); + } - @Override - public int advance(int target) throws IOException { - return sortedDocValues.advance(target); - } + @Override + public int advance(int target) throws IOException { + return sortedDocValues.advance(target); + } - @Override - public boolean advanceExact(int target) throws IOException { - return sortedDocValues.advanceExact(target); - } + @Override + public boolean advanceExact(int target) throws IOException { + return sortedDocValues.advanceExact(target); + } - @Override - public int ordValue() throws IOException { - return sortedDocValues.ordValue(); - } + @Override + public int ordValue() throws IOException { + return sortedDocValues.ordValue(); + } - @Override - public BytesRef lookupOrd(int ord) throws IOException { - return mf.mask(sortedDocValues.lookupOrd(ord)); - } + @Override + public BytesRef lookupOrd(int ord) throws IOException { + return fmRuleField.apply(sortedDocValues.lookupOrd(ord)); + } - @Override - public int getValueCount() { - return sortedDocValues.getValueCount(); - } - }; + @Override + public int getValueCount() { + return sortedDocValues.getValueCount(); } - } - return sortedDocValues; + }; + } @Override public SortedNumericDocValues getSortedNumericDocValues(final String field) throws IOException { - return isFls(field) ? in.getSortedNumericDocValues(field) : null; + return isAllowed(field) ? in.getSortedNumericDocValues(field) : null; } @Override public SortedSetDocValues getSortedSetDocValues(final String field) throws IOException { - return isFls(field) ? 
wrapSortedSetDocValues(field, in.getSortedSetDocValues(field)) : null; + return isAllowed(field) ? wrapSortedSetDocValues(field, in.getSortedSetDocValues(field)) : null; } private SortedSetDocValues wrapSortedSetDocValues(final String field, final SortedSetDocValues sortedSetDocValues) { + FieldMasking.FieldMaskingRule.Field fmRuleField = fmRule.get(field); - final MaskedFieldsMap maskedFieldsMap; - - if (sortedSetDocValues != null && ((maskedFieldsMap = getRuntimeMaskedFieldInfo()) != null)) { - MaskedField mf = maskedFieldsMap.getMaskedField(handleKeyword(field)).orElse(null); + if (sortedSetDocValues == null || fmRuleField == null) { + return sortedSetDocValues; + } - if (mf != null) { - return new SortedSetDocValues() { + return new SortedSetDocValues() { - @Override - public long lookupTerm(BytesRef key) throws IOException { - return sortedSetDocValues.lookupTerm(key); - } + @Override + public long lookupTerm(BytesRef key) throws IOException { + return sortedSetDocValues.lookupTerm(key); + } - @Override - public TermsEnum termsEnum() throws IOException { - return new MaskedTermsEnum(sortedSetDocValues.termsEnum(), mf); - } + @Override + public TermsEnum termsEnum() throws IOException { + return new MaskedTermsEnum(sortedSetDocValues.termsEnum(), fmRuleField); + } - @Override - public TermsEnum intersect(CompiledAutomaton automaton) throws IOException { - return new MaskedTermsEnum(sortedSetDocValues.intersect(automaton), mf); - } + @Override + public TermsEnum intersect(CompiledAutomaton automaton) throws IOException { + return new MaskedTermsEnum(sortedSetDocValues.intersect(automaton), fmRuleField); + } - @Override - public int nextDoc() throws IOException { - return sortedSetDocValues.nextDoc(); - } + @Override + public int nextDoc() throws IOException { + return sortedSetDocValues.nextDoc(); + } - @Override - public int docID() { - return sortedSetDocValues.docID(); - } + @Override + public int docID() { + return sortedSetDocValues.docID(); + } - @Override - public long cost() { - return sortedSetDocValues.cost(); - } + @Override + public long cost() { + return sortedSetDocValues.cost(); + } - @Override - public int advance(int target) throws IOException { - return sortedSetDocValues.advance(target); - } + @Override + public int advance(int target) throws IOException { + return sortedSetDocValues.advance(target); + } - @Override - public boolean advanceExact(int target) throws IOException { - return sortedSetDocValues.advanceExact(target); - } + @Override + public boolean advanceExact(int target) throws IOException { + return sortedSetDocValues.advanceExact(target); + } - @Override - public long nextOrd() throws IOException { - return sortedSetDocValues.nextOrd(); - } + @Override + public long nextOrd() throws IOException { + return sortedSetDocValues.nextOrd(); + } - @Override - public int docValueCount() { - return sortedSetDocValues.docValueCount(); - } + @Override + public int docValueCount() { + return sortedSetDocValues.docValueCount(); + } - @Override - public BytesRef lookupOrd(long ord) throws IOException { - return mf.mask(sortedSetDocValues.lookupOrd(ord)); - } + @Override + public BytesRef lookupOrd(long ord) throws IOException { + return fmRuleField.apply(sortedSetDocValues.lookupOrd(ord)); + } - @Override - public long getValueCount() { - return sortedSetDocValues.getValueCount(); - } - }; + @Override + public long getValueCount() { + return sortedSetDocValues.getValueCount(); } - } - return sortedSetDocValues; + }; + } @Override public NumericDocValues 
getNormValues(final String field) throws IOException { - return isFls(field) ? in.getNormValues(field) : null; + return isAllowed(field) ? in.getNormValues(field) : null; } @Override public PointValues getPointValues(String field) throws IOException { - return isFls(field) ? in.getPointValues(field) : null; + return isAllowed(field) ? in.getPointValues(field) : null; } @Override public Terms terms(String field) throws IOException { - return isFls(field) ? wrapTerms(field, in.terms(field)) : null; + return isAllowed(field) ? wrapTerms(field, in.terms(field)) : null; } private Terms wrapTerms(final String field, Terms terms) throws IOException { @@ -1103,8 +758,7 @@ private Terms wrapTerms(final String field, Terms terms) throws IOException { return null; } - MaskedFieldsMap maskedFieldInfo = getRuntimeMaskedFieldInfo(); - if (maskedFieldInfo != null && maskedFieldInfo.anyMatch(handleKeyword(field))) { + if (fmRule.isMasked(field)) { return null; } @@ -1124,7 +778,7 @@ public BytesRef next() throws IOException { // wind forward in the sequence of terms until we reached the end or we find a allowed term(=field name) // so that calling this method never return a term which is not allowed by fls rules for (BytesRef nextBytesRef = in.next(); nextBytesRef != null; nextBytesRef = in.next()) { - if (!isFls((nextBytesRef))) { + if (!isAllowed((nextBytesRef))) { continue; } else { return nextBytesRef; @@ -1141,7 +795,7 @@ public SeekStatus seekCeil(BytesRef text) throws IOException { // So delegateStatus here is either FOUND or NOT_FOUND // check if the current term (=field name) is allowed // If so just return current seek status - if (delegateStatus != SeekStatus.END && isFls((in.term()))) { + if (delegateStatus != SeekStatus.END && isAllowed((in.term()))) { return delegateStatus; } else if (delegateStatus == SeekStatus.END) { // If we hit the end just return END @@ -1159,7 +813,7 @@ public SeekStatus seekCeil(BytesRef text) throws IOException { @Override public boolean seekExact(BytesRef term) throws IOException { - return isFls(term) && in.seekExact(term); + return isAllowed(term) && in.seekExact(term); } @Override @@ -1220,41 +874,15 @@ public boolean hasDeletions() { return dge.hasDeletions(); } - @SuppressWarnings("unchecked") - private MaskedFieldsMap getRuntimeMaskedFieldInfo() { - final Map> maskedFieldsMap = (Map>) HeaderHelper.deserializeSafeFromHeader( - threadContext, - ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER - ); - final String maskedEval = SecurityUtils.evalMap(maskedFieldsMap, indexService.index().getName()); - - if (maskedEval != null) { - final Set mf = maskedFieldsMap.get(maskedEval); - if (mf != null && !mf.isEmpty()) { - return MaskedFieldsMap.extractMaskedFields(true, mf, salt, maskingAlgorithmDefault); - } - - } - - return null; - } - - private String handleKeyword(final String field) { - if (field != null && field.endsWith(KEYWORD)) { - return field.substring(0, field.length() - KEYWORD.length()); - } - return field; - } - private static class MaskedTermsEnum extends TermsEnum { private final TermsEnum delegate; - private final MaskedField mf; + private final FieldMasking.FieldMaskingRule.Field fmRuleField; - public MaskedTermsEnum(TermsEnum delegate, MaskedField mf) { + public MaskedTermsEnum(TermsEnum delegate, FieldMasking.FieldMaskingRule.Field fmRuleField) { super(); this.delegate = delegate; - this.mf = mf; + this.fmRuleField = fmRuleField; } @Override @@ -1289,7 +917,7 @@ public void seekExact(BytesRef term, TermState state) throws IOException { 
@Override public BytesRef term() throws IOException { - return mf.mask(delegate.term()); + return fmRuleField.apply(delegate.term()); } @Override @@ -1350,4 +978,5 @@ private boolean applyDlsHere() { // (a get for example) return !action.startsWith("indices:data/read/search"); } + } diff --git a/src/main/java/org/opensearch/security/configuration/DlsFlsRequestValve.java b/src/main/java/org/opensearch/security/configuration/DlsFlsRequestValve.java index d629cbaff3..2565057afd 100644 --- a/src/main/java/org/opensearch/security/configuration/DlsFlsRequestValve.java +++ b/src/main/java/org/opensearch/security/configuration/DlsFlsRequestValve.java @@ -31,6 +31,8 @@ import org.opensearch.search.internal.SearchContext; import org.opensearch.search.query.QuerySearchResult; import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; +import org.opensearch.security.privileges.dlsfls.DlsFlsProcessedConfig; import org.opensearch.threadpool.ThreadPool; public interface DlsFlsRequestValve { @@ -41,6 +43,14 @@ public interface DlsFlsRequestValve { void onQueryPhase(QuerySearchResult queryResult); + DlsFlsProcessedConfig getCurrentConfig(); + + boolean hasFlsOrFieldMasking(String index) throws PrivilegesEvaluationException; + + boolean hasFieldMasking(String index) throws PrivilegesEvaluationException; + + boolean isFieldAllowed(String index, String field) throws PrivilegesEvaluationException; + public static class NoopDlsFlsRequestValve implements DlsFlsRequestValve { @Override @@ -57,6 +67,26 @@ public void handleSearchContext(SearchContext context, ThreadPool threadPool, Na public void onQueryPhase(QuerySearchResult queryResult) { } + + @Override + public DlsFlsProcessedConfig getCurrentConfig() { + return null; + } + + @Override + public boolean hasFlsOrFieldMasking(String index) { + return false; + } + + @Override + public boolean hasFieldMasking(String index) { + return false; + } + + @Override + public boolean isFieldAllowed(String index, String field) { + return true; + } } } diff --git a/src/main/java/org/opensearch/security/configuration/DlsFlsValveImpl.java b/src/main/java/org/opensearch/security/configuration/DlsFlsValveImpl.java index 4141a3f8f5..498b908e5d 100644 --- a/src/main/java/org/opensearch/security/configuration/DlsFlsValveImpl.java +++ b/src/main/java/org/opensearch/security/configuration/DlsFlsValveImpl.java @@ -11,15 +11,13 @@ package org.opensearch.security.configuration; -import java.io.Serializable; import java.lang.reflect.Field; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.Comparator; import java.util.List; -import java.util.Map; import java.util.Objects; -import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.stream.StreamSupport; @@ -35,12 +33,9 @@ import org.opensearch.OpenSearchSecurityException; import org.opensearch.SpecialPermission; import org.opensearch.action.ActionRequest; -import org.opensearch.action.DocWriteRequest; import org.opensearch.action.RealtimeRequest; -import org.opensearch.action.admin.cluster.shards.ClusterSearchShardsRequest; import org.opensearch.action.admin.indices.shrink.ResizeRequest; import org.opensearch.action.bulk.BulkItemRequest; -import org.opensearch.action.bulk.BulkRequest; import org.opensearch.action.bulk.BulkShardRequest; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.update.UpdateRequest; @@ 
-72,18 +67,22 @@ import org.opensearch.search.internal.SearchContext; import org.opensearch.search.query.QuerySearchResult; import org.opensearch.security.OpenSearchSecurityPlugin; +import org.opensearch.security.privileges.DocumentAllowList; import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; +import org.opensearch.security.privileges.dlsfls.DlsFlsBaseContext; +import org.opensearch.security.privileges.dlsfls.DlsFlsLegacyHeaders; +import org.opensearch.security.privileges.dlsfls.DlsFlsProcessedConfig; +import org.opensearch.security.privileges.dlsfls.DlsRestriction; +import org.opensearch.security.privileges.dlsfls.FieldMasking; +import org.opensearch.security.privileges.dlsfls.IndexToRuleMap; import org.opensearch.security.resolver.IndexResolverReplacer; -import org.opensearch.security.securityconf.ConfigModel; -import org.opensearch.security.securityconf.EvaluatedDlsFlsConfig; -import org.opensearch.security.support.Base64Helper; +import org.opensearch.security.securityconf.DynamicConfigFactory; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; import org.opensearch.security.support.ConfigConstants; -import org.opensearch.security.support.HeaderHelper; -import org.opensearch.security.support.SecurityUtils; import org.opensearch.threadpool.ThreadPool; -import org.greenrobot.eventbus.Subscribe; - public class DlsFlsValveImpl implements DlsFlsRequestValve { private static final String MAP_EXECUTION_HINT = "map"; @@ -93,11 +92,12 @@ public class DlsFlsValveImpl implements DlsFlsRequestValve { private final ClusterService clusterService; private final ThreadContext threadContext; private final Mode mode; - private final DlsQueryParser dlsQueryParser; private final IndexNameExpressionResolver resolver; - private final boolean dfmEmptyOverwritesAll; private final NamedXContentRegistry namedXContentRegistry; - private volatile ConfigModel configModel; + private final DlsFlsBaseContext dlsFlsBaseContext; + private final AtomicReference dlsFlsProcessedConfig = new AtomicReference<>(); + private final FieldMasking.Config fieldMaskingConfig; + private final Settings settings; public DlsFlsValveImpl( Settings settings, @@ -105,22 +105,27 @@ public DlsFlsValveImpl( ClusterService clusterService, IndexNameExpressionResolver resolver, NamedXContentRegistry namedXContentRegistry, - ThreadContext threadContext + ThreadPool threadPool, + DlsFlsBaseContext dlsFlsBaseContext ) { super(); this.nodeClient = nodeClient; this.clusterService = clusterService; this.resolver = resolver; - this.threadContext = threadContext; + this.threadContext = threadPool.getThreadContext(); this.mode = Mode.get(settings); - this.dlsQueryParser = new DlsQueryParser(namedXContentRegistry); - this.dfmEmptyOverwritesAll = settings.getAsBoolean(ConfigConstants.SECURITY_DFM_EMPTY_OVERRIDES_ALL, false); this.namedXContentRegistry = namedXContentRegistry; - } + this.fieldMaskingConfig = FieldMasking.Config.fromSettings(settings); + this.dlsFlsBaseContext = dlsFlsBaseContext; + this.settings = settings; + + clusterService.addListener(event -> { + DlsFlsProcessedConfig config = dlsFlsProcessedConfig.get(); - @Subscribe - public void onConfigModelChanged(ConfigModel configModel) { - this.configModel = configModel; + if (config != null) { + config.updateClusterStateMetadataAsync(clusterService, threadPool); + } + }); } /** @@ -130,269 +135,295 @@ public void 
onConfigModelChanged(ConfigModel configModel) { */ @Override public boolean invoke(PrivilegesEvaluationContext context, final ActionListener listener) { - - EvaluatedDlsFlsConfig evaluatedDlsFlsConfig = configModel.getSecurityRoles() - .filter(context.getMappedRoles()) - .getDlsFls(context.getUser(), dfmEmptyOverwritesAll, resolver, clusterService, namedXContentRegistry); - + DlsFlsProcessedConfig config = this.dlsFlsProcessedConfig.get(); ActionRequest request = context.getRequest(); IndexResolverReplacer.Resolved resolved = context.getResolvedRequest(); - if (log.isDebugEnabled()) { - log.debug( - "DlsFlsValveImpl.invoke()\nrequest: " - + request - + "\nevaluatedDlsFlsConfig: " - + evaluatedDlsFlsConfig - + "\nresolved: " - + resolved - + "\nmode: " - + mode - ); - } - - if (evaluatedDlsFlsConfig == null || evaluatedDlsFlsConfig.isEmpty()) { - return true; - } + try { + boolean hasDlsRestrictions = !config.getDocumentPrivileges().isUnrestricted(context, resolved); + boolean hasFlsRestrictions = !config.getFieldPrivileges().isUnrestricted(context, resolved); + boolean hasFieldMasking = !config.getFieldMasking().isUnrestricted(context, resolved); - if (threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_FILTER_LEVEL_DLS_DONE) != null) { - if (log.isDebugEnabled()) { - log.debug("DLS is already done for: " + threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_FILTER_LEVEL_DLS_DONE)); + if (!hasDlsRestrictions && !hasFlsRestrictions && !hasFieldMasking) { + return true; } - return true; - } - - EvaluatedDlsFlsConfig filteredDlsFlsConfig = evaluatedDlsFlsConfig.filter(resolved); + if (threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_FILTER_LEVEL_DLS_DONE) != null) { + if (log.isDebugEnabled()) { + log.debug( + "DLS is already done for: {}", + threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_FILTER_LEVEL_DLS_DONE) + ); + } - boolean doFilterLevelDls; + return true; + } - if (mode == Mode.FILTER_LEVEL) { - doFilterLevelDls = true; - } else if (mode == Mode.LUCENE_LEVEL) { - doFilterLevelDls = false; - } else { // mode == Mode.ADAPTIVE - Mode modeByHeader = getDlsModeHeader(); + IndexToRuleMap dlsRestrictionMap = null; + boolean doFilterLevelDls; - if (modeByHeader == Mode.FILTER_LEVEL) { + if (mode == Mode.FILTER_LEVEL) { doFilterLevelDls = true; - log.debug("Doing filter-level DLS due to header"); - } else { - doFilterLevelDls = dlsQueryParser.containsTermLookupQuery(filteredDlsFlsConfig.getAllQueries()); - - if (doFilterLevelDls) { - setDlsModeHeader(Mode.FILTER_LEVEL); - log.debug("Doing filter-level DLS because the query contains a TLQ"); + dlsRestrictionMap = config.getDocumentPrivileges() + .getRestrictions(context, resolved.getAllIndicesResolved(clusterService, context.getIndexNameExpressionResolver())); + } else if (mode == Mode.LUCENE_LEVEL) { + doFilterLevelDls = false; + } else { // mode == Mode.ADAPTIVE + Mode modeByHeader = getDlsModeHeader(); + dlsRestrictionMap = config.getDocumentPrivileges() + .getRestrictions(context, resolved.getAllIndicesResolved(clusterService, context.getIndexNameExpressionResolver())); + + if (modeByHeader == Mode.FILTER_LEVEL) { + doFilterLevelDls = true; + log.debug("Doing filter-level DLS due to header"); } else { - log.debug("Doing lucene-level DLS because the query does not contain a TLQ"); + doFilterLevelDls = dlsRestrictionMap.containsAny(DlsRestriction::containsTermLookupQuery); + + if (doFilterLevelDls) { + setDlsModeHeader(Mode.FILTER_LEVEL); + log.debug("Doing filter-level DLS because the query contains a 
TLQ"); + } else { + log.debug("Doing lucene-level DLS because the query does not contain a TLQ"); + } } } - } - - if (!doFilterLevelDls) { - setDlsHeaders(evaluatedDlsFlsConfig, request); - } - setFlsHeaders(evaluatedDlsFlsConfig, request); - - if (filteredDlsFlsConfig.isEmpty()) { - return true; - } + if (DlsFlsLegacyHeaders.possiblyRequired(clusterService)) { + DlsFlsLegacyHeaders.prepare(threadContext, context, config, clusterService.state().metadata(), doFilterLevelDls); + } - if (request instanceof RealtimeRequest) { - ((RealtimeRequest) request).realtime(Boolean.FALSE); - } + if (request instanceof RealtimeRequest) { + ((RealtimeRequest) request).realtime(Boolean.FALSE); + } - if (request instanceof SearchRequest) { + if (request instanceof SearchRequest) { - SearchRequest searchRequest = ((SearchRequest) request); + SearchRequest searchRequest = ((SearchRequest) request); - // When we encounter a terms or sampler aggregation with masked fields activated we forcibly - // need to switch off global ordinals because field masking can break ordering - // CS-SUPPRESS-SINGLE: RegexpSingleline Ignore term inside of url - // https://www.elastic.co/guide/en/elasticsearch/reference/master/eager-global-ordinals.html#_avoiding_global_ordinal_loading - // CS-ENFORCE-SINGLE - if (evaluatedDlsFlsConfig.hasFieldMasking()) { + // When we encounter a terms or sampler aggregation with masked fields activated we forcibly + // need to switch off global ordinals because field masking can break ordering + // CS-SUPPRESS-SINGLE: RegexpSingleline Ignore term inside of url + // https://www.elastic.co/guide/en/elasticsearch/reference/master/eager-global-ordinals.html#_avoiding_global_ordinal_loading + // CS-ENFORCE-SINGLE + if (hasFieldMasking) { - if (searchRequest.source() != null && searchRequest.source().aggregations() != null) { - for (AggregationBuilder aggregationBuilder : searchRequest.source().aggregations().getAggregatorFactories()) { - if (aggregationBuilder instanceof TermsAggregationBuilder) { - ((TermsAggregationBuilder) aggregationBuilder).executionHint(MAP_EXECUTION_HINT); - } + if (searchRequest.source() != null && searchRequest.source().aggregations() != null) { + for (AggregationBuilder aggregationBuilder : searchRequest.source().aggregations().getAggregatorFactories()) { + if (aggregationBuilder instanceof TermsAggregationBuilder) { + ((TermsAggregationBuilder) aggregationBuilder).executionHint(MAP_EXECUTION_HINT); + } - if (aggregationBuilder instanceof SignificantTermsAggregationBuilder) { - ((SignificantTermsAggregationBuilder) aggregationBuilder).executionHint(MAP_EXECUTION_HINT); - } + if (aggregationBuilder instanceof SignificantTermsAggregationBuilder) { + ((SignificantTermsAggregationBuilder) aggregationBuilder).executionHint(MAP_EXECUTION_HINT); + } - if (aggregationBuilder instanceof DiversifiedAggregationBuilder) { - ((DiversifiedAggregationBuilder) aggregationBuilder).executionHint(MAP_EXECUTION_HINT); + if (aggregationBuilder instanceof DiversifiedAggregationBuilder) { + ((DiversifiedAggregationBuilder) aggregationBuilder).executionHint(MAP_EXECUTION_HINT); + } } } } - } - if (!evaluatedDlsFlsConfig.hasFls() && !evaluatedDlsFlsConfig.hasDls() && searchRequest.source().aggregations() != null) { + if (!hasFlsRestrictions && !hasDlsRestrictions && searchRequest.source().aggregations() != null) { - boolean cacheable = true; + boolean cacheable = true; - for (AggregationBuilder af : searchRequest.source().aggregations().getAggregatorFactories()) { + for (AggregationBuilder af : 
searchRequest.source().aggregations().getAggregatorFactories()) { - if (!af.getType().equals("cardinality") && !af.getType().equals("count")) { - cacheable = false; - continue; - } + if (!af.getType().equals("cardinality") && !af.getType().equals("count")) { + cacheable = false; + continue; + } - StringBuilder sb = new StringBuilder(); + StringBuilder sb = new StringBuilder(); - if (searchRequest.source() != null) { - sb.append(Strings.toString(MediaTypeRegistry.JSON, searchRequest.source()) + System.lineSeparator()); - } + if (searchRequest.source() != null) { + sb.append(Strings.toString(MediaTypeRegistry.JSON, searchRequest.source()) + System.lineSeparator()); + } - sb.append(Strings.toString(MediaTypeRegistry.JSON, af) + System.lineSeparator()); + sb.append(Strings.toString(MediaTypeRegistry.JSON, af) + System.lineSeparator()); - LogManager.getLogger("debuglogger").error(sb.toString()); + LogManager.getLogger("debuglogger").error(sb.toString()); - } + } + + if (!cacheable) { + searchRequest.requestCache(Boolean.FALSE); + } else { + LogManager.getLogger("debuglogger") + .error( + "Shard requestcache enabled for " + + (searchRequest.source() == null + ? "" + : Strings.toString(MediaTypeRegistry.JSON, searchRequest.source())) + ); + } - if (!cacheable) { - searchRequest.requestCache(Boolean.FALSE); } else { - LogManager.getLogger("debuglogger") - .error( - "Shard requestcache enabled for " - + (searchRequest.source() == null - ? "" - : Strings.toString(MediaTypeRegistry.JSON, searchRequest.source())) - ); + searchRequest.requestCache(Boolean.FALSE); } - - } else { - searchRequest.requestCache(Boolean.FALSE); } - } - - if (request instanceof UpdateRequest) { - listener.onFailure(new OpenSearchSecurityException("Update is not supported when FLS or DLS or Fieldmasking is activated")); - return false; - } - if (request instanceof BulkRequest) { - for (DocWriteRequest inner : ((BulkRequest) request).requests()) { - if (inner instanceof UpdateRequest) { - listener.onFailure( - new OpenSearchSecurityException("Update is not supported when FLS or DLS or Fieldmasking is activated") - ); - return false; - } + if (request instanceof UpdateRequest) { + listener.onFailure(new OpenSearchSecurityException("Update is not supported when FLS or DLS or Fieldmasking is activated")); + return false; } - } - if (request instanceof BulkShardRequest) { - for (BulkItemRequest inner : ((BulkShardRequest) request).items()) { - if (inner.request() instanceof UpdateRequest) { - listener.onFailure( - new OpenSearchSecurityException("Update is not supported when FLS or DLS or Fieldmasking is activated") - ); - return false; + if (request instanceof BulkShardRequest) { + for (BulkItemRequest inner : ((BulkShardRequest) request).items()) { + if (inner.request() instanceof UpdateRequest) { + listener.onFailure( + new OpenSearchSecurityException("Update is not supported when FLS or DLS or Fieldmasking is activated") + ); + return false; + } } } - } - if (request instanceof ResizeRequest) { - listener.onFailure(new OpenSearchSecurityException("Resize is not supported when FLS or DLS or Fieldmasking is activated")); - return false; - } - - if (context.getAction().contains("plugins/replication")) { - listener.onFailure( - new OpenSearchSecurityException( - "Cross Cluster Replication is not supported when FLS or DLS or Fieldmasking is activated", - RestStatus.FORBIDDEN - ) - ); - return false; - } + if (request instanceof ResizeRequest) { + listener.onFailure(new OpenSearchSecurityException("Resize is not supported when FLS or 
DLS or Fieldmasking is activated")); + return false; + } - if (evaluatedDlsFlsConfig.hasDls()) { - if (request instanceof SearchRequest) { + if (context.getAction().contains("plugins/replication")) { + listener.onFailure( + new OpenSearchSecurityException( + "Cross Cluster Replication is not supported when FLS or DLS or Fieldmasking is activated", + RestStatus.FORBIDDEN + ) + ); + return false; + } - final SearchSourceBuilder source = ((SearchRequest) request).source(); - if (source != null) { - AggregatorFactories.Builder aggregations = source.aggregations(); - if (aggregations != null) { - for (AggregationBuilder factory : aggregations.getAggregatorFactories()) { - if (factory instanceof TermsAggregationBuilder && ((TermsAggregationBuilder) factory).minDocCount() == 0) { - listener.onFailure(new OpenSearchException("min_doc_count 0 is not supported when DLS is activated")); - return false; + if (hasDlsRestrictions) { + if (request instanceof SearchRequest) { + + final SearchSourceBuilder source = ((SearchRequest) request).source(); + if (source != null) { + AggregatorFactories.Builder aggregations = source.aggregations(); + if (aggregations != null) { + for (AggregationBuilder factory : aggregations.getAggregatorFactories()) { + if (factory instanceof TermsAggregationBuilder && ((TermsAggregationBuilder) factory).minDocCount() == 0) { + listener.onFailure(new OpenSearchException("min_doc_count 0 is not supported when DLS is activated")); + return false; + } } } - } - if (source.profile()) { - listener.onFailure(new OpenSearchSecurityException("Profiling is not supported when DLS is activated")); - return false; - } + if (source.profile()) { + listener.onFailure(new OpenSearchSecurityException("Profiling is not supported when DLS is activated")); + return false; + } + } } } - } - if (doFilterLevelDls && filteredDlsFlsConfig.hasDls()) { - return DlsFilterLevelActionHandler.handle( - context, - evaluatedDlsFlsConfig, - listener, - nodeClient, - clusterService, - OpenSearchSecurityPlugin.GuiceHolder.getIndicesService(), - resolver, - dlsQueryParser, - threadContext - ); - } else { - return true; + if (doFilterLevelDls && hasDlsRestrictions) { + return DlsFilterLevelActionHandler.handle( + context, + dlsRestrictionMap, + listener, + nodeClient, + clusterService, + OpenSearchSecurityPlugin.GuiceHolder.getIndicesService(), + resolver, + threadContext + ); + } else { + return true; + } + + } catch (PrivilegesEvaluationException e) { + log.error("Error while evaluating DLS/FLS privileges", e); + listener.onFailure(new OpenSearchSecurityException("Error while evaluating DLS/FLS privileges")); + return false; + } catch (RuntimeException e) { + log.error(e); + throw e; } } @Override - public void handleSearchContext(SearchContext context, ThreadPool threadPool, NamedXContentRegistry namedXContentRegistry) { + public void handleSearchContext(SearchContext searchContext, ThreadPool threadPool, NamedXContentRegistry namedXContentRegistry) { try { - @SuppressWarnings("unchecked") - final Map> queries = (Map>) HeaderHelper.deserializeSafeFromHeader( - threadPool.getThreadContext(), - ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER - ); + String index = searchContext.indexShard().indexSettings().getIndex().getName(); + + if (log.isTraceEnabled()) { + log.trace("handleSearchContext(); index: {}", index); + } - final String dlsEval = SecurityUtils.evalMap(queries, context.indexShard().indexSettings().getIndex().getName()); + if (searchContext.suggest() != null) { + return; + } - if (dlsEval != null) { 
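+ // Descriptive note on the reworked flow: the DLS restriction to apply is no longer deserialized from a thread context header; it is looked up per index from the current DlsFlsProcessedConfig and combined with the parsed query of this search context further below.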
+ if (dlsFlsBaseContext.isDlsDoneOnFilterLevel() || mode == Mode.FILTER_LEVEL) { + // For filter level DLS, the query was already modified to include the DLS restrictions. + // Thus, we can exit here early. + log.trace("handleSearchContext(): DLS is done on the filter level; no further handling necessary"); + return; + } - if (context.suggest() != null) { - return; - } + if (dlsFlsBaseContext.isPrivilegedConfigRequest()) { + // Requests with the header OPENDISTRO_SECURITY_CONF_REQUEST_HEADER set bypass any access controls. + // This follows the logic from + // https://github.com/opensearch-project/security/blob/1c898dcc4a92e8d4aa8b18c3fed761b5f6e52d4f/src/main/java/org/opensearch/security/filter/SecurityFilter.java#L209 + // In the old DLS/FLS implementation, that check in SecurityFilter would also affect this code. + // Now it no longer does, thus we need this additional check here. + return; + } - assert context.parsedQuery() != null; + PrivilegesEvaluationContext privilegesEvaluationContext = this.dlsFlsBaseContext.getPrivilegesEvaluationContext(); + if (privilegesEvaluationContext == null) { + return; + } - final Set unparsedDlsQueries = queries.get(dlsEval); + DlsFlsProcessedConfig config = this.dlsFlsProcessedConfig.get(); - if (unparsedDlsQueries != null && !unparsedDlsQueries.isEmpty()) { - BooleanQuery.Builder queryBuilder = dlsQueryParser.parse( - unparsedDlsQueries, - context.getQueryShardContext(), - (q) -> new ConstantScoreQuery(q) - ); + DlsRestriction dlsRestriction = config.getDocumentPrivileges().getRestriction(privilegesEvaluationContext, index); + + if (log.isTraceEnabled()) { + log.trace("handleSearchContext(); index: {}; dlsRestriction: {}", index, dlsRestriction); + } - queryBuilder.add(context.parsedQuery().query(), Occur.MUST); + DocumentAllowList documentAllowList = DocumentAllowList.get(threadContext); - ParsedQuery dlsQuery = new ParsedQuery(queryBuilder.build()); + if (documentAllowList.isEntryForIndexPresent(index)) { + // The documentAllowList is needed for two cases: + // - DLS rules which use "term lookup queries" and thus need to access indices for which no privileges are present + // - Dashboards multi tenancy which can redirect index accesses to indices for which no normal index privileges are present - if (dlsQuery != null) { - context.parsedQuery(dlsQuery); - context.preProcess(true); - } + if (!dlsRestriction.isUnrestricted() && documentAllowList.isAllowed(index, "*")) { + dlsRestriction = DlsRestriction.NONE; + log.debug("Lifting DLS for {} due to present document allowlist", index); + } + } + + if (!dlsRestriction.isUnrestricted()) { + if (mode == Mode.ADAPTIVE && dlsRestriction.containsTermLookupQuery()) { + // Special case for scroll operations: + // Normally, the check dlsFlsBaseContext.isDlsDoneOnFilterLevel() already aborts early if DLS filter level mode + // has been activated. However, this is not the case for scroll operations, as these lose the thread context value + // on which dlsFlsBaseContext.isDlsDoneOnFilterLevel() is based. Thus, we need to check here again the deeper + // conditions.
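+ // (Illustrative example, not taken from this change: a DLS query contains a TLQ when it uses a terms lookup such as { "terms": { "owner": { "index": "users", "id": "42", "path": "allowed_owners" } } }; resolving the lookup requires reading another index, which is why such restrictions are handled by filter-level DLS instead of on the Lucene level.)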
+ log.trace("DlsRestriction: contains TLQ."); + return; } + + assert searchContext.parsedQuery() != null; + + BooleanQuery.Builder queryBuilder = dlsRestriction.toBooleanQueryBuilder( + searchContext.getQueryShardContext(), + (q) -> new ConstantScoreQuery(q) + ); + + queryBuilder.add(searchContext.parsedQuery().query(), Occur.MUST); + + searchContext.parsedQuery(new ParsedQuery(queryBuilder.build())); + searchContext.preProcess(true); } } catch (Exception e) { + log.error("Error in handleSearchContext()", e); throw new RuntimeException("Error evaluating dls for a search query: " + e, e); } } @@ -411,6 +442,45 @@ public void onQueryPhase(QuerySearchResult queryResult) { ); } + @Override + public DlsFlsProcessedConfig getCurrentConfig() { + return dlsFlsProcessedConfig.get(); + } + + @Override + public boolean hasFlsOrFieldMasking(String index) throws PrivilegesEvaluationException { + PrivilegesEvaluationContext privilegesEvaluationContext = this.dlsFlsBaseContext.getPrivilegesEvaluationContext(); + if (privilegesEvaluationContext == null) { + return false; + } + + DlsFlsProcessedConfig config = this.dlsFlsProcessedConfig.get(); + return !config.getFieldPrivileges().isUnrestricted(privilegesEvaluationContext, index) + || !config.getFieldMasking().isUnrestricted(privilegesEvaluationContext, index); + } + + @Override + public boolean hasFieldMasking(String index) throws PrivilegesEvaluationException { + PrivilegesEvaluationContext privilegesEvaluationContext = this.dlsFlsBaseContext.getPrivilegesEvaluationContext(); + if (privilegesEvaluationContext == null) { + return false; + } + + DlsFlsProcessedConfig config = this.dlsFlsProcessedConfig.get(); + return !config.getFieldMasking().isUnrestricted(privilegesEvaluationContext, index); + } + + @Override + public boolean isFieldAllowed(String index, String field) throws PrivilegesEvaluationException { + PrivilegesEvaluationContext privilegesEvaluationContext = this.dlsFlsBaseContext.getPrivilegesEvaluationContext(); + if (privilegesEvaluationContext == null) { + return true; + } + + DlsFlsProcessedConfig config = this.dlsFlsProcessedConfig.get(); + return config.getFieldPrivileges().getRestriction(privilegesEvaluationContext, index).isAllowed(field); + } + private static InternalAggregation aggregateBuckets(InternalAggregation aggregation) { if (aggregation instanceof StringTerms) { StringTerms stringTerms = (StringTerms) aggregation; @@ -441,42 +511,6 @@ private static List mergeBuckets( return buckets; } - private void setDlsHeaders(EvaluatedDlsFlsConfig dlsFls, ActionRequest request) { - if (!dlsFls.getDlsQueriesByIndex().isEmpty()) { - Map> dlsQueries = dlsFls.getDlsQueriesByIndex(); - - if (request instanceof ClusterSearchShardsRequest && HeaderHelper.isTrustedClusterRequest(threadContext)) { - threadContext.addResponseHeader( - ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER, - Base64Helper.serializeObject((Serializable) dlsQueries) - ); - if (log.isDebugEnabled()) { - log.debug("added response header for DLS info: {}", dlsQueries); - } - } else { - if (threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER) != null) { - Object deserializedDlsQueries = Base64Helper.deserializeObject( - threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER), - threadContext.getTransient(ConfigConstants.USE_JDK_SERIALIZATION) - ); - if (!dlsQueries.equals(deserializedDlsQueries)) { - throw new OpenSearchSecurityException( - ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER + " does not match (SG 900D)" - ); 
- } - } else { - threadContext.putHeader( - ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER, - Base64Helper.serializeObject((Serializable) dlsQueries) - ); - if (log.isDebugEnabled()) { - log.debug("attach DLS info: {}", dlsQueries); - } - } - } - } - } - private void setDlsModeHeader(Mode mode) { String modeString = mode.name(); @@ -504,95 +538,6 @@ private Mode getDlsModeHeader() { } } - private void setFlsHeaders(EvaluatedDlsFlsConfig dlsFls, ActionRequest request) { - if (!dlsFls.getFieldMaskingByIndex().isEmpty()) { - Map> maskedFieldsMap = dlsFls.getFieldMaskingByIndex(); - - if (request instanceof ClusterSearchShardsRequest && HeaderHelper.isTrustedClusterRequest(threadContext)) { - threadContext.addResponseHeader( - ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER, - Base64Helper.serializeObject((Serializable) maskedFieldsMap) - ); - if (log.isDebugEnabled()) { - log.debug("added response header for masked fields info: {}", maskedFieldsMap); - } - } else { - - if (threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER) != null) { - if (!maskedFieldsMap.equals( - Base64Helper.deserializeObject( - threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER), - threadContext.getTransient(ConfigConstants.USE_JDK_SERIALIZATION) - ) - )) { - throw new OpenSearchSecurityException( - ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER + " does not match (SG 901D)" - ); - } else { - if (log.isDebugEnabled()) { - log.debug(ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER + " already set"); - } - } - } else { - threadContext.putHeader( - ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER, - Base64Helper.serializeObject((Serializable) maskedFieldsMap) - ); - if (log.isDebugEnabled()) { - log.debug("attach masked fields info: {}", maskedFieldsMap); - } - } - } - } - - if (!dlsFls.getFlsByIndex().isEmpty()) { - Map> flsFields = dlsFls.getFlsByIndex(); - - if (request instanceof ClusterSearchShardsRequest && HeaderHelper.isTrustedClusterRequest(threadContext)) { - threadContext.addResponseHeader( - ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER, - Base64Helper.serializeObject((Serializable) flsFields) - ); - if (log.isDebugEnabled()) { - log.debug("added response header for FLS info: {}", flsFields); - } - } else { - if (threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER) != null) { - if (!flsFields.equals( - Base64Helper.deserializeObject( - threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER), - threadContext.getTransient(ConfigConstants.USE_JDK_SERIALIZATION) - ) - )) { - throw new OpenSearchSecurityException( - ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER - + " does not match (SG 901D) " - + flsFields - + "---" - + Base64Helper.deserializeObject( - threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER), - threadContext.getTransient(ConfigConstants.USE_JDK_SERIALIZATION) - ) - ); - } else { - if (log.isDebugEnabled()) { - log.debug(ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER + " already set"); - } - } - } else { - threadContext.putHeader( - ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER, - Base64Helper.serializeObject((Serializable) flsFields) - ); - if (log.isDebugEnabled()) { - log.debug("attach FLS info: {}", flsFields); - } - } - } - - } - } - private static class BucketMerger implements Consumer { private Comparator comparator; private StringTerms.Bucket bucket = null; @@ -730,4 +675,26 @@ static Mode 
get(Settings settings) { } } } + + public void updateConfiguration(SecurityDynamicConfiguration rolesConfiguration) { + try { + if (rolesConfiguration != null) { + DlsFlsProcessedConfig oldConfig = this.dlsFlsProcessedConfig.getAndSet( + new DlsFlsProcessedConfig( + DynamicConfigFactory.addStatics(rolesConfiguration.clone()), + clusterService.state().metadata().getIndicesLookup(), + namedXContentRegistry, + settings, + fieldMaskingConfig + ) + ); + + if (oldConfig != null) { + oldConfig.shutdown(); + } + } + } catch (Exception e) { + log.error("Error while updating DLS/FLS configuration with {}", rolesConfiguration, e); + } + } } diff --git a/src/main/java/org/opensearch/security/configuration/DlsQueryParser.java b/src/main/java/org/opensearch/security/configuration/DlsQueryParser.java deleted file mode 100644 index 9640abcd8e..0000000000 --- a/src/main/java/org/opensearch/security/configuration/DlsQueryParser.java +++ /dev/null @@ -1,174 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.security.configuration; - -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.function.Function; - -import com.google.common.cache.Cache; -import com.google.common.cache.CacheBuilder; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.PrefixQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.join.BitSetProducer; -import org.apache.lucene.search.join.ToChildBlockJoinQuery; - -import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.core.xcontent.DeprecationHandler; -import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.index.query.AbstractQueryBuilder; -import org.opensearch.index.query.ParsedQuery; -import org.opensearch.index.query.QueryBuilder; -import org.opensearch.index.query.QueryShardContext; -import org.opensearch.index.query.TermsQueryBuilder; -import org.opensearch.security.queries.QueryBuilderTraverser; - -public final class DlsQueryParser { - - private static final Logger log = LogManager.getLogger(DlsQueryParser.class); - private static final Query NON_NESTED_QUERY; - - static { - // Match all documents but not the nested ones - // Nested document types start with __ - // https://discuss.elastic.co/t/whats-nested-documents-layout-inside-the-lucene/59944/9 - NON_NESTED_QUERY = new BooleanQuery.Builder().add(new MatchAllDocsQuery(), Occur.FILTER) - .add(new PrefixQuery(new Term("_type", "__")), Occur.MUST_NOT) - .build(); - } - - private static Cache parsedQueryCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(4, TimeUnit.HOURS) - .build(); - private static Cache queryContainsTlqCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(4, TimeUnit.HOURS) - .build(); - - private final NamedXContentRegistry namedXContentRegistry; - - public 
DlsQueryParser(NamedXContentRegistry namedXContentRegistry) { - this.namedXContentRegistry = namedXContentRegistry; - } - - public BooleanQuery.Builder parse(Set unparsedDlsQueries, QueryShardContext queryShardContext) { - return parse(unparsedDlsQueries, queryShardContext, null); - } - - public BooleanQuery.Builder parse( - Set unparsedDlsQueries, - QueryShardContext queryShardContext, - Function queryMapFunction - ) { - - if (unparsedDlsQueries == null || unparsedDlsQueries.isEmpty()) { - return null; - } - - boolean hasNestedMapping = queryShardContext.getMapperService().hasNested(); - - BooleanQuery.Builder dlsQueryBuilder = new BooleanQuery.Builder(); - dlsQueryBuilder.setMinimumNumberShouldMatch(1); - - for (String unparsedDlsQuery : unparsedDlsQueries) { - ParsedQuery parsedQuery = queryShardContext.toQuery(parse(unparsedDlsQuery)); - Query dlsQuery = parsedQuery.query(); - - if (queryMapFunction != null) { - dlsQuery = queryMapFunction.apply(dlsQuery); - } - - dlsQueryBuilder.add(dlsQuery, Occur.SHOULD); - - if (hasNestedMapping) { - handleNested(queryShardContext, dlsQueryBuilder, dlsQuery); - } - } - - return dlsQueryBuilder; - } - - private static void handleNested( - final QueryShardContext queryShardContext, - final BooleanQuery.Builder dlsQueryBuilder, - final Query parentQuery - ) { - final BitSetProducer parentDocumentsFilter = queryShardContext.bitsetFilter(NON_NESTED_QUERY); - dlsQueryBuilder.add(new ToChildBlockJoinQuery(parentQuery, parentDocumentsFilter), Occur.SHOULD); - } - - public QueryBuilder parse(String unparsedDlsQuery) { - try { - final QueryBuilder qb = parsedQueryCache.get(unparsedDlsQuery, new Callable() { - - @Override - public QueryBuilder call() throws Exception { - final XContentParser parser = JsonXContent.jsonXContent.createParser( - namedXContentRegistry, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - unparsedDlsQuery - ); - return AbstractQueryBuilder.parseInnerQueryBuilder(parser); - } - - }); - - return qb; - } catch (ExecutionException e) { - throw new RuntimeException("Error while parsing " + unparsedDlsQuery, e.getCause()); - } - } - - boolean containsTermLookupQuery(Set unparsedQueries) { - for (String query : unparsedQueries) { - if (containsTermLookupQuery(query)) { - if (log.isDebugEnabled()) { - log.debug("containsTermLookupQuery() returns true due to " + query + "\nqueries: " + unparsedQueries); - } - - return true; - } - } - - if (log.isDebugEnabled()) { - log.debug("containsTermLookupQuery() returns false\nqueries: " + unparsedQueries); - } - - return false; - } - - boolean containsTermLookupQuery(String query) { - try { - return queryContainsTlqCache.get(query, () -> { - QueryBuilder queryBuilder = parse(query); - - return QueryBuilderTraverser.exists( - queryBuilder, - (q) -> (q instanceof TermsQueryBuilder) && ((TermsQueryBuilder) q).termsLookup() != null - ); - }); - } catch (ExecutionException e) { - throw new RuntimeException("Error handling parsing " + query, e.getCause()); - } - } - -} diff --git a/src/main/java/org/opensearch/security/configuration/MaskedField.java b/src/main/java/org/opensearch/security/configuration/MaskedField.java deleted file mode 100644 index 579b9f476d..0000000000 --- a/src/main/java/org/opensearch/security/configuration/MaskedField.java +++ /dev/null @@ -1,210 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.security.configuration; - -import java.nio.charset.StandardCharsets; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -import com.google.common.base.Splitter; -import org.apache.commons.lang3.StringUtils; -import org.apache.lucene.util.BytesRef; -import org.bouncycastle.util.encoders.Hex; - -import com.rfksystems.blake2b.Blake2b; - -public class MaskedField { - - private final String name; - private String algo = null; - private List regexReplacements; - private final byte[] defaultSalt; - private final String defaultAlgorithm; - - public MaskedField(final String value, final Salt salt, final String defaultAlgorithm) { - this.defaultSalt = salt.getSalt16(); - this.defaultAlgorithm = defaultAlgorithm; - final List tokens = Splitter.on("::").splitToList(Objects.requireNonNull(value)); - final int tokenCount = tokens.size(); - if (tokenCount == 1) { - name = tokens.get(0); - } else if (tokenCount == 2) { - name = tokens.get(0); - algo = tokens.get(1); - } else if (tokenCount >= 3 && tokenCount % 2 == 1) { - name = tokens.get(0); - regexReplacements = new ArrayList<>((tokenCount - 1) / 2); - for (int i = 1; i < tokenCount - 1; i = i + 2) { - regexReplacements.add(new RegexReplacement(tokens.get(i), tokens.get(i + 1))); - } - } else { - throw new IllegalArgumentException("Expected 1 or 2 or >=3 (but then odd count) tokens, got " + tokenCount); - } - } - - public final void isValid() throws Exception { - mask(new byte[] { 1, 2, 3, 4, 5 }); - } - - public byte[] mask(byte[] value) { - if (algo != null) { - return customHash(value, algo); - } else if (regexReplacements != null) { - String cur = new String(value, StandardCharsets.UTF_8); - for (RegexReplacement rr : regexReplacements) { - cur = cur.replaceAll(rr.getRegex(), rr.getReplacement()); - } - return cur.getBytes(StandardCharsets.UTF_8); - } else if (StringUtils.isNotEmpty(defaultAlgorithm)) { - return customHash(value, defaultAlgorithm); - } else { - return blake2bHash(value); - } - } - - public String mask(String value) { - return new String(mask(value.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8); - } - - public BytesRef mask(BytesRef value) { - if (value == null) { - return null; - } - final BytesRef copy = BytesRef.deepCopyOf(value); - return new BytesRef(mask(copy.bytes)); - } - - public String getName() { - return name; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((algo == null) ? 0 : algo.hashCode()); - result = prime * result + ((name == null) ? 0 : name.hashCode()); - result = prime * result + ((regexReplacements == null) ? 
0 : regexReplacements.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - MaskedField other = (MaskedField) obj; - if (algo == null) { - if (other.algo != null) return false; - } else if (!algo.equals(other.algo)) return false; - if (name == null) { - if (other.name != null) return false; - } else if (!name.equals(other.name)) return false; - if (regexReplacements == null) { - if (other.regexReplacements != null) return false; - } else if (!regexReplacements.equals(other.regexReplacements)) return false; - return true; - } - - @Override - public String toString() { - return "MaskedField [name=" - + name - + ", algo=" - + algo - + ", regexReplacements=" - + regexReplacements - + ", defaultSalt=" - + Arrays.toString(defaultSalt) - + ", defaultAlgorithm=" - + defaultAlgorithm - + ", isDefault()=" - + isDefault() - + "]"; - } - - private boolean isDefault() { - return regexReplacements == null && algo == null; - } - - private static byte[] customHash(byte[] in, final String algorithm) { - try { - MessageDigest digest = MessageDigest.getInstance(algorithm); - return Hex.encode(digest.digest(in)); - } catch (NoSuchAlgorithmException e) { - throw new IllegalArgumentException(e); - } - } - - private byte[] blake2bHash(byte[] in) { - // Salt is passed incorrectly but order of parameters is retained at present to ensure full backwards compatibility - // Tracking with https://github.com/opensearch-project/security/issues/4274 - final Blake2b hash = new Blake2b(null, 32, null, defaultSalt); - hash.update(in, 0, in.length); - final byte[] out = new byte[hash.getDigestSize()]; - hash.digest(out, 0); - return Hex.encode(out); - } - - private static class RegexReplacement { - private final String regex; - private final String replacement; - - public RegexReplacement(String regex, String replacement) { - super(); - this.regex = regex.substring(1).substring(0, regex.length() - 2); - this.replacement = replacement; - } - - public String getRegex() { - return regex; - } - - public String getReplacement() { - return replacement; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((regex == null) ? 0 : regex.hashCode()); - result = prime * result + ((replacement == null) ? 
0 : replacement.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - RegexReplacement other = (RegexReplacement) obj; - if (regex == null) { - if (other.regex != null) return false; - } else if (!regex.equals(other.regex)) return false; - if (replacement == null) { - if (other.replacement != null) return false; - } else if (!replacement.equals(other.replacement)) return false; - return true; - } - - @Override - public String toString() { - return "RegexReplacement [regex=" + regex + ", replacement=" + replacement + "]"; - } - - } -} diff --git a/src/main/java/org/opensearch/security/configuration/PrivilegesInterceptorImpl.java b/src/main/java/org/opensearch/security/configuration/PrivilegesInterceptorImpl.java index e4d75c5611..4a0b25bdce 100644 --- a/src/main/java/org/opensearch/security/configuration/PrivilegesInterceptorImpl.java +++ b/src/main/java/org/opensearch/security/configuration/PrivilegesInterceptorImpl.java @@ -46,6 +46,7 @@ import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; +import org.opensearch.security.privileges.DocumentAllowList; import org.opensearch.security.privileges.PrivilegesInterceptor; import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; import org.opensearch.security.securityconf.DynamicConfigModel; @@ -203,6 +204,11 @@ && isTenantAllowed(request, action, user, tenants, requestedTenant)) { // to avoid security issue final String tenantIndexName = toUserIndexName(dashboardsIndexName, requestedTenant); + + // The new DLS/FLS implementation defaults to a "deny all" pattern in case no roles are configured + // for an index. As the PrivilegeInterceptor grants access to indices bypassing index privileges, + // we need to allow-list these indices. + applyDocumentAllowList(tenantIndexName); return newAccessGrantedReplaceResult(replaceIndex(request, dashboardsIndexName, tenantIndexName, action)); } else if (!user.getName().equals(dashboardsServerUsername)) { @@ -218,6 +224,20 @@ && isTenantAllowed(request, action, user, tenants, requestedTenant)) { return CONTINUE_EVALUATION_REPLACE_RESULT; } + private void applyDocumentAllowList(String indexName) { + DocumentAllowList documentAllowList = new DocumentAllowList(); + documentAllowList.add(indexName, "*"); + IndexAbstraction indexAbstraction = clusterService.state().getMetadata().getIndicesLookup().get(indexName); + + if (indexAbstraction instanceof IndexAbstraction.Alias) { + for (IndexMetadata index : ((IndexAbstraction.Alias) indexAbstraction).getIndices()) { + documentAllowList.add(index.getIndex().getName(), "*"); + } + } + + documentAllowList.applyTo(threadPool.getThreadContext()); + } + private String getConcreteIndexName(String name, Map indicesLookup) { for (int i = 1; i < Integer.MAX_VALUE; i++) { String concreteName = name.concat("_" + i); diff --git a/src/main/java/org/opensearch/security/configuration/Salt.java b/src/main/java/org/opensearch/security/configuration/Salt.java index 3799fa846f..e13a430c79 100644 --- a/src/main/java/org/opensearch/security/configuration/Salt.java +++ b/src/main/java/org/opensearch/security/configuration/Salt.java @@ -69,7 +69,7 @@ private Salt(final String saltAsString) { * Returns a new salt array every time it is called. 
* @return salt in bytes */ - byte[] getSalt16() { + public byte[] getSalt16() { return salt16; } diff --git a/src/main/java/org/opensearch/security/configuration/SecurityFlsDlsIndexSearcherWrapper.java b/src/main/java/org/opensearch/security/configuration/SecurityFlsDlsIndexSearcherWrapper.java index e889368315..4f7a412097 100644 --- a/src/main/java/org/opensearch/security/configuration/SecurityFlsDlsIndexSearcherWrapper.java +++ b/src/main/java/org/opensearch/security/configuration/SecurityFlsDlsIndexSearcherWrapper.java @@ -15,17 +15,17 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashSet; -import java.util.Map; import java.util.Set; import java.util.function.LongSupplier; +import java.util.function.Supplier; -import com.google.common.collect.Sets; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; +import org.opensearch.OpenSearchException; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; @@ -36,10 +36,16 @@ import org.opensearch.index.shard.ShardUtils; import org.opensearch.security.auditlog.AuditLog; import org.opensearch.security.compliance.ComplianceIndexingOperationListener; +import org.opensearch.security.privileges.DocumentAllowList; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; import org.opensearch.security.privileges.PrivilegesEvaluator; +import org.opensearch.security.privileges.dlsfls.DlsFlsBaseContext; +import org.opensearch.security.privileges.dlsfls.DlsFlsProcessedConfig; +import org.opensearch.security.privileges.dlsfls.DlsRestriction; +import org.opensearch.security.privileges.dlsfls.FieldMasking; +import org.opensearch.security.privileges.dlsfls.FieldPrivileges; import org.opensearch.security.support.ConfigConstants; -import org.opensearch.security.support.HeaderHelper; -import org.opensearch.security.support.SecurityUtils; public class SecurityFlsDlsIndexSearcherWrapper extends SystemIndexSearcherWrapper { @@ -53,8 +59,8 @@ public class SecurityFlsDlsIndexSearcherWrapper extends SystemIndexSearcherWrapp private final IndexService indexService; private final AuditLog auditlog; private final LongSupplier nowInMillis; - private final DlsQueryParser dlsQueryParser; - private final Salt salt; + private final Supplier dlsFlsProcessedConfigSupplier; + private final DlsFlsBaseContext dlsFlsBaseContext; public SecurityFlsDlsIndexSearcherWrapper( final IndexService indexService, @@ -64,7 +70,8 @@ public SecurityFlsDlsIndexSearcherWrapper( final AuditLog auditlog, final ComplianceIndexingOperationListener ciol, final PrivilegesEvaluator evaluator, - final Salt salt + final Supplier dlsFlsProcessedConfigSupplier, + final DlsFlsBaseContext dlsFlsBaseContext ) { super(indexService, settings, adminDNs, evaluator); Set metadataFieldsCopy; @@ -87,7 +94,6 @@ public SecurityFlsDlsIndexSearcherWrapper( this.clusterService = clusterService; this.indexService = indexService; this.auditlog = auditlog; - this.dlsQueryParser = new DlsQueryParser(indexService.xContentRegistry()); final boolean allowNowinDlsQueries = settings.getAsBoolean(ConfigConstants.SECURITY_UNSUPPORTED_ALLOW_NOW_IN_DLS, false); if (allowNowinDlsQueries) { nowInMillis = () -> System.currentTimeMillis(); @@ -95,7 +101,8 @@ 
public SecurityFlsDlsIndexSearcherWrapper( nowInMillis = () -> { throw new IllegalArgumentException("'now' is not allowed in DLS queries"); }; } log.debug("FLS/DLS {} enabled for index {}", this, indexService.index().getName()); - this.salt = salt; + this.dlsFlsProcessedConfigSupplier = dlsFlsProcessedConfigSupplier; + this.dlsFlsBaseContext = dlsFlsBaseContext; } @SuppressWarnings("unchecked") @@ -103,62 +110,97 @@ public SecurityFlsDlsIndexSearcherWrapper( protected DirectoryReader dlsFlsWrap(final DirectoryReader reader, boolean isAdmin) throws IOException { final ShardId shardId = ShardUtils.extractShardId(reader); + PrivilegesEvaluationContext privilegesEvaluationContext = this.dlsFlsBaseContext.getPrivilegesEvaluationContext(); - Set flsFields = null; - Set maskedFields = null; - Query dlsQuery = null; - - if (!isAdmin) { + if (log.isTraceEnabled()) { + log.trace("dlsFlsWrap(); index: {}; privilegeEvaluationContext: {}", index.getName(), privilegesEvaluationContext); + } - final Map> allowedFlsFields = (Map>) HeaderHelper.deserializeSafeFromHeader( - threadContext, - ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER - ); - final Map> queries = (Map>) HeaderHelper.deserializeSafeFromHeader( - threadContext, - ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER - ); - final Map> maskedFieldsMap = (Map>) HeaderHelper.deserializeSafeFromHeader( + if (isAdmin || privilegesEvaluationContext == null) { + return new DlsFlsFilterLeafReader.DlsFlsDirectoryReader( + reader, + FieldPrivileges.FlsRule.ALLOW_ALL, + null, + indexService, threadContext, - ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER + clusterService, + auditlog, + FieldMasking.FieldMaskingRule.ALLOW_ALL, + shardId, + metaFields ); + } + + try { + + DlsFlsProcessedConfig config = this.dlsFlsProcessedConfigSupplier.get(); + DlsRestriction dlsRestriction; + + if (!this.dlsFlsBaseContext.isDlsDoneOnFilterLevel()) { + dlsRestriction = config.getDocumentPrivileges().getRestriction(privilegesEvaluationContext, index.getName()); + } else { + dlsRestriction = DlsRestriction.NONE; + } - final String flsEval = SecurityUtils.evalMap(allowedFlsFields, index.getName()); - final String dlsEval = SecurityUtils.evalMap(queries, index.getName()); - final String maskedEval = SecurityUtils.evalMap(maskedFieldsMap, index.getName()); + FieldPrivileges.FlsRule flsRule = config.getFieldPrivileges().getRestriction(privilegesEvaluationContext, index.getName()); + FieldMasking.FieldMaskingRule fmRule = config.getFieldMasking().getRestriction(privilegesEvaluationContext, index.getName()); - if (flsEval != null) { - flsFields = Sets.union(metaFields, allowedFlsFields.get(flsEval)); + Query dlsQuery; + + if (dlsRestriction.isUnrestricted()) { + dlsQuery = null; + } else { + QueryShardContext queryShardContext = this.indexService.newQueryShardContext(shardId.getId(), null, nowInMillis, null); + dlsQuery = new ConstantScoreQuery(dlsRestriction.toBooleanQueryBuilder(queryShardContext, null).build()); } - if (dlsEval != null) { - Set unparsedDlsQueries = queries.get(dlsEval); + DocumentAllowList documentAllowList = DocumentAllowList.get(threadContext); + + if (documentAllowList.isEntryForIndexPresent(index.getName())) { + // The documentAllowList is needed for two cases: + // - DLS rules which use "term lookup queries" and thus need to access indices for which no privileges are present + // - Dashboards multi tenancy which can redirect index accesses to indices for which no normal index privileges are present + + if (!dlsRestriction.isUnrestricted() 
&& documentAllowList.isAllowed(index.getName(), "*")) { + dlsRestriction = DlsRestriction.NONE; + log.debug("Lifting DLS for {} due to present document allowlist", index.getName()); + dlsQuery = null; - if (unparsedDlsQueries != null && !unparsedDlsQueries.isEmpty()) { - QueryShardContext queryShardContext = this.indexService.newQueryShardContext(shardId.getId(), null, nowInMillis, null); - // no need for scoring here, so its possible to wrap this in a - // ConstantScoreQuery - dlsQuery = new ConstantScoreQuery(dlsQueryParser.parse(unparsedDlsQueries, queryShardContext).build()); + } + + if (!flsRule.isAllowAll() || !fmRule.isAllowAll()) { + log.debug("Lifting FLS/FM for {} due to present document allowlist", index.getName()); + flsRule = FieldPrivileges.FlsRule.ALLOW_ALL; + fmRule = FieldMasking.FieldMaskingRule.ALLOW_ALL; } } - if (maskedEval != null) { - maskedFields = new HashSet<>(); - maskedFields.addAll(maskedFieldsMap.get(maskedEval)); + if (log.isTraceEnabled()) { + log.trace( + "dlsFlsWrap(); index: {}; dlsRestriction: {}; flsRule: {}; fmRule: {}", + index.getName(), + dlsRestriction, + flsRule, + fmRule + ); } - } - return new DlsFlsFilterLeafReader.DlsFlsDirectoryReader( - reader, - flsFields, - dlsQuery, - indexService, - threadContext, - clusterService, - auditlog, - maskedFields, - shardId, - salt - ); + return new DlsFlsFilterLeafReader.DlsFlsDirectoryReader( + reader, + flsRule, + dlsQuery, + indexService, + threadContext, + clusterService, + auditlog, + fmRule, + shardId, + metaFields + ); + + } catch (PrivilegesEvaluationException e) { + log.error("Error while evaluating DLS/FLS for {}", this.index.getName(), e); + throw new OpenSearchException("Error while evaluating DLS/FLS", e); + } } } diff --git a/src/main/java/org/opensearch/security/configuration/SystemIndexSearcherWrapper.java b/src/main/java/org/opensearch/security/configuration/SystemIndexSearcherWrapper.java index 8e89b60712..b87c92c356 100644 --- a/src/main/java/org/opensearch/security/configuration/SystemIndexSearcherWrapper.java +++ b/src/main/java/org/opensearch/security/configuration/SystemIndexSearcherWrapper.java @@ -40,9 +40,11 @@ import org.opensearch.core.index.Index; import org.opensearch.index.IndexService; import org.opensearch.indices.SystemIndexRegistry; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; import org.opensearch.security.privileges.PrivilegesEvaluator; +import org.opensearch.security.privileges.PrivilegesEvaluatorResponse; +import org.opensearch.security.resolver.IndexResolverReplacer; import org.opensearch.security.securityconf.ConfigModel; -import org.opensearch.security.securityconf.SecurityRoles; import org.opensearch.security.support.ConfigConstants; import org.opensearch.security.support.HeaderHelper; import org.opensearch.security.support.WildcardMatcher; @@ -165,10 +167,13 @@ protected final boolean isBlockedSystemIndexRequest() { // allow request without user from plugin. 
return systemIndexMatcher.test(index.getName()) || matchesSystemIndexRegisteredWithCore; } - final TransportAddress caller = threadContext.getTransient(ConfigConstants.OPENDISTRO_SECURITY_REMOTE_ADDRESS); - final Set mappedRoles = evaluator.mapRoles(user, caller); - final SecurityRoles securityRoles = evaluator.getSecurityRoles(mappedRoles); - return !securityRoles.isPermittedOnSystemIndex(index.getName()); + + String permission = ConfigConstants.SYSTEM_INDEX_PERMISSION; + PrivilegesEvaluationContext context = evaluator.createContext(user, permission); + PrivilegesEvaluatorResponse result = evaluator.getActionPrivileges() + .hasExplicitIndexPrivilege(context, Set.of(permission), IndexResolverReplacer.Resolved.ofIndex(index.getName())); + + return !result.isAllowed(); } return true; } diff --git a/src/main/java/org/opensearch/security/dlic/rest/api/RolesApiAction.java b/src/main/java/org/opensearch/security/dlic/rest/api/RolesApiAction.java index 10abd83f7b..3c9f58a522 100644 --- a/src/main/java/org/opensearch/security/dlic/rest/api/RolesApiAction.java +++ b/src/main/java/org/opensearch/security/dlic/rest/api/RolesApiAction.java @@ -29,14 +29,13 @@ import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.RestRequest; import org.opensearch.rest.RestRequest.Method; -import org.opensearch.security.configuration.MaskedField; import org.opensearch.security.configuration.Salt; import org.opensearch.security.dlic.rest.validation.EndpointValidator; import org.opensearch.security.dlic.rest.validation.RequestContentValidator; import org.opensearch.security.dlic.rest.validation.RequestContentValidator.DataType; import org.opensearch.security.dlic.rest.validation.ValidationResult; +import org.opensearch.security.privileges.dlsfls.FieldMasking; import org.opensearch.security.securityconf.impl.CType; -import org.opensearch.security.support.ConfigConstants; import org.opensearch.threadpool.ThreadPool; import static org.opensearch.security.dlic.rest.api.RequestHandler.methodNotImplementedHandler; @@ -92,11 +91,7 @@ private ValidationResult validateMaskedFields(final JsonNode content) private Pair validateMaskedFieldSyntax(final JsonNode maskedFieldNode) { try { - new MaskedField( - maskedFieldNode.asText(), - SALT, - validationContext.settings().get(ConfigConstants.SECURITY_MASKED_FIELDS_ALGORITHM_DEFAULT) - ).isValid(); + new FieldMasking.FieldMaskingExpression(maskedFieldNode.asText()); } catch (Exception e) { return Pair.of(maskedFieldNode.asText(), e.getMessage()); } diff --git a/src/main/java/org/opensearch/security/filter/SecurityFilter.java b/src/main/java/org/opensearch/security/filter/SecurityFilter.java index f0ab7bb487..3323c9e38a 100644 --- a/src/main/java/org/opensearch/security/filter/SecurityFilter.java +++ b/src/main/java/org/opensearch/security/filter/SecurityFilter.java @@ -461,7 +461,7 @@ public void onFailure(Exception e) { ? 
String.format( "no permissions for %s and associated roles %s", pres.getMissingPrivileges(), - pres.getResolvedSecurityRoles() + context.getMappedRoles() ) : String.format("no permissions for %s and %s", pres.getMissingPrivileges(), user); } diff --git a/src/main/java/org/opensearch/security/filter/SecurityRestFilter.java b/src/main/java/org/opensearch/security/filter/SecurityRestFilter.java index b56f3e951d..c9d10ee2fa 100644 --- a/src/main/java/org/opensearch/security/filter/SecurityRestFilter.java +++ b/src/main/java/org/opensearch/security/filter/SecurityRestFilter.java @@ -27,12 +27,14 @@ package org.opensearch.security.filter; import java.nio.file.Path; +import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.regex.Pattern; import javax.net.ssl.SSLPeerUnverifiedException; +import com.google.common.collect.ImmutableSet; import org.apache.http.HttpStatus; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -225,17 +227,12 @@ void authorizeRequest(RestHandler original, SecurityRequestChannel request, User if (routeSupportsRestAuthorization) { PrivilegesEvaluatorResponse pres = new PrivilegesEvaluatorResponse(); NamedRoute route = ((NamedRoute) handler.get()); - // if actionNames are present evaluate those first - Set actionNames = route.actionNames(); - if (actionNames != null && !actionNames.isEmpty()) { - pres = evaluator.evaluate(user, actionNames); - } - - // now if pres.allowed is still false check for the NamedRoute name as a permission - if (!pres.isAllowed()) { - String action = route.name(); - pres = evaluator.evaluate(user, Set.of(action)); - } + // Check both route.actionNames() and route.name(). The presence of either is sufficient. + Set actionNames = ImmutableSet.builder() + .addAll(route.actionNames() != null ? route.actionNames() : Collections.emptySet()) + .add(route.name()) + .build(); + pres = evaluator.evaluate(user, route.name(), actionNames); if (log.isDebugEnabled()) { log.debug(pres.toString()); diff --git a/src/main/java/org/opensearch/security/privileges/ActionPrivileges.java b/src/main/java/org/opensearch/security/privileges/ActionPrivileges.java new file mode 100644 index 0000000000..87ac32d090 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/ActionPrivileges.java @@ -0,0 +1,1141 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.security.privileges; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.cluster.metadata.DataStream; +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; +import org.opensearch.security.resolver.IndexResolverReplacer; +import org.opensearch.security.securityconf.FlattenedActionGroups; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.support.WildcardMatcher; + +import com.selectivem.collections.CheckTable; +import com.selectivem.collections.CompactMapGroupBuilder; +import com.selectivem.collections.DeduplicatingCompactSubSetBuilder; +import com.selectivem.collections.ImmutableCompactSubSet; + +/** + * This class converts role configuration into pre-computed, optimized data structures for checking privileges. + *
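For orientation, a rough sketch of how an instance of this class is wired up (variable names are hypothetical; the constructor and the cluster state lookup are the ones used elsewhere in this patch). As the following paragraphs note, the instance is rebuilt whenever the role or action group configuration changes:

ActionPrivileges actionPrivileges = new ActionPrivileges(
    rolesConfiguration,                                            // SecurityDynamicConfiguration<RoleV7>
    flattenedActionGroups,                                         // FlattenedActionGroups
    () -> clusterService.state().getMetadata().getIndicesLookup(), // Supplier<Map<String, IndexAbstraction>>
    settings                                                       // node Settings
);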

+ * With the exception of the statefulIndex property, instances of this class are immutable. The life-cycle of an + * instance of this class corresponds to the life-cycle of the role and action group configuration. If the role or + * action group configuration is changed, a new instance needs to be built. + */ +public class ActionPrivileges extends ClusterStateMetadataDependentPrivileges { + + /** + * This setting controls the allowed heap size of the precomputed index privileges (in the inner class StatefulIndexPrivileges). + * If the size of the precomputed index privileges exceeds the number of bytes configured here, the data structure will be truncated. Privileges evaluation will + * continue to work correctly, but it will be slower. + *
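If the truncation described above becomes a concern for clusters with very many indices and roles, the limit can be raised through the setting defined just below. A minimal sketch using the standard Settings builder (the value is only an example):

Settings settings = Settings.builder()
    .put("plugins.security.privileges_evaluation.precomputed_privileges.max_heap_size", "20mb")
    .build();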

+ * This settings defaults to 10 MB. This is a generous limit. Experiments have shown that an example setup with + * 10,000 indices and 1,000 roles requires about 1 MB of heap. 100,000 indices and 100 roles require about 9 MB of heap. + * (Of course, these numbers can vary widely based on the actual role configuration). + */ + public static Setting PRECOMPUTED_PRIVILEGES_MAX_HEAP_SIZE = Setting.memorySizeSetting( + "plugins.security.privileges_evaluation.precomputed_privileges.max_heap_size", + new ByteSizeValue(10, ByteSizeUnit.MB), + Setting.Property.NodeScope + ); + + private static final Logger log = LogManager.getLogger(ActionPrivileges.class); + + private final ClusterPrivileges cluster; + private final IndexPrivileges index; + private final SecurityDynamicConfiguration roles; + private final FlattenedActionGroups actionGroups; + private final ImmutableSet wellKnownClusterActions; + private final ImmutableSet wellKnownIndexActions; + private final Supplier> indexMetadataSupplier; + private final ByteSizeValue statefulIndexMaxHeapSize; + + private final AtomicReference statefulIndex = new AtomicReference<>(); + + public ActionPrivileges( + SecurityDynamicConfiguration roles, + FlattenedActionGroups actionGroups, + Supplier> indexMetadataSupplier, + Settings settings, + ImmutableSet wellKnownClusterActions, + ImmutableSet wellKnownIndexActions, + ImmutableSet explicitlyRequiredIndexActions + ) { + this.cluster = new ClusterPrivileges(roles, actionGroups, wellKnownClusterActions); + this.index = new IndexPrivileges(roles, actionGroups, wellKnownIndexActions, explicitlyRequiredIndexActions); + this.roles = roles; + this.actionGroups = actionGroups; + this.wellKnownClusterActions = wellKnownClusterActions; + this.wellKnownIndexActions = wellKnownIndexActions; + this.indexMetadataSupplier = indexMetadataSupplier; + this.statefulIndexMaxHeapSize = PRECOMPUTED_PRIVILEGES_MAX_HEAP_SIZE.get(settings); + } + + public ActionPrivileges( + SecurityDynamicConfiguration roles, + FlattenedActionGroups actionGroups, + Supplier> indexMetadataSupplier, + Settings settings + ) { + this( + roles, + actionGroups, + indexMetadataSupplier, + settings, + WellKnownActions.CLUSTER_ACTIONS, + WellKnownActions.INDEX_ACTIONS, + WellKnownActions.EXPLICITLY_REQUIRED_INDEX_ACTIONS + ); + } + + public PrivilegesEvaluatorResponse hasClusterPrivilege(PrivilegesEvaluationContext context, String action) { + return cluster.providesPrivilege(context, action, context.getMappedRoles()); + } + + public PrivilegesEvaluatorResponse hasAnyClusterPrivilege(PrivilegesEvaluationContext context, Set actions) { + return cluster.providesAnyPrivilege(context, actions, context.getMappedRoles()); + } + + /** + * Checks whether this instance provides explicit privileges for the combination of the provided action and the + * provided roles. + *

+ * Explicit means here that the privilege is not granted via a "*" action privilege wildcard. Other patterns + * are possible. See also: https://github.com/opensearch-project/security/pull/2411 and https://github.com/opensearch-project/security/issues/3038 + *
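To illustrate the distinction, assume a context whose only mapped role grants the "*" cluster permission (a sketch; the action name is made up):

PrivilegesEvaluatorResponse viaWildcard = actionPrivileges.hasClusterPrivilege(context, "cluster:admin/example/action");
PrivilegesEvaluatorResponse explicitOnly = actionPrivileges.hasExplicitClusterPrivilege(context, "cluster:admin/example/action");
// viaWildcard.isAllowed()  -> true:  the "*" pattern matches the action
// explicitOnly.isAllowed() -> false: "*" does not count as an explicit grant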

+ * Returns a PrivilegesEvaluatorResponse with allowed=true if privileges are available. + * Otherwise, allowed will be false and missingPrivileges will contain the name of the given action. + */ + public PrivilegesEvaluatorResponse hasExplicitClusterPrivilege(PrivilegesEvaluationContext context, String action) { + return cluster.providesExplicitPrivilege(context, action, context.getMappedRoles()); + } + + /** + * Checks whether this instance provides privileges for the combination of the provided action, + * the provided indices and the provided roles. + *

+ * Returns a PrivilegesEvaluatorResponse with allowed=true if privileges are available. + *

+ * If privileges are only available for a sub-set of indices, isPartiallyOk() will return true + * and the indices for which privileges are available are returned by getAvailableIndices(). This allows the + * do_not_fail_on_forbidden behaviour. + */ + public PrivilegesEvaluatorResponse hasIndexPrivilege( + PrivilegesEvaluationContext context, + Set actions, + IndexResolverReplacer.Resolved resolvedIndices + ) { + PrivilegesEvaluatorResponse response = this.index.providesWildcardPrivilege(context, actions); + if (response != null) { + return response; + } + + if (!resolvedIndices.isLocalAll() && resolvedIndices.getAllIndices().isEmpty()) { + // This is necessary for requests which operate on remote indices. + // Access control for the remote indices will be performed on the remote cluster. + log.debug("No local indices; grant the request"); + return PrivilegesEvaluatorResponse.ok(); + } + + // TODO one might want to consider to create a semantic wrapper for action in order to be better tell apart + // what's the action and what's the index in the generic parameters of CheckTable. + CheckTable checkTable = CheckTable.create( + resolvedIndices.getAllIndicesResolved(context.getClusterStateSupplier(), context.getIndexNameExpressionResolver()), + actions + ); + + StatefulIndexPrivileges statefulIndex = this.statefulIndex.get(); + PrivilegesEvaluatorResponse resultFromStatefulIndex = null; + + Map indexMetadata = this.indexMetadataSupplier.get(); + + if (statefulIndex != null) { + resultFromStatefulIndex = statefulIndex.providesPrivilege(actions, resolvedIndices, context, checkTable, indexMetadata); + + if (resultFromStatefulIndex != null) { + // If we get a result from statefulIndex, we are done. + return resultFromStatefulIndex; + } + + // Otherwise, we need to carry on checking privileges using the non-stateful object. + // Note: statefulIndex.hasPermission() modifies as a side effect the checkTable. + // We can carry on using this as an intermediate result and further complete checkTable below. + } + + return this.index.providesPrivilege(context, actions, resolvedIndices, checkTable, indexMetadata); + } + + /** + * Checks whether this instance provides explicit privileges for the combination of the provided action, + * the provided indices and the provided roles. + *
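A sketch of how a caller of hasIndexPrivilege above might consume the response; the surrounding request handling is hypothetical:

PrivilegesEvaluatorResponse response = actionPrivileges.hasIndexPrivilege(context, actions, resolvedIndices);
if (response.isAllowed()) {
    // full privileges: continue with the request unchanged
} else if (response.isPartiallyOk()) {
    Set<String> allowedIndices = response.getAvailableIndices();
    // do_not_fail_on_forbidden: reduce the request to allowedIndices
} else {
    // forbidden: response.getMissingPrivileges() names the missing action(s)
}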

+ * Explicit means here that the privilege is not granted via a "*" action privilege wildcard. Other patterns + * are possible. See also: https://github.com/opensearch-project/security/pull/2411 and https://github.com/opensearch-project/security/issues/3038 + */ + public PrivilegesEvaluatorResponse hasExplicitIndexPrivilege( + PrivilegesEvaluationContext context, + Set actions, + IndexResolverReplacer.Resolved resolvedIndices + ) { + CheckTable checkTable = CheckTable.create(resolvedIndices.getAllIndices(), actions); + return this.index.providesExplicitPrivilege(context, actions, resolvedIndices, checkTable, this.indexMetadataSupplier.get()); + } + + /** + * Updates the stateful index configuration with the given indices. Should be normally only called by + * updateStatefulIndexPrivilegesAsync(). Package visible for testing. + */ + void updateStatefulIndexPrivileges(Map indices, long metadataVersion) { + StatefulIndexPrivileges statefulIndex = this.statefulIndex.get(); + + indices = StatefulIndexPrivileges.relevantOnly(indices); + + if (statefulIndex == null || !statefulIndex.indices.equals(indices)) { + long start = System.currentTimeMillis(); + this.statefulIndex.set( + new StatefulIndexPrivileges(roles, actionGroups, wellKnownIndexActions, indices, metadataVersion, statefulIndexMaxHeapSize) + ); + long duration = System.currentTimeMillis() - start; + log.debug("Updating StatefulIndexPrivileges took {} ms", duration); + } else { + synchronized (this) { + // Even if the indices did not change, update the metadataVersion in statefulIndex to reflect + // that the instance is up-to-date. + if (statefulIndex.metadataVersion < metadataVersion) { + statefulIndex.metadataVersion = metadataVersion; + } + } + } + } + + @Override + protected void updateClusterStateMetadata(Metadata metadata) { + this.updateStatefulIndexPrivileges(metadata.getIndicesLookup(), metadata.version()); + } + + @Override + protected long getCurrentlyUsedMetadataVersion() { + StatefulIndexPrivileges statefulIndex = this.statefulIndex.get(); + return statefulIndex != null ? statefulIndex.metadataVersion : 0; + } + + int getEstimatedStatefulIndexByteSize() { + StatefulIndexPrivileges statefulIndex = this.statefulIndex.get(); + + if (statefulIndex != null) { + return statefulIndex.estimatedByteSize; + } else { + return 0; + } + } + + /** + * Pre-computed, optimized cluster privilege maps. Instances of this class are immutable. + *

+ * The data structures in this class are optimized for answering the question + * "I have action A and roles [x,y,z]. Do I have authorization to execute the action?". + *
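Conceptually, and using plain JDK collections with made-up role names instead of the compact collections used below, the precomputed lookup boils down to:

Map<String, Set<String>> actionToRoles = Map.of(
    "cluster:monitor/health", Set.of("monitoring_role", "ops_role"),
    "cluster:monitor/state", Set.of("ops_role")
);
Set<String> mappedRoles = Set.of("monitoring_role");
boolean allowed = actionToRoles.getOrDefault("cluster:monitor/health", Set.of())
    .stream()
    .anyMatch(mappedRoles::contains); // one hash lookup plus a small set intersection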

+ * The check will be possible in time O(1) for "well-known" actions when the user actually has the privileges. + */ + static class ClusterPrivileges { + + /** + * Maps names of actions to the roles that provide a privilege for the respective action. + * Note that the mapping is not comprehensive; additionally, the data structures rolesWithWildcardPermissions + * and rolesToActionMatcher need to be considered for a full view of the privileges. + *

+ * This does not include privileges obtained via "*" action patterns. This is both meant as an + * optimization and to support explicit privileges. + */ + private final ImmutableMap<String, ImmutableCompactSubSet<String>> actionToRoles; + + /** + * This contains all role names that provide wildcard (*) privileges for cluster actions. + * This avoids a blow-up of the actionToRoles object by such roles. + */ + private final ImmutableSet<String> rolesWithWildcardPermissions; + + /** + * This maps role names to a matcher which matches the action names this role provides privileges for. + * This is only used as a last resort if the test with actionToRoles and rolesWithWildcardPermissions failed. + * This is only necessary for actions which are not contained in the list of "well-known" actions provided + * during construction. + * + * This does not include privileges obtained via "*" action patterns. This is both meant as an + * optimization and to support explicit privileges. + */ + private final ImmutableMap<String, WildcardMatcher> rolesToActionMatcher; + + private final ImmutableSet<String> wellKnownClusterActions; + + /** + * Creates pre-computed cluster privileges based on the given parameters. + *

+ * This constructor will not throw an exception if it encounters any invalid configuration (that is, + * in particular, unparseable regular expressions). Rather, it will just log an error. This is okay, as it + * just results in fewer available privileges. However, having a proper error reporting mechanism would be + * kind of nice. + */ + ClusterPrivileges( + SecurityDynamicConfiguration roles, + FlattenedActionGroups actionGroups, + ImmutableSet wellKnownClusterActions + ) { + DeduplicatingCompactSubSetBuilder roleSetBuilder = new DeduplicatingCompactSubSetBuilder<>( + roles.getCEntries().keySet() + ); + Map> actionToRoles = new HashMap<>(); + ImmutableSet.Builder rolesWithWildcardPermissions = ImmutableSet.builder(); + ImmutableMap.Builder rolesToActionMatcher = ImmutableMap.builder(); + + for (Map.Entry entry : roles.getCEntries().entrySet()) { + try { + String roleName = entry.getKey(); + RoleV7 role = entry.getValue(); + + roleSetBuilder.next(roleName); + + ImmutableSet permissionPatterns = actionGroups.resolve(role.getCluster_permissions()); + + // This list collects all the matchers for action names that will be found for the current role + List wildcardMatchers = new ArrayList<>(); + + for (String permission : permissionPatterns) { + // If we have a permission which does not use any pattern, we just simply add it to the + // "actionToRoles" map. + // Otherwise, we match the pattern against the provided well-known cluster actions and add + // these to the "actionToRoles" map. Additionally, for the case that the well-known cluster + // actions are not complete, we also collect the matcher to be used as a last resort later. + + if (WildcardMatcher.isExact(permission)) { + actionToRoles.computeIfAbsent(permission, k -> roleSetBuilder.createSubSetBuilder()).add(roleName); + } else if (permission.equals("*")) { + // Special case: Roles with a wildcard "*" giving privileges for all actions. We will not resolve + // this stuff, but just note separately that this role just gets all the cluster privileges. + rolesWithWildcardPermissions.add(roleName); + } else { + WildcardMatcher wildcardMatcher = WildcardMatcher.from(permission); + Set matchedActions = wildcardMatcher.getMatchAny( + wellKnownClusterActions, + Collectors.toUnmodifiableSet() + ); + + for (String action : matchedActions) { + actionToRoles.computeIfAbsent(action, k -> roleSetBuilder.createSubSetBuilder()).add(roleName); + } + + wildcardMatchers.add(wildcardMatcher); + } + } + + if (!wildcardMatchers.isEmpty()) { + rolesToActionMatcher.put(roleName, WildcardMatcher.from(wildcardMatchers)); + } + } catch (Exception e) { + log.error("Unexpected exception while processing role: {}\nIgnoring role.", entry.getKey(), e); + } + } + + DeduplicatingCompactSubSetBuilder.Completed completedRoleSetBuilder = roleSetBuilder.build(); + + this.actionToRoles = actionToRoles.entrySet() + .stream() + .collect(ImmutableMap.toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().build(completedRoleSetBuilder))); + this.rolesWithWildcardPermissions = rolesWithWildcardPermissions.build(); + this.rolesToActionMatcher = rolesToActionMatcher.build(); + this.wellKnownClusterActions = wellKnownClusterActions; + } + + /** + * Checks whether this instance provides privileges for the combination of the provided action and the + * provided roles. Returns a PrivilegesEvaluatorResponse with allowed=true if privileges are available. + * Otherwise, allowed will be false and missingPrivileges will contain the name of the given action. 
+ */ + PrivilegesEvaluatorResponse providesPrivilege(PrivilegesEvaluationContext context, String action, Set roles) { + + // 1: Check roles with wildcards + if (CollectionUtils.containsAny(roles, this.rolesWithWildcardPermissions)) { + return PrivilegesEvaluatorResponse.ok(); + } + + // 2: Check well-known actions - this should cover most cases + ImmutableCompactSubSet rolesWithPrivileges = this.actionToRoles.get(action); + + if (rolesWithPrivileges != null && rolesWithPrivileges.containsAny(roles)) { + return PrivilegesEvaluatorResponse.ok(); + } + + // 3: Only if everything else fails: Check the matchers in case we have a non-well-known action + if (!this.wellKnownClusterActions.contains(action)) { + for (String role : roles) { + WildcardMatcher matcher = this.rolesToActionMatcher.get(role); + + if (matcher != null && matcher.test(action)) { + return PrivilegesEvaluatorResponse.ok(); + } + } + } + + return PrivilegesEvaluatorResponse.insufficient(action); + } + + /** + * Checks whether this instance provides explicit privileges for the combination of the provided action and the + * provided roles. + *

+ * Explicit means here that the privilege is not granted via a "*" action privilege wildcard. Other patterns + * are possible. See also: https://github.com/opensearch-project/security/pull/2411 and https://github.com/opensearch-project/security/issues/3038 + *

+ * Returns a PrivilegesEvaluatorResponse with allowed=true if privileges are available. + * Otherwise, allowed will be false and missingPrivileges will contain the name of the given action. + */ + PrivilegesEvaluatorResponse providesExplicitPrivilege(PrivilegesEvaluationContext context, String action, Set roles) { + + // 1: Check well-known actions - this should cover most cases + ImmutableCompactSubSet rolesWithPrivileges = this.actionToRoles.get(action); + + if (rolesWithPrivileges != null && rolesWithPrivileges.containsAny(roles)) { + return PrivilegesEvaluatorResponse.ok(); + } + + // 2: Only if everything else fails: Check the matchers in case we have a non-well-known action + if (!this.wellKnownClusterActions.contains(action)) { + for (String role : roles) { + WildcardMatcher matcher = this.rolesToActionMatcher.get(role); + + if (matcher != null && matcher.test(action)) { + return PrivilegesEvaluatorResponse.ok(); + } + } + } + + return PrivilegesEvaluatorResponse.insufficient(action); + } + + /** + * Checks whether this instance provides privileges for the combination of any of the provided actions and the + * provided roles. Returns a PrivilegesEvaluatorResponse with allowed=true if privileges are available. + * Otherwise, allowed will be false and missingPrivileges will contain the name of the given action. + */ + PrivilegesEvaluatorResponse providesAnyPrivilege(PrivilegesEvaluationContext context, Set actions, Set roles) { + // 1: Check roles with wildcards + if (CollectionUtils.containsAny(roles, this.rolesWithWildcardPermissions)) { + return PrivilegesEvaluatorResponse.ok(); + } + + // 2: Check well-known actions - this should cover most cases + for (String action : actions) { + ImmutableCompactSubSet rolesWithPrivileges = this.actionToRoles.get(action); + + if (rolesWithPrivileges != null && rolesWithPrivileges.containsAny(roles)) { + return PrivilegesEvaluatorResponse.ok(); + } + } + + // 3: Only if everything else fails: Check the matchers in case we have a non-well-known action + for (String action : actions) { + if (!this.wellKnownClusterActions.contains(action)) { + for (String role : roles) { + WildcardMatcher matcher = this.rolesToActionMatcher.get(role); + + if (matcher != null && matcher.test(action)) { + return PrivilegesEvaluatorResponse.ok(); + } + } + } + } + + if (actions.size() == 1) { + return PrivilegesEvaluatorResponse.insufficient(actions.iterator().next()); + } else { + return PrivilegesEvaluatorResponse.insufficient("any of " + actions); + } + } + } + + /** + * Partially pre-computed, optimized index privilege maps. Instances of this class are immutable. + *

+ * This class is independent of the actual indices present in the cluster. See StatefulIndexPrivileges for a class + * that also takes actual indices into account and is thus fully pre-computed. + *

+ * Purposes of this class: + *

+ * 1. Answer the question "given an action and a set of roles, do I have wildcard index privileges" in O(1) + *

+ * 2. Pre-compute the data structures as far as possible in cases where StatefulIndexPrivileges cannot check the + * permissions. This is the case when: + *

+ * a) StatefulIndexPermissions does not cover all indices + * b) The requested index does not exist (especially the case for create index actions) + * c) The index patterns use placeholders like "${user.name}" - these can be only resolved when the User object is present. + * d) The action is not among the "well known" actions. + */ + static class IndexPrivileges { + /** + * Maps role names to concrete action names to IndexPattern objects which define the indices the privileges apply to. + */ + private final ImmutableMap> rolesToActionToIndexPattern; + + /** + * Maps role names to action names matchers to IndexPattern objects which define the indices the privileges apply to. + * This is especially for "non-well-known" actions. + */ + private final ImmutableMap> rolesToActionPatternToIndexPattern; + + /** + * Maps action names to the roles which provide wildcard ("*") index privileges for the respective action. + * This allows to answer the question "given an action and a set of roles, do I have wildcard index privileges" + * in O(1) + */ + private final ImmutableMap> actionToRolesWithWildcardIndexPrivileges; + + /** + * A pre-defined set of action names that is used to pre-compute the result of action patterns. + */ + private final ImmutableSet wellKnownIndexActions; + + /** + * A pre-defined set of action names that is included in the rolesToExplicitActionToIndexPattern data structure + */ + private final ImmutableSet explicitlyRequiredIndexActions; + + /** + * Maps role names to concrete action names to IndexPattern objects which define the indices the privileges apply to. + * The action names are only explicitly granted privileges which are listed in explicitlyRequiredIndexActions. + *

+ * Compare https://github.com/opensearch-project/security/pull/2887 + */ + private final ImmutableMap> rolesToExplicitActionToIndexPattern; + + /** + * Creates pre-computed index privileges based on the given parameters. + *

+ * This constructor will not throw an exception if it encounters any invalid configuration (that is, + * in particular, unparseable regular expressions). Rather, it will just log an error. This is okay, as it + * just results in fewer available privileges. However, having a proper error reporting mechanism would be + * kind of nice. + */ + IndexPrivileges( + SecurityDynamicConfiguration roles, + FlattenedActionGroups actionGroups, + ImmutableSet wellKnownIndexActions, + ImmutableSet explicitlyRequiredIndexActions + ) { + DeduplicatingCompactSubSetBuilder roleSetBuilder = new DeduplicatingCompactSubSetBuilder<>( + roles.getCEntries().keySet() + ); + + Map> rolesToActionToIndexPattern = new HashMap<>(); + Map> rolesToActionPatternToIndexPattern = new HashMap<>(); + Map> actionToRolesWithWildcardIndexPrivileges = new HashMap<>(); + Map> rolesToExplicitActionToIndexPattern = new HashMap<>(); + + for (Map.Entry entry : roles.getCEntries().entrySet()) { + try { + String roleName = entry.getKey(); + RoleV7 role = entry.getValue(); + + roleSetBuilder.next(roleName); + + for (RoleV7.Index indexPermissions : role.getIndex_permissions()) { + ImmutableSet permissions = actionGroups.resolve(indexPermissions.getAllowed_actions()); + + for (String permission : permissions) { + // If we have a permission which does not use any pattern, we just simply add it to the + // "rolesToActionToIndexPattern" map. + // Otherwise, we match the pattern against the provided well-known index actions and add + // these to the "rolesToActionToIndexPattern" map. Additionally, for the case that the + // well-known index actions are not complete, we also collect the actionMatcher to be used + // as a last resort later. + + if (WildcardMatcher.isExact(permission)) { + rolesToActionToIndexPattern.computeIfAbsent(roleName, k -> new HashMap<>()) + .computeIfAbsent(permission, k -> new IndexPattern.Builder()) + .add(indexPermissions.getIndex_patterns()); + + if (explicitlyRequiredIndexActions.contains(permission)) { + rolesToExplicitActionToIndexPattern.computeIfAbsent(roleName, k -> new HashMap<>()) + .computeIfAbsent(permission, k -> new IndexPattern.Builder()) + .add(indexPermissions.getIndex_patterns()); + } + + if (indexPermissions.getIndex_patterns().contains("*")) { + actionToRolesWithWildcardIndexPrivileges.computeIfAbsent( + permission, + k -> roleSetBuilder.createSubSetBuilder() + ).add(roleName); + } + } else { + WildcardMatcher actionMatcher = WildcardMatcher.from(permission); + + for (String action : actionMatcher.iterateMatching(wellKnownIndexActions)) { + rolesToActionToIndexPattern.computeIfAbsent(roleName, k -> new HashMap<>()) + .computeIfAbsent(action, k -> new IndexPattern.Builder()) + .add(indexPermissions.getIndex_patterns()); + + if (indexPermissions.getIndex_patterns().contains("*")) { + actionToRolesWithWildcardIndexPrivileges.computeIfAbsent( + permission, + k -> roleSetBuilder.createSubSetBuilder() + ).add(roleName); + } + } + + rolesToActionPatternToIndexPattern.computeIfAbsent(roleName, k -> new HashMap<>()) + .computeIfAbsent(actionMatcher, k -> new IndexPattern.Builder()) + .add(indexPermissions.getIndex_patterns()); + + if (actionMatcher != WildcardMatcher.ANY) { + for (String action : actionMatcher.iterateMatching(explicitlyRequiredIndexActions)) { + rolesToExplicitActionToIndexPattern.computeIfAbsent(roleName, k -> new HashMap<>()) + .computeIfAbsent(action, k -> new IndexPattern.Builder()) + .add(indexPermissions.getIndex_patterns()); + } + } + } + } + } + } catch (Exception e) { + 
log.error("Unexpected exception while processing role: {}\nIgnoring role.", entry.getKey(), e); + } + } + + DeduplicatingCompactSubSetBuilder.Completed completedRoleSetBuilder = roleSetBuilder.build(); + + this.rolesToActionToIndexPattern = rolesToActionToIndexPattern.entrySet() + .stream() + .collect( + ImmutableMap.toImmutableMap( + Map.Entry::getKey, + entry -> entry.getValue() + .entrySet() + .stream() + .collect(ImmutableMap.toImmutableMap(Map.Entry::getKey, entry2 -> entry2.getValue().build())) + ) + ); + + this.rolesToActionPatternToIndexPattern = rolesToActionPatternToIndexPattern.entrySet() + .stream() + .collect( + ImmutableMap.toImmutableMap( + Map.Entry::getKey, + entry -> entry.getValue() + .entrySet() + .stream() + .collect(ImmutableMap.toImmutableMap(Map.Entry::getKey, entry2 -> entry2.getValue().build())) + ) + ); + + this.actionToRolesWithWildcardIndexPrivileges = actionToRolesWithWildcardIndexPrivileges.entrySet() + .stream() + .collect(ImmutableMap.toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().build(completedRoleSetBuilder))); + + this.rolesToExplicitActionToIndexPattern = rolesToExplicitActionToIndexPattern.entrySet() + .stream() + .collect( + ImmutableMap.toImmutableMap( + Map.Entry::getKey, + entry -> entry.getValue() + .entrySet() + .stream() + .collect(ImmutableMap.toImmutableMap(Map.Entry::getKey, entry2 -> entry2.getValue().build())) + ) + ); + + this.wellKnownIndexActions = wellKnownIndexActions; + this.explicitlyRequiredIndexActions = explicitlyRequiredIndexActions; + } + + /** + * Checks whether this instance provides privileges for the combination of the provided action, + * the provided indices and the provided roles. + *

+ * Returns a PrivilegesEvaluatorResponse with allowed=true if privileges are available. + *

+ * If privileges are only available for a sub-set of indices, isPartiallyOk() will return true + * and the indices for which privileges are available are returned by getAvailableIndices(). This allows the + * do_not_fail_on_forbidden behaviour. + *
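Judging from the call sites in this class, the CheckTable passed into this method behaves roughly as follows (index and action names are made up; check() marks a cell and returns whether the whole table is now complete):

CheckTable<String, String> checkTable = CheckTable.create(Set.of("index_a", "index_b"), Set.of("indices:data/read/search"));
boolean complete = checkTable.check("index_a", "indices:data/read/search"); // false: index_b is still unchecked
checkTable.isComplete();       // false as well
checkTable.getCompleteRows();  // {"index_a"}: indices for which all actions are checked
checkTable.iterateUncheckedRows("indices:data/read/search"); // yields "index_b"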

+ * This method will only verify privileges for the index/action combinations which are un-checked in + * the checkTable instance provided to this method. Checked index/action combinations are considered to be + * "already fulfilled by other means" - usually that comes from the stateful data structure. + * As a side-effect, this method will further mark the available index/action combinations in the provided + * checkTable instance as checked. + */ + PrivilegesEvaluatorResponse providesPrivilege( + PrivilegesEvaluationContext context, + Set actions, + IndexResolverReplacer.Resolved resolvedIndices, + CheckTable checkTable, + Map indexMetadata + ) { + List exceptions = new ArrayList<>(); + + for (String role : context.getMappedRoles()) { + ImmutableMap actionToIndexPattern = this.rolesToActionToIndexPattern.get(role); + + if (actionToIndexPattern != null) { + for (String action : actions) { + IndexPattern indexPattern = actionToIndexPattern.get(action); + + if (indexPattern != null) { + for (String index : checkTable.iterateUncheckedRows(action)) { + try { + if (indexPattern.matches(index, context, indexMetadata) && checkTable.check(index, action)) { + return PrivilegesEvaluatorResponse.ok(); + } + } catch (PrivilegesEvaluationException e) { + // We can ignore these errors, as this max leads to fewer privileges than available + log.error("Error while evaluating index pattern of role {}. Ignoring entry", role, e); + exceptions.add(new PrivilegesEvaluationException("Error while evaluating role " + role, e)); + } + } + } + } + } + } + + // If all actions are well-known, the index.rolesToActionToIndexPattern data structure that was evaluated above, + // would have contained all the actions if privileges are provided. If there are non-well-known actions among the + // actions, we also have to evaluate action patterns to check the authorization + + boolean allActionsWellKnown = actions.stream().allMatch(a -> this.wellKnownIndexActions.contains(a)); + + if (!checkTable.isComplete() && !allActionsWellKnown) { + top: for (String role : context.getMappedRoles()) { + ImmutableMap actionPatternToIndexPattern = this.rolesToActionPatternToIndexPattern.get( + role + ); + + if (actionPatternToIndexPattern != null) { + for (String action : actions) { + if (this.wellKnownIndexActions.contains(action)) { + continue; + } + + for (Map.Entry entry : actionPatternToIndexPattern.entrySet()) { + WildcardMatcher actionMatcher = entry.getKey(); + IndexPattern indexPattern = entry.getValue(); + + if (actionMatcher.test(action)) { + for (String index : checkTable.iterateUncheckedRows(action)) { + try { + if (indexPattern.matches(index, context, indexMetadata) && checkTable.check(index, action)) { + break top; + } + } catch (PrivilegesEvaluationException e) { + // We can ignore these errors, as this max leads to fewer privileges than available + log.error("Error while evaluating index pattern of role {}. Ignoring entry", role, e); + exceptions.add(new PrivilegesEvaluationException("Error while evaluating role " + role, e)); + } + } + } + } + } + } + } + } + + if (checkTable.isComplete()) { + return PrivilegesEvaluatorResponse.ok(); + } + + Set availableIndices = checkTable.getCompleteRows(); + + if (!availableIndices.isEmpty()) { + return PrivilegesEvaluatorResponse.partiallyOk(availableIndices, checkTable).evaluationExceptions(exceptions); + } + + return PrivilegesEvaluatorResponse.insufficient(checkTable) + .reason( + resolvedIndices.getAllIndices().size() == 1 + ? 
"Insufficient permissions for the referenced index" + : "None of " + resolvedIndices.getAllIndices().size() + " referenced indices has sufficient permissions" + ) + .evaluationExceptions(exceptions); + } + + /** + * Returns PrivilegesEvaluatorResponse.ok() if the user identified in the context object has privileges for all + * indices (using *) for the given actions. Returns null otherwise. Then, further checks must be done to check + * the user's privileges. + */ + PrivilegesEvaluatorResponse providesWildcardPrivilege(PrivilegesEvaluationContext context, Set actions) { + ImmutableSet effectiveRoles = context.getMappedRoles(); + + for (String action : actions) { + ImmutableCompactSubSet rolesWithWildcardIndexPrivileges = this.actionToRolesWithWildcardIndexPrivileges.get(action); + + if (rolesWithWildcardIndexPrivileges == null || !rolesWithWildcardIndexPrivileges.containsAny(effectiveRoles)) { + return null; + } + } + + return PrivilegesEvaluatorResponse.ok(); + } + + /** + * Checks whether this instance provides explicit privileges for the combination of the provided action, + * the provided indices and the provided roles. + *

+ * Explicit means here that the privilege is not granted via a "*" action privilege wildcard. Other patterns + * are possible. See also: https://github.com/opensearch-project/security/pull/2411 and https://github.com/opensearch-project/security/issues/3038 + */ + PrivilegesEvaluatorResponse providesExplicitPrivilege( + PrivilegesEvaluationContext context, + Set actions, + IndexResolverReplacer.Resolved resolvedIndices, + CheckTable checkTable, + Map indexMetadata + ) { + List exceptions = new ArrayList<>(); + + if (!CollectionUtils.containsAny(actions, this.explicitlyRequiredIndexActions)) { + return PrivilegesEvaluatorResponse.insufficient(CheckTable.create(ImmutableSet.of("_"), actions)); + } + + for (String role : context.getMappedRoles()) { + ImmutableMap actionToIndexPattern = this.rolesToExplicitActionToIndexPattern.get(role); + + if (actionToIndexPattern != null) { + for (String action : actions) { + IndexPattern indexPattern = actionToIndexPattern.get(action); + + if (indexPattern != null) { + for (String index : checkTable.iterateUncheckedRows(action)) { + try { + if (indexPattern.matches(index, context, indexMetadata) && checkTable.check(index, action)) { + return PrivilegesEvaluatorResponse.ok(); + } + } catch (PrivilegesEvaluationException e) { + // We can ignore these errors, as this max leads to fewer privileges than available + log.error("Error while evaluating index pattern of role {}. Ignoring entry", role, e); + exceptions.add(new PrivilegesEvaluationException("Error while evaluating role " + role, e)); + } + } + } + } + } + } + + return PrivilegesEvaluatorResponse.insufficient(checkTable) + .reason("No explicit privileges have been provided for the referenced indices.") + .evaluationExceptions(exceptions); + } + } + + /** + * Fully pre-computed, optimized index privilege maps. + *

+ * The data structures in this class are optimized to answer the question "given an action, an index and a set of + * roles, do I have the respective privilege" in O(1). + *

+ * There are cases where this class will not be able to answer this question. These cases are the following: + * - The requested index does not exist (especially the case for create index actions) + * - The action is not well-known. + * - The indices used for pre-computing the data structures are not complete (possibly due to race conditions) + * - The role definition uses placeholders (like "${user.name}") in index patterns. + * - The role definition grants privileges to all indices (via "*") (these are omitted here for efficiency reasons). + * In such cases, the question needs to be answered by IndexPrivileges (see above). + *
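As an example for the placeholder case mentioned above, a role of the following shape (hypothetical role name, in the usual roles configuration YAML) can only be evaluated once the concrete user is known; fromYaml may throw a checked exception, which is omitted in this sketch:

SecurityDynamicConfiguration<RoleV7> roles = SecurityDynamicConfiguration.fromYaml(
    "per_user_role:\n"
        + "  index_permissions:\n"
        + "    - index_patterns:\n"
        + "        - 'data-${user.name}-*'\n"
        + "      allowed_actions:\n"
        + "        - 'indices:data/read/search'",
    CType.ROLES
);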

+ * This class also takes into account aliases and data streams. If a permission is granted on an alias, it will be + * automatically inherited by the indices it points to. The same holds for the backing indices of a data stream. + */ + static class StatefulIndexPrivileges { + + /** + * Maps concrete action names to concrete index names and then to the roles which provide privileges for the + * combination of action and index. This map can contain besides indices also names of data streams and aliases. + * For aliases and data streams, it will then contain both the actual alias/data stream and the backing indices. + */ + private final Map>> actionToIndexToRoles; + + /** + * The index information that was used to construct this instance. + */ + private final Map indices; + + /** + * The well known index actions that were used to construct this instance. + */ + private final ImmutableSet wellKnownIndexActions; + + private final int estimatedByteSize; + + private long metadataVersion; + + /** + * Creates pre-computed index privileges based on the given parameters. + *
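Pictured with plain JDK collections and made-up names, the actionToIndexToRoles structure described above records a grant on an alias both for the alias itself and for its member indices:

Map<String, Map<String, Set<String>>> actionToIndexToRoles = Map.of(
    "indices:data/read/search", Map.of(
        "logs", Set.of("log_reader"),        // the alias itself
        "logs-000001", Set.of("log_reader"), // member indices inherit the grant
        "logs-000002", Set.of("log_reader")
    )
);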

+ * This constructor will not throw an exception if it encounters any invalid configuration (that is, + * in particular, unparseable regular expressions). Rather, it will just log an error. This is okay, as it + * just results in fewer available privileges. + */ + StatefulIndexPrivileges( + SecurityDynamicConfiguration roles, + FlattenedActionGroups actionGroups, + ImmutableSet wellKnownIndexActions, + Map indices, + long metadataVersion, + ByteSizeValue statefulIndexMaxHeapSize + ) { + Map< + String, + CompactMapGroupBuilder.MapBuilder>> actionToIndexToRoles = + new HashMap<>(); + DeduplicatingCompactSubSetBuilder roleSetBuilder = new DeduplicatingCompactSubSetBuilder<>( + roles.getCEntries().keySet() + ); + CompactMapGroupBuilder> indexMapBuilder = + new CompactMapGroupBuilder<>(indices.keySet(), (k2) -> roleSetBuilder.createSubSetBuilder()); + + // We iterate here through the present RoleV7 instances and nested through their "index_permissions" sections. + // During the loop, the actionToIndexToRoles map is being built. + // For that, action patterns from the role will be matched against the "well-known actions" to build + // a concrete action map and index patterns from the role will be matched against the present indices + // to build a concrete index map. + // + // The complexity of this loop is O(n*m) where n is dependent on the structure of the roles configuration + // and m is the number of matched indices. This formula does not take the loop through matchedActions in + // account, as this is bound by a constant number and thus does not need to be considered in the O() notation. + + top: for (Map.Entry entry : roles.getCEntries().entrySet()) { + try { + String roleName = entry.getKey(); + RoleV7 role = entry.getValue(); + + roleSetBuilder.next(roleName); + + for (RoleV7.Index indexPermissions : role.getIndex_permissions()) { + ImmutableSet permissions = actionGroups.resolve(indexPermissions.getAllowed_actions()); + + if (indexPermissions.getIndex_patterns().contains("*")) { + // Wildcard index patterns are handled in the static IndexPermissions object. + // This avoids having to build huge data structures - when a very easy shortcut is available. + continue; + } + + WildcardMatcher indexMatcher = IndexPattern.from(indexPermissions.getIndex_patterns()).getStaticPattern(); + + if (indexMatcher == WildcardMatcher.NONE) { + // The pattern is likely blank because there are only templated patterns. 
+ // Index patterns with templates are not handled here, but in the static IndexPermissions object + continue; + } + + for (String permission : permissions) { + WildcardMatcher actionMatcher = WildcardMatcher.from(permission); + Collection matchedActions = actionMatcher.getMatchAny(wellKnownIndexActions, Collectors.toList()); + + for (Map.Entry indicesEntry : indexMatcher.iterateMatching( + indices.entrySet(), + Map.Entry::getKey + )) { + for (String action : matchedActions) { + CompactMapGroupBuilder.MapBuilder< + String, + DeduplicatingCompactSubSetBuilder.SubSetBuilder> indexToRoles = actionToIndexToRoles + .computeIfAbsent(action, k -> indexMapBuilder.createMapBuilder()); + + indexToRoles.get(indicesEntry.getKey()).add(roleName); + + if (indicesEntry.getValue() instanceof IndexAbstraction.Alias) { + // For aliases we additionally add the sub-indices to the privilege map + for (IndexMetadata subIndex : indicesEntry.getValue().getIndices()) { + indexToRoles.get(subIndex.getIndex().getName()).add(roleName); + } + } + + if (roleSetBuilder.getEstimatedByteSize() + indexMapBuilder + .getEstimatedByteSize() > statefulIndexMaxHeapSize.getBytes()) { + log.info( + "Size of precomputed index privileges exceeds configured limit ({}). Using capped data structure." + + "This might lead to slightly lower performance during privilege evaluation. Consider raising {}.", + statefulIndexMaxHeapSize, + PRECOMPUTED_PRIVILEGES_MAX_HEAP_SIZE.getKey() + ); + break top; + } + } + } + } + } + } catch (Exception e) { + log.error("Unexpected exception while processing role: {}\nIgnoring role.", entry.getKey(), e); + } + } + + DeduplicatingCompactSubSetBuilder.Completed completedRoleSetBuilder = roleSetBuilder.build(); + + this.estimatedByteSize = roleSetBuilder.getEstimatedByteSize() + indexMapBuilder.getEstimatedByteSize(); + log.debug("Estimated size of StatefulIndexPermissions data structure: {}", this.estimatedByteSize); + + this.actionToIndexToRoles = actionToIndexToRoles.entrySet() + .stream() + .collect( + ImmutableMap.toImmutableMap( + Map.Entry::getKey, + entry -> entry.getValue().build(subSetBuilder -> subSetBuilder.build(completedRoleSetBuilder)) + ) + ); + + this.indices = ImmutableMap.copyOf(indices); + this.metadataVersion = metadataVersion; + this.wellKnownIndexActions = wellKnownIndexActions; + } + + /** + * Checks whether the user has privileges based on the given parameters and information in this class. This method + * has two major channels for returning results: + *

+ * 1. The return value is either PrivilegesEvaluatorResponse.ok() or null. If it is null, this method cannot + * completely tell whether the user has full privileges. A further check with IndexPermissions will be necessary. + * If PrivilegesEvaluatorResponse.ok() is returned, full privileges have already been determined. + *

+ * 2. As a side effect, this method will modify the supplied CheckTable object. This will be the case regardless + * of whether null or PrivilegesEvaluatorResponse.ok() is returned. The interesting case is actually when null + * is returned, because then the remaining logic needs only to check for the unchecked cases. + * + * @param actions the actions the user needs to have privileges for + * @param resolvedIndices the index the user needs to have privileges for + * @param context context information like user, resolved roles, etc. + * @param checkTable An action/index matrix. This method will modify the table as a side effect and check the cells where privileges are present. + * @return PrivilegesEvaluatorResponse.ok() or null. + */ + PrivilegesEvaluatorResponse providesPrivilege( + Set actions, + IndexResolverReplacer.Resolved resolvedIndices, + PrivilegesEvaluationContext context, + CheckTable checkTable, + Map indexMetadata + ) { + ImmutableSet effectiveRoles = context.getMappedRoles(); + + for (String action : actions) { + Map> indexToRoles = actionToIndexToRoles.get(action); + + if (indexToRoles != null) { + for (String index : resolvedIndices.getAllIndices()) { + String lookupIndex = index; + + if (index.startsWith(DataStream.BACKING_INDEX_PREFIX)) { + // If we have a backing index of a data stream, we will not try to test + // the backing index here, as we filter backing indices during initialization. + // Instead, we look up the containing data stream and check whether this has privileges. + lookupIndex = backingIndexToDataStream(index, indexMetadata); + } + + ImmutableCompactSubSet rolesWithPrivileges = indexToRoles.get(lookupIndex); + + if (rolesWithPrivileges != null && rolesWithPrivileges.containsAny(effectiveRoles)) { + if (checkTable.check(index, action)) { + return PrivilegesEvaluatorResponse.ok(); + } + } + } + } + } + + // If we reached this point, we cannot tell whether the user has privileges using this instance. + // Return null to indicate that there is no answer. + // The checkTable object might contain already a partial result. + return null; + } + + /** + * If the given index is the backing index of a data stream, the name of the data stream is returned. + * Otherwise, the name of the index itself is being returned. + */ + static String backingIndexToDataStream(String index, Map indexMetadata) { + IndexAbstraction indexAbstraction = indexMetadata.get(index); + + if (indexAbstraction instanceof IndexAbstraction.Index && indexAbstraction.getParentDataStream() != null) { + return indexAbstraction.getParentDataStream().getName(); + } else { + return index; + } + } + + /** + * Filters the given index abstraction map to only contain entries that are relevant the for stateful class. + * This has the goal to keep the heap footprint of instances of StatefulIndexPrivileges at a reasonable size. + *

+ * This removes the following entries: + *

+ * - closed indices: closed indices do not need any fast privilege evaluation
+ * - backing indices of data streams: privileges should only be assigned directly to the data streams.
+ * The privilege evaluation code is able to recognize that an index is a member of a data stream and test
+ * its privilege via that data stream. If a privilege is directly assigned to a backing index, we use
+ * the "slowish" code paths.
+ * - indices which are not matched by includeIndices
+ *
+ */ + static Map relevantOnly(Map indices) { + // First pass: Check if we need to filter at all + boolean doFilter = false; + + for (IndexAbstraction indexAbstraction : indices.values()) { + if (indexAbstraction instanceof IndexAbstraction.Index) { + if (indexAbstraction.getParentDataStream() != null + || indexAbstraction.getWriteIndex().getState() == IndexMetadata.State.CLOSE) { + doFilter = true; + break; + } + } + } + + if (!doFilter) { + return indices; + } + + // Second pass: Only if we actually need filtering, we will do it + ImmutableMap.Builder builder = ImmutableMap.builder(); + + for (IndexAbstraction indexAbstraction : indices.values()) { + if (indexAbstraction instanceof IndexAbstraction.Index) { + if (indexAbstraction.getParentDataStream() == null + && indexAbstraction.getWriteIndex().getState() != IndexMetadata.State.CLOSE) { + builder.put(indexAbstraction.getName(), indexAbstraction); + } + } else { + builder.put(indexAbstraction.getName(), indexAbstraction); + } + } + + return builder.build(); + } + } + +} diff --git a/src/main/java/org/opensearch/security/privileges/ClusterStateMetadataDependentPrivileges.java b/src/main/java/org/opensearch/security/privileges/ClusterStateMetadataDependentPrivileges.java new file mode 100644 index 0000000000..282e2e6bb6 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/ClusterStateMetadataDependentPrivileges.java @@ -0,0 +1,98 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges; + +import java.util.concurrent.Future; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.threadpool.ThreadPool; + +/** + * Abstract super class for classes which need metadata updates from the cluster state. This class implements + * asynchronous updates - that means that any subclass needs to be prepared for not having the most up to date + * cluster state. + */ +public abstract class ClusterStateMetadataDependentPrivileges { + + private static final Logger log = LogManager.getLogger(ClusterStateMetadataDependentPrivileges.class); + private Future updateFuture; + + /** + * Updates the stateful index configuration asynchronously with the index metadata from the current cluster state. + * As the update process can take some seconds for clusters with many indices, this method "de-bounces" the updates, + * i.e., a further update will be only initiated after the previous update has finished. This is okay as this class + * can handle the case that it do not have the most recent information. It will fall back to slower methods then. + */ + public synchronized void updateClusterStateMetadataAsync(ClusterService clusterService, ThreadPool threadPool) { + long currentMetadataVersion = clusterService.state().metadata().version(); + + if (currentMetadataVersion <= getCurrentlyUsedMetadataVersion()) { + return; + } + + if (this.updateFuture == null || this.updateFuture.isDone()) { + this.updateFuture = threadPool.generic().submit(() -> { + for (int i = 0;; i++) { + if (i > 5) { + try { + // In case we got many consecutive updates, let's sleep a little to let + // other operations catch up. 
+ Thread.sleep(100); + } catch (InterruptedException e) { + return; + } + } + + Metadata metadata = clusterService.state().metadata(); + + synchronized (ClusterStateMetadataDependentPrivileges.this) { + if (metadata.version() <= ClusterStateMetadataDependentPrivileges.this.getCurrentlyUsedMetadataVersion()) { + return; + } + } + + try { + log.debug("Updating {} with metadata version {}", this, metadata.version()); + updateClusterStateMetadata(metadata); + } catch (Exception e) { + log.error("Error while updating {}", this, e); + } finally { + synchronized (ClusterStateMetadataDependentPrivileges.this) { + if (ClusterStateMetadataDependentPrivileges.this.updateFuture.isCancelled()) { + // This can happen if this instance got obsolete due to a config update + // or if the node is shutting down + return; + } + } + } + } + }); + } + } + + /** + * Stops any concurrent update tasks to let the node gracefully shut down. + */ + public synchronized void shutdown() { + if (this.updateFuture != null && !this.updateFuture.isDone()) { + this.updateFuture.cancel(true); + } + } + + protected abstract void updateClusterStateMetadata(Metadata metadata); + + protected abstract long getCurrentlyUsedMetadataVersion(); + +} diff --git a/src/main/java/org/opensearch/security/privileges/DocumentAllowList.java b/src/main/java/org/opensearch/security/privileges/DocumentAllowList.java index 129233a007..6e41857737 100644 --- a/src/main/java/org/opensearch/security/privileges/DocumentAllowList.java +++ b/src/main/java/org/opensearch/security/privileges/DocumentAllowList.java @@ -14,6 +14,9 @@ import java.util.HashSet; import java.util.Set; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.security.support.ConfigConstants; @@ -25,12 +28,29 @@ */ public class DocumentAllowList { - private final Set entries = new HashSet<>(); + private static final Logger log = LogManager.getLogger(DocumentAllowList.class); - public DocumentAllowList() { + public static DocumentAllowList get(ThreadContext threadContext) { + String header = threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_DOC_ALLOWLIST_HEADER); + if (header == null) { + return EMPTY; + } else { + try { + return parse(header); + } catch (Exception e) { + log.error("Error while handling document allow list: {}", header, e); + return EMPTY; + } + } } + private static final DocumentAllowList EMPTY = new DocumentAllowList(); + + private final Set entries = new HashSet<>(); + + public DocumentAllowList() {} + public void add(String index, String id) { this.add(new Entry(index, id)); } @@ -59,6 +79,16 @@ public boolean isAllowed(String index, String id) { return false; } + public boolean isEntryForIndexPresent(String index) { + for (Entry entry : entries) { + if (entry.index.equals(index)) { + return true; + } + } + + return false; + } + public String toString() { if (this.entries.isEmpty()) { return ""; diff --git a/src/main/java/org/opensearch/security/privileges/ExpressionEvaluationException.java b/src/main/java/org/opensearch/security/privileges/ExpressionEvaluationException.java new file mode 100644 index 0000000000..50e933246b --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/ExpressionEvaluationException.java @@ -0,0 +1,21 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source 
license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges; + +/** + * This exception indicates that an expression - such as a regular expression - could not be properly evaluated during + * privilege evaluation. + */ +public class ExpressionEvaluationException extends Exception { + public ExpressionEvaluationException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/src/main/java/org/opensearch/security/privileges/IndexPattern.java b/src/main/java/org/opensearch/security/privileges/IndexPattern.java new file mode 100644 index 0000000000..5b73904d14 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/IndexPattern.java @@ -0,0 +1,258 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import com.google.common.collect.ImmutableList; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.security.support.WildcardMatcher; + +/** + * Aggregates index patterns defined in roles and segments them into patterns using template expressions ("index_${user.name}"), + * patterns using date math and plain patterns. This segmentation is needed because only plain patterns can be used + * to pre-compute privilege maps. The other types of patterns need to be evaluated "live" during the actual request. + */ +public class IndexPattern { + private static final Logger log = LogManager.getLogger(IndexPattern.class); + + /** + * An IndexPattern which does not match any index. + */ + public static final IndexPattern EMPTY = new IndexPattern(WildcardMatcher.NONE, ImmutableList.of(), ImmutableList.of()); + + /** + * Plain index patterns without any dynamic expressions like user attributes and date math. + * This can be not null. If this instance cannot match any static pattern, this will be WildcardMatcher.NONE. 
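A hedged usage sketch of the segmentation described in the class comment above; the pattern strings are invented for illustration:

    // One plain pattern, one user-attribute template, one date math expression.
    IndexPattern pattern = IndexPattern.from("logs-*", "dept-${user.name}-*", "<logs-{now/d}>");

    pattern.hasStaticPattern();   // true: "logs-*" can be matched against indices upfront
    pattern.hasDynamicPattern();  // true: the template and the date math expression need per-request evaluation

    WildcardMatcher precomputable = pattern.getStaticPattern(); // covers only "logs-*"
    IndexPattern perRequest = pattern.dynamicOnly();            // covers only the template and date math parts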
+ */ + private final WildcardMatcher staticPattern; + + /** + * Index patterns which contain user attributes (like ${user.name}) + */ + private final ImmutableList patternTemplates; + + /** + * Index patterns which contain date math (like ) + */ + private final ImmutableList dateMathExpressions; + private final int hashCode; + + private IndexPattern(WildcardMatcher staticPattern, ImmutableList patternTemplates, ImmutableList dateMathExpressions) { + this.staticPattern = staticPattern; + this.patternTemplates = patternTemplates; + this.dateMathExpressions = dateMathExpressions; + this.hashCode = staticPattern.hashCode() + patternTemplates.hashCode() + dateMathExpressions.hashCode(); + } + + public boolean matches(String index, PrivilegesEvaluationContext context, Map indexMetadata) + throws PrivilegesEvaluationException { + if (staticPattern != WildcardMatcher.NONE && staticPattern.test(index)) { + return true; + } + + if (!patternTemplates.isEmpty()) { + for (String patternTemplate : this.patternTemplates) { + try { + WildcardMatcher matcher = context.getRenderedMatcher(patternTemplate); + + if (matcher.test(index)) { + return true; + } + } catch (ExpressionEvaluationException e) { + throw new PrivilegesEvaluationException("Error while evaluating dynamic index pattern: " + patternTemplate, e); + } + } + } + + if (!dateMathExpressions.isEmpty()) { + IndexNameExpressionResolver indexNameExpressionResolver = context.getIndexNameExpressionResolver(); + + // Note: The use of date math expressions in privileges is a bit odd, as it only provides a very limited + // solution for the potential user case. A different approach might be nice. + + for (String dateMathExpression : this.dateMathExpressions) { + try { + String resolvedExpression = indexNameExpressionResolver.resolveDateMathExpression(dateMathExpression); + + WildcardMatcher matcher = WildcardMatcher.from(resolvedExpression); + + if (matcher.test(index)) { + return true; + } + } catch (Exception e) { + throw new PrivilegesEvaluationException("Error while evaluating date math expression: " + dateMathExpression, e); + } + } + } + + IndexAbstraction indexAbstraction = indexMetadata.get(index); + + if (indexAbstraction instanceof IndexAbstraction.Index) { + // Check for the privilege for aliases or data streams containing this index + + if (indexAbstraction.getParentDataStream() != null) { + if (matches(indexAbstraction.getParentDataStream().getName(), context, indexMetadata)) { + return true; + } + } + + // Retrieve aliases: The use of getWriteIndex() is a bit messy, but it is the only way to access + // alias metadata from here. 
+ for (String alias : indexAbstraction.getWriteIndex().getAliases().keySet()) { + if (matches(alias, context, indexMetadata)) { + return true; + } + } + } + + return false; + } + + @Override + public String toString() { + if (patternTemplates.size() == 0 && dateMathExpressions.size() == 0) { + return staticPattern.toString(); + } else { + StringBuilder result = new StringBuilder(); + + if (staticPattern != WildcardMatcher.NONE) { + result.append(staticPattern); + } + + if (patternTemplates.size() != 0) { + if (result.length() != 0) { + result.append(" "); + } + + result.append(String.join(",", patternTemplates)); + } + + if (dateMathExpressions.size() != 0) { + if (result.length() != 0) { + result.append(" "); + } + + result.append(String.join(",", dateMathExpressions)); + } + + return result.toString(); + } + } + + public WildcardMatcher getStaticPattern() { + return staticPattern; + } + + /** + * Returns true if this object contains patterns which can be matched against indices upfront. + */ + public boolean hasStaticPattern() { + return staticPattern != WildcardMatcher.NONE; + } + + /** + * Returns true if this object contains patterns which must be matched against indices again for each request, + * as they depend on user attributes or on the current time. + */ + public boolean hasDynamicPattern() { + return !patternTemplates.isEmpty() || !dateMathExpressions.isEmpty(); + } + + /** + * Returns a sub-set of this object, which includes only the patterns which must be matched against indices again for each request, + * as they depend on user attributes or on the current time. + */ + public IndexPattern dynamicOnly() { + if (patternTemplates.isEmpty() && dateMathExpressions.isEmpty()) { + return EMPTY; + } else { + return new IndexPattern(WildcardMatcher.NONE, this.patternTemplates, this.dateMathExpressions); + } + } + + /** + * Returns true if this object cannot match against any index name. + */ + public boolean isEmpty() { + return !hasStaticPattern() && !hasDynamicPattern(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof IndexPattern)) return false; + IndexPattern that = (IndexPattern) o; + return Objects.equals(staticPattern, that.staticPattern) + && Objects.equals(patternTemplates, that.patternTemplates) + && Objects.equals(dateMathExpressions, that.dateMathExpressions); + } + + @Override + public int hashCode() { + return hashCode; + } + + static class Builder { + private List constantPatterns = new ArrayList<>(); + private List patternTemplates = new ArrayList<>(); + private List dateMathExpressions = new ArrayList<>(); + + void add(List source) { + for (int i = 0; i < source.size(); i++) { + try { + String indexPattern = source.get(i); + + if (indexPattern.startsWith("<") && indexPattern.endsWith(">")) { + this.dateMathExpressions.add(indexPattern); + } else if (!containsPlaceholder(indexPattern)) { + this.constantPatterns.add(WildcardMatcher.from(indexPattern)); + } else { + this.patternTemplates.add(indexPattern); + } + } catch (Exception e) { + // This usually happens when the index pattern defines an unparseable regular expression + log.error("Error while creating index pattern for {}", source, e); + } + } + } + + IndexPattern build() { + return new IndexPattern( + constantPatterns.size() != 0 ? 
WildcardMatcher.from(constantPatterns) : WildcardMatcher.NONE, + ImmutableList.copyOf(patternTemplates), + ImmutableList.copyOf(dateMathExpressions) + ); + } + } + + static boolean containsPlaceholder(String indexPattern) { + return indexPattern.indexOf("${") != -1; + } + + public static IndexPattern from(List source) { + Builder builder = new Builder(); + builder.add(source); + return builder.build(); + } + + public static IndexPattern from(String... source) { + return from(Arrays.asList(source)); + } +} diff --git a/src/main/java/org/opensearch/security/privileges/PitPrivilegesEvaluator.java b/src/main/java/org/opensearch/security/privileges/PitPrivilegesEvaluator.java index 57c1c18414..4fd4141b08 100644 --- a/src/main/java/org/opensearch/security/privileges/PitPrivilegesEvaluator.java +++ b/src/main/java/org/opensearch/security/privileges/PitPrivilegesEvaluator.java @@ -18,17 +18,15 @@ import java.util.Set; import java.util.concurrent.TimeUnit; +import com.google.common.collect.ImmutableSet; + import org.opensearch.action.ActionRequest; import org.opensearch.action.admin.indices.segments.PitSegmentsRequest; import org.opensearch.action.search.CreatePitRequest; import org.opensearch.action.search.DeletePitRequest; -import org.opensearch.cluster.metadata.IndexNameExpressionResolver; -import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.unit.TimeValue; import org.opensearch.security.OpenSearchSecurityPlugin; import org.opensearch.security.resolver.IndexResolverReplacer; -import org.opensearch.security.securityconf.SecurityRoles; -import org.opensearch.security.user.User; /** * This class evaluates privileges for point in time (Delete and List all) operations. @@ -39,11 +37,9 @@ public class PitPrivilegesEvaluator { public PrivilegesEvaluatorResponse evaluate( final ActionRequest request, - final ClusterService clusterService, - final User user, - final SecurityRoles securityRoles, + final PrivilegesEvaluationContext context, + final ActionPrivileges actionPrivileges, final String action, - final IndexNameExpressionResolver resolver, final PrivilegesEvaluatorResponse presponse, final IndexResolverReplacer irr ) { @@ -64,7 +60,7 @@ public PrivilegesEvaluatorResponse evaluate( if (pitIds.size() == 1 && "_all".equals(pitIds.get(0))) { return presponse; } else { - return handlePitsAccess(pitIds, clusterService, user, securityRoles, action, resolver, presponse, irr); + return handlePitsAccess(pitIds, context, actionPrivileges, action, presponse, irr); } } @@ -73,11 +69,9 @@ public PrivilegesEvaluatorResponse evaluate( */ private PrivilegesEvaluatorResponse handlePitsAccess( List pitIds, - ClusterService clusterService, - User user, - SecurityRoles securityRoles, + PrivilegesEvaluationContext context, + ActionPrivileges actionPrivileges, final String action, - IndexNameExpressionResolver resolver, PrivilegesEvaluatorResponse presponse, final IndexResolverReplacer irr ) { @@ -87,30 +81,16 @@ private PrivilegesEvaluatorResponse handlePitsAccess( for (String[] indices : pitToIndicesMap.values()) { pitIndices.addAll(Arrays.asList(indices)); } - Set allPermittedIndices = getPermittedIndices(pitIndices, clusterService, user, securityRoles, action, resolver, irr); + String[] indicesArr = new String[pitIndices.size()]; + CreatePitRequest req = new CreatePitRequest(new TimeValue(1, TimeUnit.DAYS), true, pitIndices.toArray(indicesArr)); + final IndexResolverReplacer.Resolved pitResolved = irr.resolveRequest(req); + PrivilegesEvaluatorResponse subResponse = 
actionPrivileges.hasIndexPrivilege(context, ImmutableSet.of(action), pitResolved); // Only if user has access to all PIT's indices, allow operation, otherwise continue evaluation in PrivilegesEvaluator. - if (allPermittedIndices.containsAll(pitIndices)) { + if (subResponse.isAllowed()) { presponse.allowed = true; presponse.markComplete(); } - return presponse; - } - /** - * This method returns list of permitted indices for the PIT indices passed - */ - private Set getPermittedIndices( - Set pitIndices, - ClusterService clusterService, - User user, - SecurityRoles securityRoles, - final String action, - IndexNameExpressionResolver resolver, - final IndexResolverReplacer irr - ) { - String[] indicesArr = new String[pitIndices.size()]; - CreatePitRequest req = new CreatePitRequest(new TimeValue(1, TimeUnit.DAYS), true, pitIndices.toArray(indicesArr)); - final IndexResolverReplacer.Resolved pitResolved = irr.resolveRequest(req); - return securityRoles.reduce(pitResolved, user, new String[] { action }, resolver, clusterService); + return presponse; } } diff --git a/src/main/java/org/opensearch/security/privileges/PrivilegesConfigurationValidationException.java b/src/main/java/org/opensearch/security/privileges/PrivilegesConfigurationValidationException.java new file mode 100644 index 0000000000..007f782155 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/PrivilegesConfigurationValidationException.java @@ -0,0 +1,24 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges; + +/** + * Thrown when the privileges configuration cannot be parsed because it is invalid. + */ +public class PrivilegesConfigurationValidationException extends Exception { + public PrivilegesConfigurationValidationException(String message) { + super(message); + } + + public PrivilegesConfigurationValidationException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluationContext.java b/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluationContext.java index 98ffddb3d3..f7e5d6de7d 100644 --- a/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluationContext.java +++ b/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluationContext.java @@ -10,19 +10,27 @@ */ package org.opensearch.security.privileges; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Supplier; + import com.google.common.collect.ImmutableSet; import org.opensearch.action.ActionRequest; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.security.resolver.IndexResolverReplacer; +import org.opensearch.security.support.WildcardMatcher; import org.opensearch.security.user.User; import org.opensearch.tasks.Task; /** * Request-scoped context information for privilege evaluation. - * + *
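The context object introduced here is handed to all privilege checks. A hedged sketch of the intended call pattern, relying only on createContext(user, action) and getActionPrivileges(), which are added to PrivilegesEvaluator further below in this diff (the action name is an arbitrary example):

    PrivilegesEvaluationContext context = privilegesEvaluator.createContext(user, "cluster:monitor/health");
    PrivilegesEvaluatorResponse response = privilegesEvaluator.getActionPrivileges()
        .hasClusterPrivilege(context, "cluster:monitor/health");

    if (!response.isAllowed()) {
        // response.getMissingPrivileges() then contains "cluster:monitor/health"
    }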

* This class carries metadata about the request and provides caching facilities for data which might need to be * evaluated several times per request. - * + *

* As this class is request-scoped, it is only used by a single thread. Thus, no thread synchronization mechanisms * are necessary. */ @@ -31,9 +39,19 @@ public class PrivilegesEvaluationContext { private final String action; private final ActionRequest request; private IndexResolverReplacer.Resolved resolvedRequest; + private Map indicesLookup; private final Task task; private ImmutableSet mappedRoles; private final IndexResolverReplacer indexResolverReplacer; + private final IndexNameExpressionResolver indexNameExpressionResolver; + private final Supplier clusterStateSupplier; + + /** + * This caches the ready to use WildcardMatcher instances for the current request. Many index patterns have + * to be executed several times per request (for example first for action privileges, later for DLS). Thus, + * it makes sense to cache and later re-use these. + */ + private final Map renderedPatternTemplateCache = new HashMap<>(); public PrivilegesEvaluationContext( User user, @@ -41,20 +59,49 @@ public PrivilegesEvaluationContext( String action, ActionRequest request, Task task, - IndexResolverReplacer indexResolverReplacer + IndexResolverReplacer indexResolverReplacer, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier clusterStateSupplier ) { this.user = user; this.mappedRoles = mappedRoles; this.action = action; this.request = request; - this.task = task; + this.clusterStateSupplier = clusterStateSupplier; this.indexResolverReplacer = indexResolverReplacer; + this.indexNameExpressionResolver = indexNameExpressionResolver; + this.task = task; } public User getUser() { return user; } + /** + * Interpolates any attribute references (like ${user.name}) in the given string and parses the result + * to a WildcardMatcher. This method catches earlier rendered templates in order to avoid recurring re-rendering + * of templates during a single privilege evaluation pass. + * + * @throws ExpressionEvaluationException if the resulting pattern could not be parsed. This is usually the case + * if an invalid regex was supplied. 
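A hedged sketch of the getRenderedMatcher contract described above; the template string and index name are invented for illustration:

    try {
        // The first call interpolates ${user.name} and compiles the pattern; the result is cached in the context.
        WildcardMatcher matcher = context.getRenderedMatcher("dept-${user.name}-*");
        boolean allowed = matcher.test("dept-alice-2024"); // true when ${user.name} renders to "alice"

        // A later call with the same template within the same request re-uses the cached matcher.
        WildcardMatcher cached = context.getRenderedMatcher("dept-${user.name}-*");
    } catch (ExpressionEvaluationException e) {
        // Thrown when the interpolated pattern cannot be parsed, e.g. because of an invalid regular expression.
    }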
+ */ + public WildcardMatcher getRenderedMatcher(String template) throws ExpressionEvaluationException { + WildcardMatcher matcher = this.renderedPatternTemplateCache.get(template); + + if (matcher == null) { + try { + matcher = WildcardMatcher.from(UserAttributes.replaceProperties(template, this)); + } catch (Exception e) { + // This especially happens for invalid regular expressions + throw new ExpressionEvaluationException("Error while evaluating expression in " + template, e); + } + + this.renderedPatternTemplateCache.put(template, matcher); + } + + return matcher; + } + public String getAction() { return action; } @@ -94,4 +141,35 @@ void setMappedRoles(ImmutableSet mappedRoles) { this.mappedRoles = mappedRoles; } + public Supplier getClusterStateSupplier() { + return clusterStateSupplier; + } + + public Map getIndicesLookup() { + if (this.indicesLookup == null) { + this.indicesLookup = clusterStateSupplier.get().metadata().getIndicesLookup(); + } + return this.indicesLookup; + } + + public IndexNameExpressionResolver getIndexNameExpressionResolver() { + return indexNameExpressionResolver; + } + + @Override + public String toString() { + return "PrivilegesEvaluationContext{" + + "user=" + + user + + ", action='" + + action + + '\'' + + ", request=" + + request + + ", resolvedRequest=" + + resolvedRequest + + ", mappedRoles=" + + mappedRoles + + '}'; + } } diff --git a/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluationException.java b/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluationException.java new file mode 100644 index 0000000000..b3a0ac569f --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluationException.java @@ -0,0 +1,42 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges; + +import org.apache.commons.lang3.StringUtils; + +/** + * Signifies that an error was encountered while evaluating the privileges of a user for a particular request. + * + */ +public class PrivilegesEvaluationException extends Exception { + public PrivilegesEvaluationException(String message, Throwable cause) { + super(message, cause); + } + + /** + * Returns a formatted multi-line-string showing cause messages as separate, indented lines. Does not include + * stack traces. 
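A hedged example of what getNestedMessages (defined just below) is expected to produce for a chained exception; the messages are invented:

    PrivilegesEvaluationException e = new PrivilegesEvaluationException(
        "Error while evaluating index privileges",
        new PrivilegesEvaluationException(
            "Error while evaluating dynamic index pattern: dept-${user.name}-*",
            new RuntimeException("unbalanced regular expression")));

    // getNestedMessages() should yield roughly the following, with each cause indented by three more spaces:
    // Error while evaluating index privileges
    //   Error while evaluating dynamic index pattern: dept-${user.name}-*
    //      unbalanced regular expression
    String diagnostic = e.getNestedMessages();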
+ */ + public String getNestedMessages() { + if (this.getCause() == null) { + return this.getMessage(); + } + + StringBuilder result = new StringBuilder(this.getMessage()).append("\n"); + + Throwable cause = this.getCause(); + for (int i = 1; cause != null; cause = cause.getCause(), i++) { + result.append(StringUtils.repeat(' ', i * 3)).append(cause.getMessage()).append("\n"); + } + + return result.toString(); + } +} diff --git a/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluator.java b/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluator.java index fb320e6bd9..0483123b6e 100644 --- a/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluator.java +++ b/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluator.java @@ -35,6 +35,8 @@ import java.util.Map; import java.util.Set; import java.util.StringJoiner; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -71,9 +73,11 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.termvectors.MultiTermVectorsAction; import org.opensearch.action.update.UpdateAction; +import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.metadata.AliasMetadata; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.metadata.Metadata; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.ThreadContext; @@ -88,9 +92,14 @@ import org.opensearch.security.resolver.IndexResolverReplacer; import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; import org.opensearch.security.securityconf.ConfigModel; +import org.opensearch.security.securityconf.DynamicConfigFactory; import org.opensearch.security.securityconf.DynamicConfigModel; -import org.opensearch.security.securityconf.SecurityRoles; +import org.opensearch.security.securityconf.FlattenedActionGroups; +import org.opensearch.security.securityconf.impl.CType; import org.opensearch.security.securityconf.impl.DashboardSignInOption; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.ActionGroupsV7; +import org.opensearch.security.securityconf.impl.v7.RoleV7; import org.opensearch.security.support.ConfigConstants; import org.opensearch.security.support.WildcardMatcher; import org.opensearch.security.user.User; @@ -121,7 +130,7 @@ public class PrivilegesEvaluator { private static final IndicesOptions ALLOW_EMPTY = IndicesOptions.fromOptions(true, true, false, false); protected final Logger log = LogManager.getLogger(this.getClass()); - private final ClusterService clusterService; + private final Supplier clusterStateSupplier; private final IndexNameExpressionResolver resolver; @@ -142,10 +151,14 @@ public class PrivilegesEvaluator { private final PitPrivilegesEvaluator pitPrivilegesEvaluator; private DynamicConfigModel dcm; private final NamedXContentRegistry namedXContentRegistry; + private final Settings settings; + private final AtomicReference actionPrivileges = new AtomicReference<>(); public PrivilegesEvaluator( final ClusterService clusterService, - final ThreadPool threadPool, + Supplier clusterStateSupplier, + ThreadPool threadPool, + final ThreadContext threadContext, final ConfigurationRepository 
configurationRepository, final IndexNameExpressionResolver resolver, AuditLog auditLog, @@ -157,12 +170,13 @@ public PrivilegesEvaluator( ) { super(); - this.clusterService = clusterService; this.resolver = resolver; this.auditLog = auditLog; - this.threadContext = threadPool.getThreadContext(); + this.threadContext = threadContext; this.privilegesInterceptor = privilegesInterceptor; + this.clusterStateSupplier = clusterStateSupplier; + this.settings = settings; this.checkSnapshotRestoreWritePrivileges = settings.getAsBoolean( ConfigConstants.SECURITY_CHECK_SNAPSHOT_RESTORE_WRITE_PRIVILEGES, @@ -177,6 +191,56 @@ public PrivilegesEvaluator( termsAggregationEvaluator = new TermsAggregationEvaluator(); pitPrivilegesEvaluator = new PitPrivilegesEvaluator(); this.namedXContentRegistry = namedXContentRegistry; + + if (configurationRepository != null) { + configurationRepository.subscribeOnChange(configMap -> { + try { + SecurityDynamicConfiguration actionGroupsConfiguration = configurationRepository.getConfiguration( + CType.ACTIONGROUPS + ); + SecurityDynamicConfiguration rolesConfiguration = configurationRepository.getConfiguration(CType.ROLES); + + this.updateConfiguration(actionGroupsConfiguration, rolesConfiguration); + } catch (Exception e) { + log.error("Error while updating ActionPrivileges object with {}", configMap, e); + } + }); + } + + if (clusterService != null) { + clusterService.addListener(event -> { + ActionPrivileges actionPrivileges = PrivilegesEvaluator.this.actionPrivileges.get(); + if (actionPrivileges != null) { + actionPrivileges.updateClusterStateMetadataAsync(clusterService, threadPool); + } + }); + } + + } + + void updateConfiguration( + SecurityDynamicConfiguration actionGroupsConfiguration, + SecurityDynamicConfiguration rolesConfiguration + ) { + if (rolesConfiguration != null) { + SecurityDynamicConfiguration actionGroupsWithStatics = actionGroupsConfiguration != null + ? 
DynamicConfigFactory.addStatics(actionGroupsConfiguration.clone()) + : DynamicConfigFactory.addStatics(SecurityDynamicConfiguration.empty(CType.ACTIONGROUPS)); + FlattenedActionGroups flattenedActionGroups = new FlattenedActionGroups(actionGroupsWithStatics); + ActionPrivileges actionPrivileges = new ActionPrivileges( + DynamicConfigFactory.addStatics(rolesConfiguration.clone()), + flattenedActionGroups, + () -> clusterStateSupplier.get().metadata().getIndicesLookup(), + settings + ); + Metadata metadata = clusterStateSupplier.get().metadata(); + actionPrivileges.updateStatefulIndexPrivileges(metadata.getIndicesLookup(), metadata.version()); + ActionPrivileges oldInstance = this.actionPrivileges.getAndSet(actionPrivileges); + + if (oldInstance != null) { + oldInstance.shutdown(); + } + } } @Subscribe @@ -189,22 +253,17 @@ public void onDynamicConfigModelChanged(DynamicConfigModel dcm) { this.dcm = dcm; } - public SecurityRoles getSecurityRoles(Set roles) { - return configModel.getSecurityRoles().filter(roles); + public ActionPrivileges getActionPrivileges() { + return this.actionPrivileges.get(); } - public boolean hasRestAdminPermissions(final User user, final TransportAddress remoteAddress, final String permissions) { - final Set userRoles = mapRoles(user, remoteAddress); - return hasRestAdminPermissions(userRoles, permissions); - } - - private boolean hasRestAdminPermissions(final Set roles, String permission) { - final SecurityRoles securityRoles = getSecurityRoles(roles); - return securityRoles.hasExplicitClusterPermissionPermission(permission); + public boolean hasRestAdminPermissions(final User user, final TransportAddress remoteAddress, final String permission) { + PrivilegesEvaluationContext context = createContext(user, permission); + return this.actionPrivileges.get().hasExplicitClusterPrivilege(context, permission).isAllowed(); } public boolean isInitialized() { - return configModel != null && configModel.getSecurityRoles() != null && dcm != null; + return configModel != null && dcm != null && actionPrivileges.get() != null; } private void setUserInfoInThreadContext(User user) { @@ -221,6 +280,10 @@ private void setUserInfoInThreadContext(User user) { } } + public PrivilegesEvaluationContext createContext(User user, String action) { + return createContext(user, action, null, null, null); + } + public PrivilegesEvaluationContext createContext( User user, String action0, @@ -235,7 +298,7 @@ public PrivilegesEvaluationContext createContext( TransportAddress caller = threadContext.getTransient(ConfigConstants.OPENDISTRO_SECURITY_REMOTE_ADDRESS); ImmutableSet mappedRoles = ImmutableSet.copyOf((injectedRoles == null) ? 
mapRoles(user, caller) : injectedRoles); - return new PrivilegesEvaluationContext(user, mappedRoles, action0, request, task, irr); + return new PrivilegesEvaluationContext(user, mappedRoles, action0, request, task, irr, resolver, clusterStateSupplier); } public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) { @@ -262,7 +325,7 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) action0 = PutMappingAction.NAME; } - final PrivilegesEvaluatorResponse presponse = new PrivilegesEvaluatorResponse(); + PrivilegesEvaluatorResponse presponse = new PrivilegesEvaluatorResponse(); final String injectedRolesValidationString = threadContext.getTransient( ConfigConstants.OPENDISTRO_SECURITY_INJECTED_ROLES_VALIDATION @@ -278,8 +341,6 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) mappedRoles = ImmutableSet.copyOf(injectedRolesValidationSet); context.setMappedRoles(mappedRoles); } - presponse.resolvedSecurityRoles.addAll(mappedRoles); - final SecurityRoles securityRoles = getSecurityRoles(mappedRoles); // Add the security roles for this user so that they can be used for DLS parameter substitution. user.addSecurityRoles(mappedRoles); @@ -287,11 +348,16 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) final boolean isDebugEnabled = log.isDebugEnabled(); if (isDebugEnabled) { - log.debug("Evaluate permissions for {} on {}", user, clusterService.localNode().getName()); + log.debug("Evaluate permissions for {}", user); log.debug("Action: {} ({})", action0, request.getClass().getSimpleName()); log.debug("Mapped roles: {}", mappedRoles.toString()); } + ActionPrivileges actionPrivileges = this.actionPrivileges.get(); + if (actionPrivileges == null) { + throw new OpenSearchSecurityException("OpenSearch Security is not initialized: roles configuration is missing"); + } + if (request instanceof BulkRequest && (Strings.isNullOrEmpty(user.getRequestedTenant()))) { // Shortcut for bulk actions. The details are checked on the lower level of the BulkShardRequests (Action // indices:data/write/bulk[s]). @@ -300,18 +366,16 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) // No further access check for the default tenant is necessary, as access will be also checked on the TransportShardBulkAction // level. - if (!securityRoles.impliesClusterPermissionPermission(action0)) { - presponse.missingPrivileges.add(action0); - presponse.allowed = false; + presponse = actionPrivileges.hasClusterPrivilege(context, action0); + + if (!presponse.allowed) { log.info( "No cluster-level perm match for {} [Action [{}]] [RolesChecked {}]. 
No permissions for {}", user, action0, mappedRoles, - presponse.missingPrivileges + presponse.getMissingPrivileges() ); - } else { - presponse.allowed = true; } return presponse; } @@ -328,17 +392,8 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) } // Security index access - if (systemIndexAccessEvaluator.evaluate( - request, - task, - action0, - requestedResolved, - presponse, - securityRoles, - user, - resolver, - clusterService - ).isComplete()) { + if (systemIndexAccessEvaluator.evaluate(request, task, action0, requestedResolved, presponse, context, actionPrivileges, user) + .isComplete()) { return presponse; } @@ -348,7 +403,7 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) } // check access for point in time requests - if (pitPrivilegesEvaluator.evaluate(request, clusterService, user, securityRoles, action0, resolver, presponse, irr).isComplete()) { + if (pitPrivilegesEvaluator.evaluate(request, context, actionPrivileges, action0, presponse, irr).isComplete()) { return presponse; } @@ -362,22 +417,20 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) final boolean serviceAccountUser = user.isServiceAccount(); if (isClusterPerm(action0)) { if (serviceAccountUser) { - presponse.missingPrivileges.add(action0); - presponse.allowed = false; log.info("{} is a service account which doesn't have access to cluster level permission: {}", user, action0); - return presponse; + return PrivilegesEvaluatorResponse.insufficient(action0); } - if (!securityRoles.impliesClusterPermissionPermission(action0)) { - presponse.missingPrivileges.add(action0); - presponse.allowed = false; + presponse = actionPrivileges.hasClusterPrivilege(context, action0); + + if (!presponse.allowed) { log.info( "No cluster-level perm match for {} {} [Action [{}]] [RolesChecked {}]. 
No permissions for {}", user, requestedResolved, action0, mappedRoles, - presponse.missingPrivileges + presponse.getMissingPrivileges() ); return presponse; } else { @@ -429,13 +482,11 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) } // term aggregations - if (termsAggregationEvaluator.evaluate(requestedResolved, request, clusterService, user, securityRoles, resolver, presponse) - .isComplete()) { + if (termsAggregationEvaluator.evaluate(requestedResolved, request, context, actionPrivileges, presponse).isComplete()) { return presponse; } - final Set allIndexPermsRequired = evaluateAdditionalIndexPermissions(request, action0); - final String[] allIndexPermsRequiredA = allIndexPermsRequired.toArray(new String[0]); + ImmutableSet allIndexPermsRequired = evaluateAdditionalIndexPermissions(request, action0); if (isDebugEnabled) { log.debug( @@ -445,9 +496,6 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) ); } - presponse.missingPrivileges.clear(); - presponse.missingPrivileges.addAll(allIndexPermsRequired); - if (isDebugEnabled) { log.debug("Requested resolved index types: {}", requestedResolved); log.debug("Security roles: {}", mappedRoles); @@ -473,91 +521,67 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) if (!replaceResult.continueEvaluation) { if (replaceResult.accessDenied) { auditLog.logMissingPrivileges(action0, request, task); + return PrivilegesEvaluatorResponse.insufficient(action0); } else { presponse.allowed = true; presponse.createIndexRequestBuilder = replaceResult.createIndexRequestBuilder; + return presponse; } - return presponse; } } - if (dnfofEnabled && DNFOF_MATCHER.test(action0)) { - - if (requestedResolved.getAllIndices().isEmpty()) { - presponse.missingPrivileges.clear(); - presponse.allowed = true; - return presponse; - } + boolean dnfofPossible = dnfofEnabled && DNFOF_MATCHER.test(action0); - Set reduced = securityRoles.reduce(requestedResolved, user, allIndexPermsRequiredA, resolver, clusterService); + presponse = actionPrivileges.hasIndexPrivilege(context, allIndexPermsRequired, requestedResolved); - if (reduced.isEmpty()) { - if (dcm.isDnfofForEmptyResultsEnabled() && request instanceof IndicesRequest.Replaceable) { - - ((IndicesRequest.Replaceable) request).indices(new String[0]); - presponse.missingPrivileges.clear(); - presponse.allowed = true; - - if (request instanceof SearchRequest) { - ((SearchRequest) request).indicesOptions(ALLOW_EMPTY); - } else if (request instanceof ClusterSearchShardsRequest) { - ((ClusterSearchShardsRequest) request).indicesOptions(ALLOW_EMPTY); - } else if (request instanceof GetFieldMappingsRequest) { - ((GetFieldMappingsRequest) request).indicesOptions(ALLOW_EMPTY); - } - - return presponse; + if (presponse.isPartiallyOk()) { + if (dnfofPossible) { + if (irr.replace(request, true, presponse.getAvailableIndices())) { + return PrivilegesEvaluatorResponse.ok(); } - presponse.allowed = false; - return presponse; } + } else if (!presponse.isAllowed()) { + if (dnfofPossible && dcm.isDnfofForEmptyResultsEnabled() && request instanceof IndicesRequest.Replaceable) { + ((IndicesRequest.Replaceable) request).indices(new String[0]); + + if (request instanceof SearchRequest) { + ((SearchRequest) request).indicesOptions(ALLOW_EMPTY); + } else if (request instanceof ClusterSearchShardsRequest) { + ((ClusterSearchShardsRequest) request).indicesOptions(ALLOW_EMPTY); + } else if (request instanceof GetFieldMappingsRequest) { + 
((GetFieldMappingsRequest) request).indicesOptions(ALLOW_EMPTY); + } - if (irr.replace(request, true, reduced.toArray(new String[0]))) { - presponse.missingPrivileges.clear(); - presponse.allowed = true; - return presponse; + return PrivilegesEvaluatorResponse.ok(); } } - // not bulk, mget, etc request here - boolean permGiven = false; - - if (isDebugEnabled) { - log.debug("Security roles: {}", securityRoles.getRoleNames()); - } + if (presponse.isAllowed()) { + if (checkFilteredAliases(requestedResolved, action0, isDebugEnabled)) { + presponse.allowed = false; + return presponse; + } - if (dcm.isMultiRolespanEnabled()) { - permGiven = securityRoles.impliesTypePermGlobal(requestedResolved, user, allIndexPermsRequiredA, resolver, clusterService); + if (isDebugEnabled) { + log.debug("Allowed because we have all indices permissions for {}", action0); + } } else { - permGiven = securityRoles.get(requestedResolved, user, allIndexPermsRequiredA, resolver, clusterService); - - } - - if (!permGiven) { log.info( - "No {}-level perm match for {} {} [Action [{}]] [RolesChecked {}]", + "No {}-level perm match for {} {}: {} [Action [{}]] [RolesChecked {}]", "index", user, requestedResolved, + presponse.getReason(), action0, mappedRoles ); - log.info("No permissions for {}", presponse.missingPrivileges); - } else { - - if (checkFilteredAliases(requestedResolved, action0, isDebugEnabled)) { - presponse.allowed = false; - return presponse; - } - - if (isDebugEnabled) { - log.debug("Allowed because we have all indices permissions for {}", action0); + log.info("Index to privilege matrix:\n{}", presponse.getPrivilegeMatrix()); + if (presponse.hasEvaluationExceptions()) { + log.info("Evaluation errors:\n{}", presponse.getEvaluationExceptionInfo()); } } - presponse.allowed = permGiven; return presponse; - } public Set mapRoles(final User user, final TransportAddress caller) { @@ -605,9 +629,8 @@ public List getSignInOptions() { return dcm.getSignInOptions(); } - private Set evaluateAdditionalIndexPermissions(final ActionRequest request, final String originalAction) { - // --- check inner bulk requests - final Set additionalPermissionsRequired = new HashSet<>(); + private ImmutableSet evaluateAdditionalIndexPermissions(final ActionRequest request, final String originalAction) { + ImmutableSet.Builder additionalPermissionsRequired = ImmutableSet.builder(); if (!isClusterPerm(originalAction)) { additionalPermissionsRequired.add(originalAction); @@ -661,15 +684,17 @@ private Set evaluateAdditionalIndexPermissions(final ActionRequest reque additionalPermissionsRequired.addAll(ConfigConstants.SECURITY_SNAPSHOT_RESTORE_NEEDED_WRITE_PRIVILEGES); } - if (additionalPermissionsRequired.size() > 1) { - traceAction("Additional permissions required: {}", additionalPermissionsRequired); + ImmutableSet result = additionalPermissionsRequired.build(); + + if (result.size() > 1) { + traceAction("Additional permissions required: {}", result); } - if (log.isDebugEnabled() && additionalPermissionsRequired.size() > 1) { - log.debug("Additional permissions required: {}", additionalPermissionsRequired); + if (log.isDebugEnabled() && result.size() > 1) { + log.debug("Additional permissions required: {}", result); } - return Collections.unmodifiableSet(additionalPermissionsRequired); + return result; } public static boolean isClusterPerm(String action0) { @@ -703,14 +728,14 @@ private boolean checkFilteredAliases(Resolved requestedResolved, String action, indexMetaDataCollection = new Iterable() { @Override public Iterator iterator() { - 
return clusterService.state().getMetadata().getIndices().values().iterator(); + return clusterStateSupplier.get().getMetadata().getIndices().values().iterator(); } }; } else { Set indexMetaDataSet = new HashSet<>(requestedResolved.getAllIndices().size()); for (String requestAliasOrIndex : requestedResolved.getAllIndices()) { - IndexMetadata indexMetaData = clusterService.state().getMetadata().getIndices().get(requestAliasOrIndex); + IndexMetadata indexMetaData = clusterStateSupplier.get().getMetadata().getIndices().get(requestAliasOrIndex); if (indexMetaData == null) { if (isDebugEnabled) { log.debug("{} does not exist in cluster metadata", requestAliasOrIndex); diff --git a/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluatorResponse.java b/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluatorResponse.java index 915514264c..d072ec301c 100644 --- a/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluatorResponse.java +++ b/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluatorResponse.java @@ -26,33 +26,115 @@ package org.opensearch.security.privileges; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Set; +import com.google.common.collect.ImmutableSet; + import org.opensearch.action.admin.indices.create.CreateIndexRequestBuilder; +import com.selectivem.collections.CheckTable; + public class PrivilegesEvaluatorResponse { boolean allowed = false; - Set missingPrivileges = new HashSet(); Set missingSecurityRoles = new HashSet<>(); - Set resolvedSecurityRoles = new HashSet<>(); PrivilegesEvaluatorResponseState state = PrivilegesEvaluatorResponseState.PENDING; CreateIndexRequestBuilder createIndexRequestBuilder; - + private Set onlyAllowedForIndices = ImmutableSet.of(); + private CheckTable indexToActionCheckTable; + private String privilegeMatrix; + private String reason; + + /** + * Contains issues that were encountered during privilege evaluation. Can be used for logging. + */ + private List evaluationExceptions = new ArrayList<>(); + + /** + * Returns true if the request can be fully allowed. See also isAllowedForSpecificIndices(). + */ public boolean isAllowed() { return allowed; } + /** + * Returns true if the request can be allowed if the referenced indices are reduced (aka "do not fail on forbidden"). + * See getAvailableIndices() for the indices for which we have privileges. + */ + public boolean isPartiallyOk() { + return !this.onlyAllowedForIndices.isEmpty(); + } + + /** + * In case isPartiallyOk() is true, this returns the indices for which we have privileges. + */ + public Set getAvailableIndices() { + return this.onlyAllowedForIndices; + } + + /** + * In case isAllowed() is false, this returns the privileges (aka action names) for which we do not have sufficient + * privileges. + */ public Set getMissingPrivileges() { - return new HashSet(missingPrivileges); + return this.indexToActionCheckTable != null ? this.indexToActionCheckTable.getIncompleteColumns() : Collections.emptySet(); } - public Set getMissingSecurityRoles() { - return new HashSet<>(missingSecurityRoles); + /** + * Returns a human-readable reason for the missing privilege. Can be used to make the error message more easy + * to understand. 
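The check table handling above builds on CheckTable from the new special-collections-complete dependency. A hedged sketch using only calls that appear in this change; the exact CheckTable semantics are an assumption derived from its usage here:

    // Rows are index names, columns are action names (matching the "indexToActionCheckTable" naming).
    CheckTable<String, String> table = CheckTable.create(
        ImmutableSet.of("logs-2024", "metrics-2024"),
        ImmutableSet.of("indices:data/read/search"));

    // check() marks a cell as granted; judging from its use in StatefulIndexPrivileges, it returns true
    // once every cell of the table has been checked.
    boolean complete = table.check("logs-2024", "indices:data/read/search");

    if (!complete) {
        PrivilegesEvaluatorResponse response = PrivilegesEvaluatorResponse.insufficient(table);
        Set<String> missing = response.getMissingPrivileges();    // the actions with unchecked cells
        String matrix = table.toTableString("ok", "MISSING");     // same rendering as getPrivilegeMatrix()
    }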
+ */ + public String getReason() { + return this.reason; + } + + public PrivilegesEvaluatorResponse reason(String reason) { + this.reason = reason; + return this; + } + + /** + * Returns a diagnostic string that contains issues that were encountered during privilege evaluation. Can be used for logging. + */ + public String getEvaluationExceptionInfo() { + StringBuilder result = new StringBuilder("Exceptions encountered during privilege evaluation:\n"); + + for (PrivilegesEvaluationException evaluationException : this.evaluationExceptions) { + result.append(evaluationException.getNestedMessages()).append("\n"); + } + + return result.toString(); + } + + public boolean hasEvaluationExceptions() { + return !evaluationExceptions.isEmpty(); } - public Set getResolvedSecurityRoles() { - return new HashSet<>(resolvedSecurityRoles); + public PrivilegesEvaluatorResponse evaluationExceptions(Collection evaluationExceptions) { + this.evaluationExceptions.addAll(evaluationExceptions); + return this; + } + + /** + * Returns an ASCII string showing a matrix of available/missing privileges. + * Rows represent indices, columns represent actions. + */ + public String getPrivilegeMatrix() { + String result = this.privilegeMatrix; + + if (result == null) { + result = this.indexToActionCheckTable.toTableString("ok", "MISSING"); + this.privilegeMatrix = result; + } + return result; + } + + public Set getMissingSecurityRoles() { + return new HashSet<>(missingSecurityRoles); } public CreateIndexRequestBuilder getCreateIndexRequestBuilder() { @@ -79,11 +161,46 @@ public boolean isPending() { @Override public String toString() { - return "PrivEvalResponse [allowed=" + allowed + ", missingPrivileges=" + missingPrivileges + "]"; + return "PrivEvalResponse [\nallowed=" + + allowed + + ",\nonlyAllowedForIndices=" + + onlyAllowedForIndices + + ",\n" + + (indexToActionCheckTable != null ? 
indexToActionCheckTable.toTableString("ok", "MISSING") : "") + + "]"; + } + + public static PrivilegesEvaluatorResponse ok() { + PrivilegesEvaluatorResponse response = new PrivilegesEvaluatorResponse(); + response.allowed = true; + return response; + } + + public static PrivilegesEvaluatorResponse partiallyOk( + Set availableIndices, + CheckTable indexToActionCheckTable + ) { + PrivilegesEvaluatorResponse response = new PrivilegesEvaluatorResponse(); + response.onlyAllowedForIndices = ImmutableSet.copyOf(availableIndices); + response.indexToActionCheckTable = indexToActionCheckTable; + return response; + } + + public static PrivilegesEvaluatorResponse insufficient(String missingPrivilege) { + PrivilegesEvaluatorResponse response = new PrivilegesEvaluatorResponse(); + response.indexToActionCheckTable = CheckTable.create(ImmutableSet.of("_"), ImmutableSet.of(missingPrivilege)); + return response; + } + + public static PrivilegesEvaluatorResponse insufficient(CheckTable indexToActionCheckTable) { + PrivilegesEvaluatorResponse response = new PrivilegesEvaluatorResponse(); + response.indexToActionCheckTable = indexToActionCheckTable; + return response; } public static enum PrivilegesEvaluatorResponseState { PENDING, COMPLETE; } + } diff --git a/src/main/java/org/opensearch/security/privileges/RestLayerPrivilegesEvaluator.java b/src/main/java/org/opensearch/security/privileges/RestLayerPrivilegesEvaluator.java index d1ccb84fc8..b1f994163c 100644 --- a/src/main/java/org/opensearch/security/privileges/RestLayerPrivilegesEvaluator.java +++ b/src/main/java/org/opensearch/security/privileges/RestLayerPrivilegesEvaluator.java @@ -16,89 +16,38 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.OpenSearchSecurityException; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.util.concurrent.ThreadContext; -import org.opensearch.core.common.transport.TransportAddress; -import org.opensearch.security.securityconf.ConfigModel; -import org.opensearch.security.securityconf.SecurityRoles; -import org.opensearch.security.support.ConfigConstants; import org.opensearch.security.user.User; -import org.opensearch.threadpool.ThreadPool; - -import org.greenrobot.eventbus.Subscribe; public class RestLayerPrivilegesEvaluator { protected final Logger log = LogManager.getLogger(this.getClass()); - private final ClusterService clusterService; - private ThreadContext threadContext; - private ConfigModel configModel; - - public RestLayerPrivilegesEvaluator(final ClusterService clusterService, final ThreadPool threadPool) { - this.clusterService = clusterService; - this.threadContext = threadPool.getThreadContext(); - } - - @Subscribe - public void onConfigModelChanged(final ConfigModel configModel) { - this.configModel = configModel; - } - - SecurityRoles getSecurityRoles(final Set roles) { - return configModel.getSecurityRoles().filter(roles); - } + private final PrivilegesEvaluator privilegesEvaluator; - boolean isInitialized() { - return configModel != null && configModel.getSecurityRoles() != null; + public RestLayerPrivilegesEvaluator(PrivilegesEvaluator privilegesEvaluator) { + this.privilegesEvaluator = privilegesEvaluator; } - public PrivilegesEvaluatorResponse evaluate(final User user, final Set actions) { - if (!isInitialized()) { - throw new OpenSearchSecurityException("OpenSearch Security is not initialized."); - } - - final PrivilegesEvaluatorResponse presponse = new PrivilegesEvaluatorResponse(); - - final 
TransportAddress caller = threadContext.getTransient(ConfigConstants.OPENDISTRO_SECURITY_REMOTE_ADDRESS); - - final Set mappedRoles = mapRoles(user, caller); - - presponse.resolvedSecurityRoles.addAll(mappedRoles); - final SecurityRoles securityRoles = getSecurityRoles(mappedRoles); + public PrivilegesEvaluatorResponse evaluate(final User user, final String routeName, final Set actions) { + PrivilegesEvaluationContext context = privilegesEvaluator.createContext(user, routeName); final boolean isDebugEnabled = log.isDebugEnabled(); if (isDebugEnabled) { - log.debug("Evaluate permissions for {} on {}", user, clusterService.localNode().getName()); + log.debug("Evaluate permissions for {}", user); log.debug("Action: {}", actions); - log.debug("Mapped roles: {}", mappedRoles.toString()); + log.debug("Mapped roles: {}", context.getMappedRoles().toString()); } - for (final String action : actions) { - if (!securityRoles.impliesClusterPermissionPermission(action)) { - presponse.missingPrivileges.add(action); - presponse.allowed = false; - log.info( - "No permission match for {} [Action [{}]] [RolesChecked {}]. No permissions for {}", - user, - action, - securityRoles.getRoleNames(), - presponse.missingPrivileges - ); - } else { - if (isDebugEnabled) { - log.debug("Allowed because we have permissions for {}", actions); - } - presponse.allowed = true; + PrivilegesEvaluatorResponse result = privilegesEvaluator.getActionPrivileges().hasAnyClusterPrivilege(context, actions); - // break the loop as we found the matching permission - break; - } + if (!result.allowed) { + log.info( + "No permission match for {} [Action [{}]] [RolesChecked {}]. No permissions for {}", + user, + routeName, + context.getMappedRoles(), + result.getMissingPrivileges() + ); } - return presponse; - } - - Set mapRoles(final User user, final TransportAddress caller) { - return this.configModel.mapSecurityRoles(user, caller); + return result; } } diff --git a/src/main/java/org/opensearch/security/privileges/SystemIndexAccessEvaluator.java b/src/main/java/org/opensearch/security/privileges/SystemIndexAccessEvaluator.java index 38825a9bf1..99828f7b17 100644 --- a/src/main/java/org/opensearch/security/privileges/SystemIndexAccessEvaluator.java +++ b/src/main/java/org/opensearch/security/privileges/SystemIndexAccessEvaluator.java @@ -32,20 +32,18 @@ import java.util.Set; import java.util.stream.Collectors; +import com.google.common.collect.ImmutableSet; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.action.ActionRequest; import org.opensearch.action.RealtimeRequest; import org.opensearch.action.search.SearchRequest; -import org.opensearch.cluster.metadata.IndexNameExpressionResolver; -import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; import org.opensearch.indices.SystemIndexRegistry; import org.opensearch.security.auditlog.AuditLog; import org.opensearch.security.resolver.IndexResolverReplacer; import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; -import org.opensearch.security.securityconf.SecurityRoles; import org.opensearch.security.support.ConfigConstants; import org.opensearch.security.support.WildcardMatcher; import org.opensearch.security.user.User; @@ -72,6 +70,7 @@ public class SystemIndexAccessEvaluator { private final boolean isSystemIndexEnabled; private final boolean isSystemIndexPermissionEnabled; + private final static ImmutableSet SYSTEM_INDEX_PERMISSION_SET = 
ImmutableSet.of(ConfigConstants.SYSTEM_INDEX_PERMISSION); public SystemIndexAccessEvaluator(final Settings settings, AuditLog auditLog, IndexResolverReplacer irr) { this.securityIndex = settings.get( @@ -128,12 +127,11 @@ public PrivilegesEvaluatorResponse evaluate( final String action, final Resolved requestedResolved, final PrivilegesEvaluatorResponse presponse, - final SecurityRoles securityRoles, - final User user, - final IndexNameExpressionResolver resolver, - final ClusterService clusterService + final PrivilegesEvaluationContext context, + final ActionPrivileges actionPrivileges, + final User user ) { - evaluateSystemIndicesAccess(action, requestedResolved, request, task, presponse, securityRoles, user, resolver, clusterService); + evaluateSystemIndicesAccess(action, requestedResolved, request, task, presponse, context, actionPrivileges, user); if (requestedResolved.isLocalAll() || requestedResolved.getAllIndices().contains(securityIndex) @@ -235,10 +233,9 @@ private boolean isActionAllowed(String action) { * @param request the action request to be used for audit logging * @param task task in which this access check will be performed * @param presponse the pre-response object that will eventually become a response and returned to the requester - * @param securityRoles user's roles which will be used for access evaluation + * @param context conveys information about user and mapped roles, etc. + * @param actionPrivileges the up-to-date ActionPrivileges instance * @param user this user's permissions will be looked up - * @param resolver the index expression resolver - * @param clusterService required to fetch cluster state metadata */ private void evaluateSystemIndicesAccess( final String action, @@ -246,10 +243,9 @@ private void evaluateSystemIndicesAccess( final ActionRequest request, final Task task, final PrivilegesEvaluatorResponse presponse, - SecurityRoles securityRoles, - final User user, - final IndexNameExpressionResolver resolver, - final ClusterService clusterService + final PrivilegesEvaluationContext context, + final ActionPrivileges actionPrivileges, + final User user ) { // Perform access check is system index permissions are enabled boolean containsSystemIndex = requestContainsAnySystemIndices(requestedResolved); @@ -260,7 +256,7 @@ private void evaluateSystemIndicesAccess( if (serviceAccountUser && containsRegularIndex) { auditLog.logSecurityIndexAttempt(request, action, task); if (!containsSystemIndex && log.isInfoEnabled()) { - log.info("{} not permitted for a service account {} on non-system indices.", action, securityRoles); + log.info("{} not permitted for a service account {} on non-system indices.", action, context.getMappedRoles()); } else if (containsSystemIndex && log.isDebugEnabled()) { List regularIndices = requestedResolved.getAllIndices() .stream() @@ -282,7 +278,7 @@ private void evaluateSystemIndicesAccess( log.info( "{} not permitted for a regular user {} on protected system indices {}", action, - securityRoles, + context.getMappedRoles(), String.join(", ", getAllProtectedSystemIndices(requestedResolved)) ); } @@ -290,19 +286,13 @@ private void evaluateSystemIndicesAccess( presponse.markComplete(); return; } else if (containsSystemIndex - && !securityRoles.hasExplicitIndexPermission( - requestedResolved, - user, - new String[] { ConfigConstants.SYSTEM_INDEX_PERMISSION }, - resolver, - clusterService - )) { + && !actionPrivileges.hasExplicitIndexPrivilege(context, SYSTEM_INDEX_PERMISSION_SET, requestedResolved).isAllowed()) { 
auditLog.logSecurityIndexAttempt(request, action, task); if (log.isInfoEnabled()) { log.info( "No {} permission for user roles {} to System Indices {}", action, - securityRoles, + context.getMappedRoles(), String.join(", ", getAllSystemIndices(requestedResolved)) ); } diff --git a/src/main/java/org/opensearch/security/privileges/TermsAggregationEvaluator.java b/src/main/java/org/opensearch/security/privileges/TermsAggregationEvaluator.java index cc0bf25b5e..a2cd1c16a7 100644 --- a/src/main/java/org/opensearch/security/privileges/TermsAggregationEvaluator.java +++ b/src/main/java/org/opensearch/security/privileges/TermsAggregationEvaluator.java @@ -26,8 +26,8 @@ package org.opensearch.security.privileges; -import java.util.Set; - +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Sets; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -38,27 +38,24 @@ import org.opensearch.action.search.MultiSearchAction; import org.opensearch.action.search.SearchAction; import org.opensearch.action.search.SearchRequest; -import org.opensearch.cluster.metadata.IndexNameExpressionResolver; -import org.opensearch.cluster.service.ClusterService; import org.opensearch.index.query.MatchNoneQueryBuilder; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.TermsQueryBuilder; import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; -import org.opensearch.security.securityconf.SecurityRoles; -import org.opensearch.security.user.User; public class TermsAggregationEvaluator { protected final Logger log = LogManager.getLogger(this.getClass()); - private static final String[] READ_ACTIONS = new String[] { + private static final ImmutableSet READ_ACTIONS = ImmutableSet.of( MultiSearchAction.NAME, MultiGetAction.NAME, GetAction.NAME, SearchAction.NAME, - FieldCapabilitiesAction.NAME }; + FieldCapabilitiesAction.NAME + ); private static final QueryBuilder NONE_QUERY = new MatchNoneQueryBuilder(); @@ -67,10 +64,8 @@ public TermsAggregationEvaluator() {} public PrivilegesEvaluatorResponse evaluate( final Resolved resolved, final ActionRequest request, - ClusterService clusterService, - User user, - SecurityRoles securityRoles, - IndexNameExpressionResolver resolver, + PrivilegesEvaluationContext context, + ActionPrivileges actionPrivileges, PrivilegesEvaluatorResponse presponse ) { try { @@ -89,17 +84,22 @@ public PrivilegesEvaluatorResponse evaluate( && ab.getPipelineAggregations().isEmpty() && ab.getSubAggregations().isEmpty()) { - final Set allPermittedIndices = securityRoles.getAllPermittedIndicesForDashboards( - resolved, - user, + PrivilegesEvaluatorResponse subResponse = actionPrivileges.hasIndexPrivilege( + context, READ_ACTIONS, - resolver, - clusterService + Resolved._LOCAL_ALL ); - if (allPermittedIndices == null || allPermittedIndices.isEmpty()) { + + if (subResponse.isPartiallyOk()) { + sr.source() + .query( + new TermsQueryBuilder( + "_index", + Sets.union(subResponse.getAvailableIndices(), resolved.getRemoteIndices()) + ) + ); + } else if (!subResponse.isAllowed()) { sr.source().query(NONE_QUERY); - } else { - sr.source().query(new TermsQueryBuilder("_index", allPermittedIndices)); } presponse.allowed = true; diff --git a/src/main/java/org/opensearch/security/privileges/UserAttributes.java b/src/main/java/org/opensearch/security/privileges/UserAttributes.java 
index e138c5f621..a1a949d96c 100644 --- a/src/main/java/org/opensearch/security/privileges/UserAttributes.java +++ b/src/main/java/org/opensearch/security/privileges/UserAttributes.java @@ -15,6 +15,7 @@ import com.google.common.base.Joiner; import com.google.common.collect.Iterables; +import com.google.common.collect.Sets; import org.opensearch.security.user.User; @@ -24,6 +25,34 @@ * This code was moved over from ConfigModelV7. */ public class UserAttributes { + public static String replaceProperties(String orig, PrivilegesEvaluationContext context) { + User user = context.getUser(); + + orig = orig.replace("${user.name}", user.getName()).replace("${user_name}", user.getName()); + orig = replaceRoles(orig, user); + orig = replaceSecurityRoles(orig, context); + for (Map.Entry entry : user.getCustomAttributesMap().entrySet()) { + if (entry.getKey() == null || entry.getValue() == null) { + continue; + } + orig = orig.replace("${" + entry.getKey() + "}", entry.getValue()); + orig = orig.replace("${" + entry.getKey().replace('.', '_') + "}", entry.getValue()); + } + return orig; + } + + private static String replaceSecurityRoles(final String orig, PrivilegesEvaluationContext context) { + String retVal = orig; + if (orig.contains("${user.securityRoles}") || orig.contains("${user_securityRoles}")) { + final String commaSeparatedRoles = toQuotedCommaSeparatedString( + Sets.union(context.getUser().getSecurityRoles(), context.getMappedRoles()) + ); + retVal = orig.replace("${user.securityRoles}", commaSeparatedRoles).replace("${user_securityRoles}", commaSeparatedRoles); + } + return retVal; + } + + @Deprecated public static String replaceProperties(String orig, User user) { if (user == null || orig == null) { @@ -52,6 +81,7 @@ private static String replaceRoles(final String orig, final User user) { return retVal; } + @Deprecated private static String replaceSecurityRoles(final String orig, final User user) { String retVal = orig; if (orig.contains("${user.securityRoles}") || orig.contains("${user_securityRoles}")) { diff --git a/src/main/java/org/opensearch/security/privileges/WellKnownActions.java b/src/main/java/org/opensearch/security/privileges/WellKnownActions.java new file mode 100644 index 0000000000..af4f0bb025 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/WellKnownActions.java @@ -0,0 +1,88 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges; + +import com.google.common.collect.ImmutableSet; + +import org.opensearch.action.admin.cluster.health.ClusterHealthAction; +import org.opensearch.action.admin.cluster.node.stats.NodesStatsAction; +import org.opensearch.action.admin.cluster.state.ClusterStateAction; +import org.opensearch.action.admin.cluster.stats.ClusterStatsAction; +import org.opensearch.action.admin.indices.analyze.AnalyzeAction; +import org.opensearch.action.admin.indices.create.AutoCreateAction; +import org.opensearch.action.admin.indices.mapping.put.AutoPutMappingAction; +import org.opensearch.action.admin.indices.mapping.put.PutMappingAction; +import org.opensearch.action.admin.indices.refresh.RefreshAction; +import org.opensearch.action.admin.indices.refresh.TransportShardRefreshAction; +import org.opensearch.action.bulk.BulkAction; +import org.opensearch.action.bulk.TransportShardBulkAction; +import org.opensearch.action.delete.DeleteAction; +import org.opensearch.action.fieldcaps.FieldCapabilitiesAction; +import org.opensearch.action.get.GetAction; +import org.opensearch.action.get.MultiGetAction; +import org.opensearch.action.index.IndexAction; +import org.opensearch.action.main.MainAction; +import org.opensearch.action.search.ClearScrollAction; +import org.opensearch.action.search.MultiSearchAction; +import org.opensearch.action.search.SearchAction; +import org.opensearch.action.search.SearchScrollAction; +import org.opensearch.action.termvectors.MultiTermVectorsAction; +import org.opensearch.action.termvectors.TermVectorsAction; +import org.opensearch.action.update.UpdateAction; +import org.opensearch.index.reindex.DeleteByQueryAction; +import org.opensearch.index.reindex.UpdateByQueryAction; +import org.opensearch.security.support.ConfigConstants; + +/** + * This class lists so-called "well-known actions". These are taken into account when creating the pre-computed + * data structures of the ActionPrivileges class. Thus, a very fast performance evaluation will be possible for + * these actions. The trade-off is that each well-known action increases the heap footprint required by the data + * structures. Thus, it makes sense to limit these actions to these which are really performance critical. 
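To make the trade-off concrete: a privilege check can first test whether an action is in one of these sets and only then use the pre-computed structures; everything else goes through a slower, pattern-based path. This is only an illustrative sketch of that dispatch, assuming a caller that holds a pre-computed set of granted actions; the real logic lives in ActionPrivileges.

    // Illustrative only; not the actual ActionPrivileges implementation.
    class WellKnownDispatchSketch {
        boolean isGranted(String action, java.util.Set<String> precomputedGrants,
                          java.util.function.Predicate<String> patternBasedCheck) {
            if (WellKnownActions.CLUSTER_ACTIONS.contains(action) || WellKnownActions.INDEX_ACTIONS.contains(action)) {
                return precomputedGrants.contains(action); // fast path: hash lookup in pre-computed data
            }
            return patternBasedCheck.test(action); // slow path: evaluate the configured action patterns
        }
    }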
+ */ +public class WellKnownActions { + public static final ImmutableSet CLUSTER_ACTIONS = ImmutableSet.of( + MultiGetAction.NAME, + BulkAction.NAME, + SearchScrollAction.NAME, + MultiSearchAction.NAME, + MultiTermVectorsAction.NAME, + ClearScrollAction.NAME, + MainAction.NAME, + ClusterStatsAction.NAME, + ClusterStateAction.NAME, + ClusterHealthAction.NAME, + NodesStatsAction.NAME + ); + + public static final ImmutableSet INDEX_ACTIONS = ImmutableSet.of( + IndexAction.NAME, + GetAction.NAME, + TermVectorsAction.NAME, + DeleteAction.NAME, + UpdateAction.NAME, + SearchAction.NAME, + UpdateByQueryAction.NAME, + DeleteByQueryAction.NAME, + TransportShardBulkAction.ACTION_NAME, + PutMappingAction.NAME, + AutoPutMappingAction.NAME, + AnalyzeAction.NAME, + AutoCreateAction.NAME, + RefreshAction.NAME, + TransportShardRefreshAction.NAME, + FieldCapabilitiesAction.NAME + ); + + /** + * Compare https://github.com/opensearch-project/security/pull/2887 + */ + public static final ImmutableSet EXPLICITLY_REQUIRED_INDEX_ACTIONS = ImmutableSet.of(ConfigConstants.SYSTEM_INDEX_PERMISSION); +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/AbstractRuleBasedPrivileges.java b/src/main/java/org/opensearch/security/privileges/dlsfls/AbstractRuleBasedPrivileges.java new file mode 100644 index 0000000000..43baf8090d --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/AbstractRuleBasedPrivileges.java @@ -0,0 +1,833 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges.dlsfls; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import com.google.common.collect.ImmutableMap; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.common.settings.Settings; +import org.opensearch.security.privileges.IndexPattern; +import org.opensearch.security.privileges.PrivilegesConfigurationValidationException; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; +import org.opensearch.security.resolver.IndexResolverReplacer; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.support.ConfigConstants; +import org.opensearch.security.support.WildcardMatcher; + +import com.selectivem.collections.CompactMapGroupBuilder; +import com.selectivem.collections.DeduplicatingCompactSubSetBuilder; + +/** + * Abstract super class which provides common DLS/FLS/FM rule evaluation functionality for the concrete classes + * DocumentPrivileges, FieldPrivileges and FieldMasking. + *

+ * With the exception of the statefulRules property, instances of this class are immutable. The life-cycle of an + * instance of this class corresponds to the life-cycle of the role configuration. If the role configuration is changed, + * a new instance needs to be built. + *

+ * Following the secure-by-default principle, this class returns full restrictions if there is no role covering the + * requested index. It has two fundamental working modes, based on the value of the plugins.security.dfm_empty_overrides_all + * setting: If the setting is true, roles without a DLS/FLS/FM rule are always considered to grant full access. If the + * setting is false, roles without a DLS/FLS/FM rule are ONLY considered if there are no other roles that restrict access. + * The former is the more logical one, as it follows the rule that a user gaining more roles can only gain more privileges. + * The latter breaks that rule. In that case, a user with more roles can have fewer privileges. + *

+ * Concrete sub-classes of this class must define concrete types for SingleRule and JoinedRule. These should be immutable + * types. Additionally, they must define a function that converts roles to SingleRule objects and pass that function + * to the constructor via the roleToRuleFunction parameter. Finally, the abstract methods unrestricted(), restricted() + * and compile() must be implemented. + * + * @param A single DLS/FLS/FM rule as defined in roles.yml. + * @param A merged DLS/FLS/FM rule that might contain SingleRules from several roles that apply to a user at the same time. + */ +abstract class AbstractRuleBasedPrivileges { + private static final Logger log = LogManager.getLogger(AbstractRuleBasedPrivileges.class); + + /** + * The roles configuration this instance is based on + */ + protected final SecurityDynamicConfiguration roles; + + /** + * Compiled rules that are immutable. + */ + protected final StaticRules staticRules; + + /** + * Compiled rules, that are denormalized based on the current indices. These are updated whenever the indices change. + * As this attribute is volatile, access to this attribute should be limited, e.g., not used in tight loops. + */ + private volatile StatefulRules statefulRules; + + /** + * A function that converts role instances to rules. + */ + private final RoleToRuleFunction roleToRuleFunction; + + /** + * Corresponds to the settings flag plugins.security.dfm_empty_overrides_all. + */ + private final boolean dfmEmptyOverridesAll; + + public AbstractRuleBasedPrivileges( + SecurityDynamicConfiguration roles, + Map indexMetadata, + RoleToRuleFunction roleToRuleFunction, + Settings settings + ) { + this.roles = roles; + this.roleToRuleFunction = roleToRuleFunction; + this.staticRules = new StaticRules<>(roles, roleToRuleFunction); + this.dfmEmptyOverridesAll = settings.getAsBoolean(ConfigConstants.SECURITY_DFM_EMPTY_OVERRIDES_ALL, false); + this.statefulRules = new StatefulRules<>(roles, indexMetadata, roleToRuleFunction); + } + + /** + * Returns true if the user identified in the PrivilegesEvaluationContext does not have any restrictions in any case, + * independently of the indices they are requesting. + */ + public boolean isUniversallyUnrestricted(PrivilegesEvaluationContext context) { + if (this.dfmEmptyOverridesAll + && CollectionUtils.containsAny(this.staticRules.rolesWithIndexWildcardWithoutRule, context.getMappedRoles())) { + return true; + } + + return false; + } + + /** + * Returns true if the user identified in the PrivilegesEvaluationContext does not have any restrictions for the + * given resolved indices. + * + * @throws PrivilegesEvaluationException If something went wrong during privileges evaluation. In such cases, any + * access should be denied to make sure that no unauthorized information is exposed. + */ + public boolean isUnrestricted(PrivilegesEvaluationContext context, IndexResolverReplacer.Resolved resolved) + throws PrivilegesEvaluationException { + if (context.getMappedRoles().isEmpty()) { + return false; + } + + if (this.dfmEmptyOverridesAll + && CollectionUtils.containsAny(this.staticRules.rolesWithIndexWildcardWithoutRule, context.getMappedRoles())) { + return true; + } + + if (resolved == null) { + return false; + } + + if (this.hasRestrictedRulesWithIndexWildcard(context)) { + return false; + } + + StatefulRules statefulRules = this.statefulRules; + + // The logic is here a bit tricky: For each index/alias/data stream we assume restrictions until we found an unrestricted role. 
+ // If we found an unrestricted role, we continue with the next index/alias/data stream. If we found a restricted role, we abort + // early and return false. + + for (String index : resolved.getAllIndicesResolved(context.getClusterStateSupplier(), context.getIndexNameExpressionResolver())) { + if (this.dfmEmptyOverridesAll) { + // We assume that we have a restriction unless there are roles without restriction. + // Thus, we only have to check the roles without restriction. + if (!this.hasUnrestrictedRulesExplicit(context, statefulRules, index)) { + return false; + } + } else { + // if dfmEmptyOverwritesAll == false, we prefer restricted roles over unrestricted ones. + // Thus, we first check for restricted roles. Only if there are not any restricted roles, + // we check for the presence of unrestricted roles. If there are not any matching roles, + // we also assume full restrictions. + + if (this.hasRestrictedRulesExplicit(context, statefulRules, index)) { + return false; + } else if (!CollectionUtils.containsAny(this.staticRules.rolesWithIndexWildcardWithoutRule, context.getMappedRoles()) + && !this.hasUnrestrictedRulesExplicit(context, statefulRules, index)) { + return false; + } + } + } + + return true; + } + + /** + * Returns true if there are roles without a rule which imposes restrictions for the particular index. + * Does consider rules with index wildcards ("*"). + */ + public boolean isUnrestricted(PrivilegesEvaluationContext context, String index) throws PrivilegesEvaluationException { + if (context.getMappedRoles().isEmpty()) { + return false; + } + + if (this.dfmEmptyOverridesAll + && CollectionUtils.containsAny(this.staticRules.rolesWithIndexWildcardWithoutRule, context.getMappedRoles())) { + return true; + } + + if (this.hasRestrictedRulesWithIndexWildcard(context)) { + return false; + } + + if (this.dfmEmptyOverridesAll) { + // We assume that we have a restriction unless there are roles without restriction. + // Thus, we only have to check the roles without restriction. + return this.hasUnrestrictedRulesExplicit(context, statefulRules, index); + } else { + // if dfmEmptyOverwritesAll == false, we prefer restricted roles over unrestricted ones. + // Thus, we first check for restricted roles. Only if there are not any restricted roles, + // we check for the presence of unrestricted roles. If there are not any matching roles, + // we also assume full restrictions. + + if (this.hasRestrictedRulesExplicit(context, statefulRules, index)) { + return false; + } else { + if (CollectionUtils.containsAny(this.staticRules.rolesWithIndexWildcardWithoutRule, context.getMappedRoles())) { + return true; + } + + return this.hasUnrestrictedRulesExplicit(context, statefulRules, index); + } + } + } + + /** + * Returns true if there are roles without a rule which imposes restrictions for the particular index. + * Does not consider rules with index wildcards ("*") - this is reflected by the "explicit" in the method name.
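The interplay of the two working modes can be summarized in a standalone way. The sketch below only models the per-index decision described in the comments above; it is a simplification and deliberately does not use the plugin's classes.

    // Simplified model: "unrestricted role" = a matching role without a DLS/FLS/FM rule,
    // "restricted role" = a matching role with such a rule.
    final class DfmEmptyOverridesAllSketch {
        static boolean isUnrestricted(boolean dfmEmptyOverridesAll, boolean hasUnrestrictedRole, boolean hasRestrictedRole) {
            if (dfmEmptyOverridesAll) {
                return hasUnrestrictedRole;  // a rule-less role always grants full access
            }
            if (hasRestrictedRole) {
                return false;                // restricted roles win over rule-less roles
            }
            return hasUnrestrictedRole;      // rule-less roles only count if nothing restricts
        }

        public static void main(String[] args) {
            // one role with a DLS rule and one without, both matching the index:
            System.out.println(isUnrestricted(true, true, true));   // true  -> no restriction applied
            System.out.println(isUnrestricted(false, true, true));  // false -> restriction applied
        }
    }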
+ */ + private boolean hasUnrestrictedRulesExplicit(PrivilegesEvaluationContext context, StatefulRules statefulRules, String index) + throws PrivilegesEvaluationException { + + if (statefulRules != null && statefulRules.covers(index)) { + Set roleWithoutRule = statefulRules.indexToRoleWithoutRule.get(index); + + if (roleWithoutRule != null && CollectionUtils.containsAny(roleWithoutRule, context.getMappedRoles())) { + return true; + } + } else { + if (this.staticRules.hasUnrestrictedPatterns(context, index)) { + return true; + } + } + + if (this.staticRules.hasUnrestrictedPatternTemplates(context, index)) { + return true; + } + + IndexAbstraction indexAbstraction = context.getIndicesLookup().get(index); + if (indexAbstraction != null) { + for (String parent : getParents(indexAbstraction)) { + if (hasUnrestrictedRulesExplicit(context, statefulRules, parent)) { + return true; + } + } + } + + return false; + + } + + /** + * Returns true if there are roles with a rule which imposes restrictions for the particular index. + * Does not consider rules with index wildcards ("*") - this is reflected by the "explicit" in the method name. + */ + private boolean hasRestrictedRulesExplicit(PrivilegesEvaluationContext context, StatefulRules statefulRules, String index) + throws PrivilegesEvaluationException { + + if (statefulRules != null && statefulRules.covers(index)) { + Map roleWithRule = statefulRules.indexToRoleToRule.get(index); + + if (roleWithRule != null && CollectionUtils.containsAny(roleWithRule.keySet(), context.getMappedRoles())) { + return true; + } + } else { + if (this.staticRules.hasRestrictedPatterns(context, index)) { + return true; + } + } + + if (this.staticRules.hasRestrictedPatternTemplates(context, index)) { + return true; + } + + IndexAbstraction indexAbstraction = context.getIndicesLookup().get(index); + if (indexAbstraction != null) { + for (String parent : getParents(indexAbstraction)) { + if (hasRestrictedRulesExplicit(context, statefulRules, parent)) { + return true; + } + } + } + + return false; + } + + /** + * Returns true if the user specified by the given context parameter has roles which apply for the index wildcard ("*") + * and which specify DLS rules. + */ + private boolean hasRestrictedRulesWithIndexWildcard(PrivilegesEvaluationContext context) { + return CollectionUtils.containsAny(this.staticRules.roleWithIndexWildcardToRule.keySet(), context.getMappedRoles()); + } + + /** + * Returns the joined restrictions for the given index. + *

+ * If you only need to know whether there are restrictions for an index or not, prefer to use isUnrestricted(), + * as this might be faster. + * + * @param context The current PrivilegesEvaluationContext + * @param index The index to be considered. This can be ONLY a concrete index, not an alias or data stream. + * @return The joined restrictions for the given index. + * @throws PrivilegesEvaluationException If something went wrong during privileges evaluation. In such cases, any + * access should be denied to make sure that no unauthorized information is exposed. + */ + public JoinedRule getRestriction(PrivilegesEvaluationContext context, String index) throws PrivilegesEvaluationException { + return getRestriction(context, index, fullyRestricted()); + } + + /** + * Returns the joined restrictions for the given index. + *

+ * If you only need to know whether there are restrictions for an index or not, prefer to use isUnrestricted(), + * as this might be faster. + * + * @param context The current PrivilegesEvaluationContext + * @param index The index to be considered. This can be ONLY a concrete index, not an alias or data stream. + * @param noRulesDefault Specifies the restriction that shall be used in case no rules are found for an index. Ideally, + * this is fullRestriction(), as the absence of any role mentioning an index means no privileges. + * For backwards compatibility, this might need to be noRestriction(). * @return The joined restrictions for the given index. + * @throws PrivilegesEvaluationException If something went wrong during privileges evaluation. In such cases, any + * access should be denied to make sure that no unauthorized information is exposed. + */ + public JoinedRule getRestriction(PrivilegesEvaluationContext context, String index, JoinedRule noRulesDefault) + throws PrivilegesEvaluationException { + if (context.getMappedRoles().isEmpty()) { + return fullyRestricted(); + } + + if (this.dfmEmptyOverridesAll + && CollectionUtils.containsAny(this.staticRules.rolesWithIndexWildcardWithoutRule, context.getMappedRoles())) { + return unrestricted(); + } + + StatefulRules statefulRules = this.statefulRules; + if (statefulRules != null && !statefulRules.covers(index)) { + statefulRules = null; + } + + if (this.dfmEmptyOverridesAll && this.hasUnrestrictedRulesExplicit(context, statefulRules, index)) { + // If dfmEmptyOverwritesAll == true, we can abort early in case unrestricted rules are present. These + // will overrule any other rules. + return unrestricted(); + } + + // Collect rules into ruleSink + Set ruleSink = new HashSet<>(); + collectRules(context, ruleSink, index, statefulRules); + + IndexAbstraction indexAbstraction = context.getIndicesLookup().get(index); + + if (indexAbstraction != null) { + for (String parent : getParents(indexAbstraction)) { + collectRules(context, ruleSink, parent, statefulRules); + } + } + + if (ruleSink.isEmpty()) { + if (this.dfmEmptyOverridesAll) { + // If we did not find any rules, we assume full restrictions + return noRulesDefault; + } else { + // In case dfmEmptyOverwritesAll == false, we now check for unrestricted rules. If these are present, + // we give full access. Otherwise, we also assume full restrictions + if (CollectionUtils.containsAny(this.staticRules.rolesWithIndexWildcardWithoutRule, context.getMappedRoles()) + || this.hasUnrestrictedRulesExplicit(context, statefulRules, index)) { + return unrestricted(); + } else { + return noRulesDefault; + } + } + } else { + return compile(context, ruleSink); + } + } + + /** + * Returns the joined restrictions for the given indices. + *

+ * If you only need to know whether there are restrictions for an index or not, prefer to use isUnrestricted(), + * as this might be faster. + * + * @param context The current PrivilegesEvaluationContext + * @param indices The indices to be considered. This can be ONLY concrete indices, not aliases or data streams. + * @return The joined restrictions for the given indices. The resulting map is guaranteed to contain entries for + * all indices specified in the corresponding parameter. + * @throws PrivilegesEvaluationException If something went wrong during privileges evaluation. In such cases, any + * access should be denied to make sure that no unauthorized information is exposed. + */ + public IndexToRuleMap getRestrictions(PrivilegesEvaluationContext context, Collection indices) + throws PrivilegesEvaluationException { + return getRestrictions(context, indices, fullyRestricted()); + } + + /** + * Returns the joined restrictions for the given indices. + *

+ * If you only need to know whether there are restrictions for an index or not, prefer to use isUnrestricted(), + * as this might be faster. + * + * @param context The current PrivilegesEvaluationContext + * @param indices The indices to be considered. This can be ONLY concrete indices, not aliases or data streams.y + * @param noRulesDefault Specifies the restriction that shall be used in case no rules are found for an index. Ideally, + * this is fullRestriction(), as the absence of any role mentioning an index means no privileges. + * For backwards compatibility, this might need to be noRestriction(). + * @return The joined restrictions for the given indices. The resulting map is guaranteed to contain entries for + * all indices specified in the corresponding parameter. + * @throws PrivilegesEvaluationException If something went wrong during privileges evaluation. In such cases, any + * access should be denied to make sure that no unauthorized information is exposed. + */ + public IndexToRuleMap getRestrictions( + PrivilegesEvaluationContext context, + Collection indices, + JoinedRule noRulesDefault + ) throws PrivilegesEvaluationException { + if (isUniversallyUnrestricted(context)) { + return IndexToRuleMap.unrestricted(); + } + + ImmutableMap.Builder result = ImmutableMap.builderWithExpectedSize(indices.size()); + + int restrictedIndices = 0; + + for (String index : indices) { + JoinedRule restriction = getRestriction(context, index, noRulesDefault); + + if (!restriction.isUnrestricted()) { + restrictedIndices++; + } + + result.put(index, restriction); + } + + if (restrictedIndices == 0) { + return IndexToRuleMap.unrestricted(); + } + + return new IndexToRuleMap<>(result.build()); + } + + /** + * Collects the rules for the given index and adds them to the given ruleSink set. 
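For callers, the contract of getRestrictions() can be illustrated as follows. This is a hypothetical consumer, assumed to live in the same package; it assumes IndexToRuleMap is parameterized by the joined rule type and uses only methods that appear elsewhere in this patch (isUnrestricted() and getIndexMap()).

    // Sketch: consume the result of a getRestrictions() call.
    static void printRestrictions(IndexToRuleMap<?> restrictions) {
        if (restrictions.isUnrestricted()) {
            System.out.println("no restriction for any requested index");
            return;
        }
        // per the javadoc above, the map contains an entry for every requested index
        restrictions.getIndexMap().forEach((index, rule) -> System.out.println(index + " -> " + rule));
    }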
+ */ + private void collectRules( + PrivilegesEvaluationContext context, + Set ruleSink, + String index, + StatefulRules statefulRules + ) throws PrivilegesEvaluationException { + Map statefulRoleToRule = null; + boolean statefulRulesEffective; + + if (statefulRules != null) { + statefulRoleToRule = statefulRules.indexToRoleToRule.get(index); + statefulRulesEffective = true; + } else { + statefulRulesEffective = false; + } + + for (String role : context.getMappedRoles()) { + { + SingleRule rule = this.staticRules.roleWithIndexWildcardToRule.get(role); + + if (rule != null) { + ruleSink.add(rule); + } + } + + if (statefulRoleToRule != null) { + SingleRule rule = statefulRoleToRule.get(role); + + if (rule != null) { + ruleSink.add(rule); + } + } + + if (!statefulRulesEffective) { + // Only when we have no stateful information, we also check the static index patterns + + Map indexPatternToRule = this.staticRules.rolesToStaticIndexPatternToRule.get(role); + if (indexPatternToRule != null) { + for (Map.Entry entry : indexPatternToRule.entrySet()) { + WildcardMatcher pattern = entry.getKey(); + + if (pattern.test(index)) { + ruleSink.add(entry.getValue()); + } + } + } + } + + Map dynamicIndexPatternToRule = this.staticRules.rolesToDynamicIndexPatternToRule.get(role); + + if (dynamicIndexPatternToRule != null) { + for (Map.Entry entry : dynamicIndexPatternToRule.entrySet()) { + try { + if (entry.getKey().matches(index, context, context.getIndicesLookup())) { + ruleSink.add(entry.getValue()); + } + } catch (PrivilegesEvaluationException e) { + throw new PrivilegesEvaluationException("Error while evaluating index pattern of role " + role, e); + } + } + } + } + } + + /** + * Returns a rule that signifies full access + */ + protected abstract JoinedRule unrestricted(); + + /** + * Returns a rule that signifies that a user cannot access anything. + */ + protected abstract JoinedRule fullyRestricted(); + + /** + * Merges the given collection of single rules into one joined rule. + */ + protected abstract JoinedRule compile(PrivilegesEvaluationContext context, Collection rules) + throws PrivilegesEvaluationException; + + synchronized void updateIndices(Map indexMetadata) { + StatefulRules statefulRules = this.statefulRules; + + if (statefulRules == null || !statefulRules.indexMetadata.keySet().equals(indexMetadata.keySet())) { + this.statefulRules = new StatefulRules<>(roles, indexMetadata, this.roleToRuleFunction); + } + } + + /** + * Returns aliases and/or data streams containing the specified index. + */ + private Collection getParents(IndexAbstraction indexAbstraction) { + if (indexAbstraction instanceof IndexAbstraction.Index) { + IndexAbstraction.Index index = (IndexAbstraction.Index) indexAbstraction; + + if (index.getWriteIndex().getAliases().isEmpty() && index.getParentDataStream() == null) { + return Collections.emptySet(); + } + + List result = new ArrayList<>(index.getWriteIndex().getAliases().size() + 1); + + for (String aliasName : index.getWriteIndex().getAliases().keySet()) { + result.add(aliasName); + } + + if (indexAbstraction.getParentDataStream() != null) { + result.add(indexAbstraction.getParentDataStream().getName()); + } + + return result; + } else { + return Collections.emptySet(); + } + } + + /** + * This is an immutable class that contains compiled rules. It is independent of the current indices. 
+ */ + static class StaticRules { + + protected final Set rolesWithIndexWildcardWithoutRule; + protected final Map roleWithIndexWildcardToRule; + protected final Map> rolesToDynamicIndexPatternToRule; + protected final Map> rolesToDynamicIndexPatternWithoutRule; + + /** + * Only used when no index metadata is available upon construction + */ + protected final Map> rolesToStaticIndexPatternToRule; + + /** + * Only used when no index metadata is available upon construction + */ + protected final Map rolesToStaticIndexPatternWithoutRule; + + protected final RoleToRuleFunction roleToRuleFunction; + + StaticRules(SecurityDynamicConfiguration roles, RoleToRuleFunction roleToRuleFunction) { + this.roleToRuleFunction = roleToRuleFunction; + + Set rolesWithIndexWildcardWithoutRule = new HashSet<>(); + Map roleWithIndexWildcardToRule = new HashMap<>(); + Map> rolesToDynamicIndexPatternToRule = new HashMap<>(); + Map> rolesToDynamicIndexPatternWithoutRule = new HashMap<>(); + Map> rolesToStaticIndexPatternToRule = new HashMap<>(); + Map> rolesToStaticIndexPatternWithoutRule = new HashMap<>(); + + for (Map.Entry entry : roles.getCEntries().entrySet()) { + try { + String roleName = entry.getKey(); + RoleV7 role = entry.getValue(); + + for (RoleV7.Index rolePermissions : role.getIndex_permissions()) { + if (rolePermissions.getIndex_patterns().contains("*")) { + SingleRule singleRule = this.roleToRule(rolePermissions); + + if (singleRule == null) { + rolesWithIndexWildcardWithoutRule.add(roleName); + } else { + roleWithIndexWildcardToRule.put(roleName, singleRule); + } + } else { + SingleRule singleRule = this.roleToRule(rolePermissions); + IndexPattern indexPattern = IndexPattern.from(rolePermissions.getIndex_patterns()); + + if (indexPattern.hasStaticPattern()) { + if (singleRule == null) { + rolesToStaticIndexPatternWithoutRule.computeIfAbsent(roleName, k -> new ArrayList<>()) + .add(indexPattern.getStaticPattern()); + } else { + rolesToStaticIndexPatternToRule.computeIfAbsent(roleName, k -> new HashMap<>()) + .put(indexPattern.getStaticPattern(), singleRule); + } + } + + if (indexPattern.hasDynamicPattern()) { + if (singleRule == null) { + rolesToDynamicIndexPatternWithoutRule.computeIfAbsent(roleName, k -> new HashSet<>()) + .add(indexPattern.dynamicOnly()); + } else { + rolesToDynamicIndexPatternToRule.computeIfAbsent(roleName, k -> new HashMap<>()) + .put(indexPattern.dynamicOnly(), singleRule); + } + } + } + } + } catch (Exception e) { + log.error("Unexpected exception while processing role: {}\nIgnoring role.", entry, e); + } + } + + this.rolesWithIndexWildcardWithoutRule = rolesWithIndexWildcardWithoutRule; + this.roleWithIndexWildcardToRule = roleWithIndexWildcardToRule; + this.rolesToDynamicIndexPatternToRule = rolesToDynamicIndexPatternToRule; + this.rolesToDynamicIndexPatternWithoutRule = rolesToDynamicIndexPatternWithoutRule; + + this.rolesToStaticIndexPatternToRule = rolesToStaticIndexPatternToRule; + this.rolesToStaticIndexPatternWithoutRule = rolesToStaticIndexPatternWithoutRule.entrySet() + .stream() + .collect(ImmutableMap.toImmutableMap(entry -> entry.getKey(), entry -> WildcardMatcher.from(entry.getValue()))); + } + + protected SingleRule roleToRule(RoleV7.Index rolePermissions) throws PrivilegesConfigurationValidationException { + return this.roleToRuleFunction.apply(rolePermissions); + } + + /** + * Only to be used if there is no stateful index information + */ + boolean hasUnrestrictedPatterns(PrivilegesEvaluationContext context, String index) throws PrivilegesEvaluationException 
{ + // We assume that we have a restriction unless there are roles without restriction. Thus, we only have to check the roles + // without restriction. + for (String role : context.getMappedRoles()) { + WildcardMatcher pattern = this.rolesToStaticIndexPatternWithoutRule.get(role); + + if (pattern != null && pattern.test(index)) { + return true; + } + } + + // If we found no roles without restriction, we assume a restriction + return false; + } + + boolean hasUnrestrictedPatternTemplates(PrivilegesEvaluationContext context, String index) throws PrivilegesEvaluationException { + // We assume that we have a restriction unless there are roles without restriction. Thus, we only have to check the roles + // without restriction. + for (String role : context.getMappedRoles()) { + Set dynamicIndexPatternsWithoutRule = this.rolesToDynamicIndexPatternWithoutRule.get(role); + + if (dynamicIndexPatternsWithoutRule != null) { + for (IndexPattern indexPatternTemplate : dynamicIndexPatternsWithoutRule) { + try { + if (indexPatternTemplate.matches(index, context, context.getIndicesLookup())) { + return true; + } + } catch (PrivilegesEvaluationException e) { + log.error("Error while matching index pattern of role {}", role, e); + } + } + } + } + + // If we found no roles without restriction, we assume a restriction + return false; + } + + /** + * Only to be used if there is no stateful index information + */ + boolean hasRestrictedPatterns(PrivilegesEvaluationContext context, String index) throws PrivilegesEvaluationException { + for (String role : context.getMappedRoles()) { + Map indexPatternToRule = this.rolesToStaticIndexPatternToRule.get(role); + + if (indexPatternToRule != null) { + for (WildcardMatcher indexPattern : indexPatternToRule.keySet()) { + if (indexPattern.test(index)) { + return true; + } + } + } + } + + return false; + } + + boolean hasRestrictedPatternTemplates(PrivilegesEvaluationContext context, String index) throws PrivilegesEvaluationException { + for (String role : context.getMappedRoles()) { + Map dynamicIndexPatternToRule = this.rolesToDynamicIndexPatternToRule.get(role); + + if (dynamicIndexPatternToRule != null) { + for (IndexPattern indexPattern : dynamicIndexPatternToRule.keySet()) { + try { + if (indexPattern.matches(index, context, context.getIndicesLookup())) { + return true; + } + } catch (PrivilegesEvaluationException e) { + throw new PrivilegesEvaluationException("Error while evaluating index pattern of role " + role, e); + } + } + } + } + + return false; + } + } + + /** + * This is an immutable class which contains compiled rules based on the set of actually existing indices. Objects + * of this class need to be re-constructed whenever the set of indices changes.
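Because the stateful rules are only valid for a given set of indices, some component has to trigger a rebuild when the cluster's indices change. A hedged sketch of such wiring follows; the actual hook lives elsewhere in the security plugin and is not part of this hunk, and it is assumed that updateIndices() (defined above) takes the indices lookup map keyed by index name.

    // Assumption: a component holding a ClusterService reference and one of the concrete
    // privileges instances (here DocumentPrivileges) registers a listener like this.
    class StatefulRulesRefreshSketch {
        StatefulRulesRefreshSketch(org.opensearch.cluster.service.ClusterService clusterService, DocumentPrivileges documentPrivileges) {
            clusterService.addListener(event -> {
                if (event.metadataChanged()) {
                    // updateIndices() only rebuilds the stateful rules if the set of indices actually changed
                    documentPrivileges.updateIndices(event.state().metadata().getIndicesLookup());
                }
            });
        }
    }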
+ */ + static class StatefulRules { + final Map indexMetadata; + + final ImmutableMap> indexToRoleToRule; + final ImmutableMap> indexToRoleWithoutRule; + + private final RoleToRuleFunction roleToRuleFunction; + + StatefulRules( + SecurityDynamicConfiguration roles, + Map indexMetadata, + RoleToRuleFunction roleToRuleFunction + ) { + this.roleToRuleFunction = roleToRuleFunction; + this.indexMetadata = indexMetadata; + + DeduplicatingCompactSubSetBuilder roleSetBuilder = new DeduplicatingCompactSubSetBuilder<>( + roles.getCEntries().keySet() + ); + CompactMapGroupBuilder roleMapBuilder = new CompactMapGroupBuilder<>(roles.getCEntries().keySet()); + Map> indexToRoleWithoutRule = new HashMap<>(); + Map> indexToRoleToRule = new HashMap<>(); + + for (Map.Entry entry : roles.getCEntries().entrySet()) { + try { + String roleName = entry.getKey(); + RoleV7 role = entry.getValue(); + + roleSetBuilder.next(roleName); + + for (RoleV7.Index indexPermissions : role.getIndex_permissions()) { + if (indexPermissions.getIndex_patterns().contains("*")) { + // Wildcard index patterns are handled in the static IndexPermissions object. + continue; + } + + WildcardMatcher indexMatcher = IndexPattern.from(indexPermissions.getIndex_patterns()).getStaticPattern(); + + if (indexMatcher == WildcardMatcher.NONE) { + // The pattern is likely blank because there are only dynamic patterns. + // Dynamic index patterns are not handled here, but in the static IndexPermissions object + continue; + } + + SingleRule rule = this.roleToRule(indexPermissions); + + if (rule != null) { + for (String index : indexMatcher.iterateMatching(indexMetadata.keySet())) { + indexToRoleToRule.computeIfAbsent(index, k -> roleMapBuilder.createMapBuilder()).put(roleName, rule); + } + } else { + for (String index : indexMatcher.iterateMatching(indexMetadata.keySet())) { + indexToRoleWithoutRule.computeIfAbsent(index, k -> roleSetBuilder.createSubSetBuilder()).add(roleName); + } + } + } + } catch (Exception e) { + log.error("Unexpected exception while processing role: {}\nIgnoring role.", entry, e); + } + } + + DeduplicatingCompactSubSetBuilder.Completed completed = roleSetBuilder.build(); + + this.indexToRoleToRule = indexToRoleToRule.entrySet() + .stream() + .collect(ImmutableMap.toImmutableMap(entry -> entry.getKey(), entry -> entry.getValue().build())); + this.indexToRoleWithoutRule = indexToRoleWithoutRule.entrySet() + .stream() + .collect(ImmutableMap.toImmutableMap(entry -> entry.getKey(), entry -> entry.getValue().build(completed))); + + } + + protected SingleRule roleToRule(RoleV7.Index rolePermissions) throws PrivilegesConfigurationValidationException { + return this.roleToRuleFunction.apply(rolePermissions); + } + + /** + * Returns true if the given index is known to this instance - then it can be assumed that this instance + * has proper rules for the index in the indexToRoleToRule and the indexToRoleWithoutRule attributes. + *

+ * If this returns false, this instance cannot be relied on to determine the correct rules. + */ + boolean covers(String index) { + return this.indexMetadata.get(index) != null; + } + } + + @FunctionalInterface + static interface RoleToRuleFunction { + SingleRule apply(RoleV7.Index indexPrivileges) throws PrivilegesConfigurationValidationException; + } + + static abstract class Rule { + abstract boolean isUnrestricted(); + } + +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsBaseContext.java b/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsBaseContext.java new file mode 100644 index 0000000000..232e2d7422 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsBaseContext.java @@ -0,0 +1,68 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges.dlsfls; + +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.security.configuration.AdminDNs; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluator; +import org.opensearch.security.support.ConfigConstants; +import org.opensearch.security.support.HeaderHelper; +import org.opensearch.security.user.User; + +/** + * Node global context data for DLS/FLS. The lifecycle of an instance of this class is equal to the lifecycle of a running node. + */ +public class DlsFlsBaseContext { + private final PrivilegesEvaluator privilegesEvaluator; + private final ThreadContext threadContext; + private final AdminDNs adminDNs; + + public DlsFlsBaseContext(PrivilegesEvaluator privilegesEvaluator, ThreadContext threadContext, AdminDNs adminDNs) { + this.privilegesEvaluator = privilegesEvaluator; + this.threadContext = threadContext; + this.adminDNs = adminDNs; + } + + /** + * Returns the PrivilegesEvaluationContext for the current thread. Returns null if the current thread is not + * associated with a user. This indicates a system action. In these cases, no privilege evaluation should be performed. + */ + public PrivilegesEvaluationContext getPrivilegesEvaluationContext() { + User user = threadContext.getTransient(ConfigConstants.OPENDISTRO_SECURITY_USER); + + if (user == null || adminDNs.isAdmin(user)) { + return null; + } + + return this.privilegesEvaluator.createContext(user, null); + } + + public boolean isDlsDoneOnFilterLevel() { + if (threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_FILTER_LEVEL_DLS_DONE) != null) { + return true; + } else { + return false; + } + } + + /** + * Returns true for requests that have raised privileges. This corresponds to the check in SecurityFilter: + * https://github.com/opensearch-project/security/blob/1c898dcc4a92e8d4aa8b18c3fed761b5f6e52d4f/src/main/java/org/opensearch/security/filter/SecurityFilter.java#L209 + *

+ * In earlier versions the check in SecurityFilter would automatically bypass any DLS/FLS logic if it was true, + * because no DLS/FLS thread context headers were written. As these are no longer used and the DLS/FLS components + * do the access control checks by themselves, we now need to do that check at these particular locations. + */ + public boolean isPrivilegedConfigRequest() { + return "true".equals(HeaderHelper.getSafeFromHeader(threadContext, ConfigConstants.OPENDISTRO_SECURITY_CONF_REQUEST_HEADER)); + } +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsLegacyHeaders.java b/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsLegacyHeaders.java new file mode 100644 index 0000000000..e2322bbcc7 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsLegacyHeaders.java @@ -0,0 +1,247 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges.dlsfls; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import com.google.common.collect.Sets; + +import org.opensearch.Version; +import org.opensearch.action.ActionRequest; +import org.opensearch.action.admin.cluster.shards.ClusterSearchShardsRequest; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; +import org.opensearch.security.support.Base64Helper; +import org.opensearch.security.support.ConfigConstants; +import org.opensearch.security.support.HeaderHelper; +import org.opensearch.transport.Transport; +import org.opensearch.transport.TransportRequest; + +/** + * Encapsulates functionality to provide transport headers with DLS/FLS information that need to be sent + * to nodes which run on the legacy DLS/FLS implementation. This is only needed for mixed clusters. + * See the attribute LEGACY_HEADERS_UNNECESSARY_AS_OF for the concrete version. + *

+ * As soon as backward compat in mixed clusters is no longer required, this class should be removed. + * + */ +public class DlsFlsLegacyHeaders { + /** + * Defines the first OpenSearch version which does not need the legacy headers + * TODO this needs to be adapted + */ + static final Version LEGACY_HEADERS_UNNECESSARY_AS_OF = Version.V_2_19_0; + + /** + * Returns true if the current cluster still contains nodes which are on an OpenSearch version which + * requires the legacy DLS/FLS transport headers to be set. This still does not necessarily indicate that the + * headers must be set, as this also depends on the concrete message that is being sent. + */ + public static boolean possiblyRequired(ClusterService clusterService) { + return !clusterService.state().nodes().getMinNodeVersion().onOrAfter(LEGACY_HEADERS_UNNECESSARY_AS_OF); + } + + /** + * Creates a DlsFlsLegacyHeaders instance and puts it as a transient into the thread context. This should only be called + * if DlsFlsLegacyHeaders.possiblyRequired() returns true. + *

+ * This method should be called in the DlsFlsRequestValve implementation, i.e., during action filtering. + * Later, when transport messages are sent, performHeaderDecoration() should be called in the SecurityInterceptor + * class. + */ + public static void prepare( + ThreadContext threadContext, + PrivilegesEvaluationContext context, + DlsFlsProcessedConfig config, + Metadata metadata, + boolean doFilterLevelDls + ) throws PrivilegesEvaluationException { + DlsFlsLegacyHeaders preparedHeaders = new DlsFlsLegacyHeaders(context, config, metadata, doFilterLevelDls); + + if (context.getRequest() instanceof ClusterSearchShardsRequest && HeaderHelper.isTrustedClusterRequest(threadContext)) { + // Special case: Another cluster tries to initiate a cross cluster search and will talk directly to + // the shards on our cluster. In this case, we do send the information as response headers. + // The other cluster has code to correctly evaluate these response headers + preparedHeaders.performResponseHeaderDecoration(threadContext); + } else if (threadContext.getTransient(TRANSIENT_HEADER) == null) { + // Normal case: No CCS involved + threadContext.putTransient(TRANSIENT_HEADER, preparedHeaders); + } + } + + public static final String TRANSIENT_HEADER = ConfigConstants.OPENDISTRO_SECURITY_CONFIG_PREFIX + "dls_fls_legacy_headers"; + + private final DlsFlsProcessedConfig config; + + private final String dlsHeader; + private final String flsHeader; + private final String fmHeader; + + public DlsFlsLegacyHeaders( + PrivilegesEvaluationContext context, + DlsFlsProcessedConfig config, + Metadata metadata, + boolean doFilterLevelDls + ) throws PrivilegesEvaluationException { + this.config = config; + this.dlsHeader = !doFilterLevelDls ? getDlsHeader(context, config.getDocumentPrivileges(), metadata) : null; + this.flsHeader = getFlsHeader(context, config.getFieldPrivileges(), metadata); + this.fmHeader = getFieldMaskingHeader(context, config.getFieldMasking(), metadata); + } + + /** + * Writes the prepared DLS/FLS headers into the given map IF this method deems that it is necessary. + * To be called when a transport message is sent to another node, i.e. in TransportInterceptor.interceptSender(). + */ + public void performHeaderDecoration(Transport.Connection connection, TransportRequest request, Map headerMap) { + + if (connection.getVersion().onOrAfter(LEGACY_HEADERS_UNNECESSARY_AS_OF)) { + // Target node is new enough -> no headers to be applied + return; + } + + if (request instanceof ActionRequest) { + // The legacy implementation will create the information by itself in DlsFlsValve if an ActionRequest is received + // Thus, if we have an ActionRequest, we do not need to get active either + return; + } + + if (dlsHeader != null) { + headerMap.put(ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER, dlsHeader); + } + + if (flsHeader != null) { + headerMap.put(ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER, flsHeader); + } + + if (fmHeader != null) { + headerMap.put(ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER, fmHeader); + } + } + + /** + * Only necessary for CCS in the case that another cluster checks out our shards with ClusterSearchShardsRequest: + * In this case, we send the necessary information as response headers. The other cluster has code to evaluate + * these response headers. 
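The headers themselves carry a simple payload: a serializable map from index name to the DLS query strings (or FLS field names, or masked field names) that apply to it, encoded as a Base64 string, as the getDlsHeader()/getFlsHeader()/getFieldMaskingHeader() helpers further below show. A minimal sketch of that shape, with plain java.util serialization and Base64 standing in for the plugin's Base64Helper, and with hypothetical class and method names:

```java
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Base64;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

// Hypothetical sketch of the legacy header payload: index name -> applicable DLS query strings.
public class LegacyHeaderPayloadSketch {
    static String encode(Map<String, Set<String>> restrictionsByIndex) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
            out.writeObject((Serializable) restrictionsByIndex); // the plugin uses Base64Helper.serializeObject(...)
        }
        return Base64.getEncoder().encodeToString(bytes.toByteArray());
    }

    public static void main(String[] args) throws IOException {
        Map<String, Set<String>> dlsQueriesByIndex = new HashMap<>();
        dlsQueriesByIndex.put("logs-2024", Set.of("{\"term\": {\"dept\": \"hr\"}}"));
        System.out.println(encode(dlsQueriesByIndex));
    }
}
```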
+ */ + public void performResponseHeaderDecoration(ThreadContext threadContext) { + if (dlsHeader != null) { + threadContext.addResponseHeader(ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER, dlsHeader); + } + + if (flsHeader != null) { + threadContext.addResponseHeader(ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER, flsHeader); + } + + if (fmHeader != null) { + threadContext.addResponseHeader(ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER, fmHeader); + } + } + + public String getDlsHeader() { + return dlsHeader; + } + + public String getFlsHeader() { + return flsHeader; + } + + public String getFmHeader() { + return fmHeader; + } + + private static String getDlsHeader(PrivilegesEvaluationContext context, DocumentPrivileges documentPrivileges, Metadata metadata) + throws PrivilegesEvaluationException { + IndexToRuleMap dlsRestrictionMap = documentPrivileges.getRestrictions( + context, + metadata.indices().keySet(), + documentPrivileges.unrestricted() + ); + + if (dlsRestrictionMap.isUnrestricted()) { + return null; + } + + Map> dlsQueriesByIndex = new HashMap<>(); + + for (Map.Entry entry : dlsRestrictionMap.getIndexMap().entrySet()) { + // Do not include implicitly unrestricted rules (this is achieved by the != operator, an equals() would also catch explicit + // unrestricted rules) + if (entry.getValue() != documentPrivileges.unrestricted()) { + dlsQueriesByIndex.put( + entry.getKey(), + entry.getValue().getQueries().stream().map(query -> query.getRenderedSource()).collect(Collectors.toSet()) + ); + } + } + + return Base64Helper.serializeObject((Serializable) dlsQueriesByIndex); + } + + private static String getFlsHeader(PrivilegesEvaluationContext context, FieldPrivileges fieldPrivileges, Metadata metadata) + throws PrivilegesEvaluationException { + IndexToRuleMap flsRuleMap = fieldPrivileges.getRestrictions( + context, + metadata.indices().keySet(), + fieldPrivileges.unrestricted() + ); + + if (flsRuleMap.isUnrestricted()) { + return null; + } + + Map> flsFields = new HashMap<>(); + + for (Map.Entry entry : flsRuleMap.getIndexMap().entrySet()) { + // Do not include implicitly unrestricted rules (this is achieved by the != operator, an equals() would also catch explicit + // unrestricted rules) + if (entry.getValue() != fieldPrivileges.unrestricted()) { + flsFields.put(entry.getKey(), Sets.newHashSet(entry.getValue().getSource())); + } + + } + + return Base64Helper.serializeObject((Serializable) flsFields); + } + + private static String getFieldMaskingHeader(PrivilegesEvaluationContext context, FieldMasking fieldMasking, Metadata metadata) + throws PrivilegesEvaluationException { + IndexToRuleMap fmRuleMap = fieldMasking.getRestrictions( + context, + metadata.indices().keySet(), + fieldMasking.unrestricted() + ); + + if (fmRuleMap.isUnrestricted()) { + return null; + } + + Map> maskedFieldsMap = new HashMap<>(); + + for (Map.Entry entry : fmRuleMap.getIndexMap().entrySet()) { + // Do not include implicitly unrestricted rules (this is achieved by the != operator, an equals() would also catch explicit + // unrestricted rules) + if (entry.getValue() != fieldMasking.unrestricted()) { + maskedFieldsMap.put(entry.getKey(), Sets.newHashSet(entry.getValue().getSource())); + } + } + + return Base64Helper.serializeObject((Serializable) maskedFieldsMap); + } + +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsProcessedConfig.java b/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsProcessedConfig.java new file mode 100644 index 
0000000000..b217b59df3 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsProcessedConfig.java @@ -0,0 +1,80 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges.dlsfls; + +import java.util.Map; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.security.privileges.ClusterStateMetadataDependentPrivileges; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; + +/** + * Encapsulates the processed DLS/FLS configuration from roles.yml. + * The current instance is held and managed by DlsFlsValveImpl. + */ +public class DlsFlsProcessedConfig extends ClusterStateMetadataDependentPrivileges { + private static final Logger log = LogManager.getLogger(DlsFlsProcessedConfig.class); + + private final DocumentPrivileges documentPrivileges; + private final FieldPrivileges fieldPrivileges; + private final FieldMasking fieldMasking; + private long metadataVersionEffective = -1; + + public DlsFlsProcessedConfig( + SecurityDynamicConfiguration rolesConfiguration, + Map indexMetadata, + NamedXContentRegistry xContentRegistry, + Settings settings, + FieldMasking.Config fieldMaskingConfig + ) { + this.documentPrivileges = new DocumentPrivileges(rolesConfiguration, indexMetadata, xContentRegistry, settings); + this.fieldPrivileges = new FieldPrivileges(rolesConfiguration, indexMetadata, settings); + this.fieldMasking = new FieldMasking(rolesConfiguration, indexMetadata, fieldMaskingConfig, settings); + } + + public DocumentPrivileges getDocumentPrivileges() { + return this.documentPrivileges; + } + + public FieldPrivileges getFieldPrivileges() { + return this.fieldPrivileges; + } + + public FieldMasking getFieldMasking() { + return this.fieldMasking; + } + + @Override + protected void updateClusterStateMetadata(Metadata metadata) { + long start = System.currentTimeMillis(); + Map indexLookup = metadata.getIndicesLookup(); + + this.documentPrivileges.updateIndices(indexLookup); + this.fieldPrivileges.updateIndices(indexLookup); + this.fieldMasking.updateIndices(indexLookup); + + long duration = System.currentTimeMillis() - start; + + log.debug("Updating DlsFlsProcessedConfig took {} ms", duration); + } + + @Override + protected long getCurrentlyUsedMetadataVersion() { + return this.metadataVersionEffective; + } +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/DlsRestriction.java b/src/main/java/org/opensearch/security/privileges/dlsfls/DlsRestriction.java new file mode 100644 index 0000000000..242e0000a4 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/DlsRestriction.java @@ -0,0 +1,122 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. 
See + * GitHub history for details. + */ +package org.opensearch.security.privileges.dlsfls; + +import java.util.Collections; +import java.util.List; +import java.util.function.Function; + +import com.google.common.collect.ImmutableList; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.BitSetProducer; +import org.apache.lucene.search.join.ToChildBlockJoinQuery; + +import org.opensearch.index.query.ParsedQuery; +import org.opensearch.index.query.QueryShardContext; +import org.opensearch.index.query.TermsQueryBuilder; +import org.opensearch.security.queries.QueryBuilderTraverser; + +/** + * Represents the DlsRestriction for a particular index. Internally, the DLS restriction is realized by boolean queries, + * which restrict the allowed documents. + */ +public class DlsRestriction extends AbstractRuleBasedPrivileges.Rule { + + public static final DlsRestriction NONE = new DlsRestriction(Collections.emptyList()); + public static final DlsRestriction FULL = new DlsRestriction(ImmutableList.of(DocumentPrivileges.RenderedDlsQuery.MATCH_NONE)); + + private static final Query NON_NESTED_QUERY; + + static { + // Moved from + // https://github.com/opensearch-project/security/blob/main/src/main/java/org/opensearch/security/configuration/DlsQueryParser.java + // Match all documents but not the nested ones + // Nested document types start with __ + // https://discuss.elastic.co/t/whats-nested-documents-layout-inside-the-lucene/59944/9 + NON_NESTED_QUERY = new BooleanQuery.Builder().add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER) + .add(new PrefixQuery(new Term("_type", "__")), BooleanClause.Occur.MUST_NOT) + .build(); + } + + private final ImmutableList queries; + + DlsRestriction(List queries) { + this.queries = ImmutableList.copyOf(queries); + } + + @Override + public boolean isUnrestricted() { + return this.queries.isEmpty(); + } + + public org.apache.lucene.search.BooleanQuery.Builder toBooleanQueryBuilder( + QueryShardContext queryShardContext, + Function queryMapFunction + ) { + if (this.queries.isEmpty()) { + return null; + } + + boolean hasNestedMapping = queryShardContext.getMapperService().hasNested(); + + org.apache.lucene.search.BooleanQuery.Builder dlsQueryBuilder = new org.apache.lucene.search.BooleanQuery.Builder(); + dlsQueryBuilder.setMinimumNumberShouldMatch(1); + + for (DocumentPrivileges.RenderedDlsQuery query : this.queries) { + ParsedQuery parsedQuery = queryShardContext.toQuery(query.getQueryBuilder()); + org.apache.lucene.search.Query luceneQuery = parsedQuery.query(); + + if (queryMapFunction != null) { + luceneQuery = queryMapFunction.apply(luceneQuery); + } + + dlsQueryBuilder.add(luceneQuery, BooleanClause.Occur.SHOULD); + + if (hasNestedMapping) { + final BitSetProducer parentDocumentsFilter = queryShardContext.bitsetFilter(NON_NESTED_QUERY); + dlsQueryBuilder.add(new ToChildBlockJoinQuery(luceneQuery, parentDocumentsFilter), BooleanClause.Occur.SHOULD); + } + } + + return dlsQueryBuilder; + } + + public boolean containsTermLookupQuery() { + for (DocumentPrivileges.RenderedDlsQuery query : this.queries) { + if (QueryBuilderTraverser.exists( + query.getQueryBuilder(), + (q) -> (q instanceof TermsQueryBuilder) && ((TermsQueryBuilder) q).termsLookup() != null + )) { + return true; + } + } + + return false; + } + 
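The toBooleanQueryBuilder() method above combines the role's rendered DLS queries disjunctively: each query becomes a SHOULD clause and at least one of them must match. A standalone Lucene sketch of that combination, leaving out the nested-document ToChildBlockJoinQuery handling (class and method names are hypothetical):

```java
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

// Hypothetical sketch: a document is visible if it matches at least one of the DLS queries
// granted by the user's roles, expressed as SHOULD clauses with minimumShouldMatch = 1.
public class DlsDisjunctionSketch {
    static Query combine(Query... roleQueries) {
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        builder.setMinimumNumberShouldMatch(1);
        for (Query query : roleQueries) {
            builder.add(query, BooleanClause.Occur.SHOULD);
        }
        return builder.build();
    }

    public static void main(String[] args) {
        Query q = combine(new TermQuery(new Term("dept", "hr")), new TermQuery(new Term("owner", "alice")));
        System.out.println(q); // roughly: (dept:hr owner:alice)~1
    }
}
```

Documents that match none of the granted queries are filtered out; a fully restricted index is expressed by DlsRestriction.FULL, which carries a match_none query.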
+ @Override + public String toString() { + if (isUnrestricted()) { + return "DLS:"; + } else { + return "DLS:" + queries; + } + } + + public ImmutableList getQueries() { + return queries; + } +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/DocumentPrivileges.java b/src/main/java/org/opensearch/security/privileges/dlsfls/DocumentPrivileges.java new file mode 100644 index 0000000000..2afcdd4b82 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/DocumentPrivileges.java @@ -0,0 +1,210 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges.dlsfls; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import org.apache.logging.log4j.util.Strings; + +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.index.query.AbstractQueryBuilder; +import org.opensearch.index.query.MatchNoneQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.security.privileges.PrivilegesConfigurationValidationException; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; +import org.opensearch.security.privileges.UserAttributes; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; + +/** + * This class converts role configuration into pre-computed, optimized data structures for checking DLS privileges. + *

+ * With the exception of the statefulRules property, instances of this class are immutable. The life-cycle of an + * instance of this class corresponds to the life-cycle of the role configuration. If the role configuration is changed, + * a new instance needs to be built. + *

+ * Instances of this class are managed by DlsFlsProcessedConfig. + */ +public class DocumentPrivileges extends AbstractRuleBasedPrivileges { + + private final NamedXContentRegistry xContentRegistry; + + public DocumentPrivileges( + SecurityDynamicConfiguration roles, + Map indexMetadata, + NamedXContentRegistry xContentRegistry, + Settings settings + ) { + super(roles, indexMetadata, (rolePermissions) -> roleToRule(rolePermissions, xContentRegistry), settings); + this.xContentRegistry = xContentRegistry; + } + + static DlsQuery roleToRule(RoleV7.Index rolePermissions, NamedXContentRegistry xContentRegistry) + throws PrivilegesConfigurationValidationException { + String dlsQueryTemplate = rolePermissions.getDls(); + + if (dlsQueryTemplate != null && !Strings.isBlank(dlsQueryTemplate)) { + return DlsQuery.create(dlsQueryTemplate, xContentRegistry); + } else { + return null; + } + } + + @Override + protected DlsRestriction unrestricted() { + return DlsRestriction.NONE; + } + + @Override + protected DlsRestriction fullyRestricted() { + return DlsRestriction.FULL; + } + + @Override + protected DlsRestriction compile(PrivilegesEvaluationContext context, Collection rules) throws PrivilegesEvaluationException { + List renderedQueries = new ArrayList<>(rules.size()); + + for (DlsQuery query : rules) { + renderedQueries.add(query.evaluate(context)); + } + + return new DlsRestriction(renderedQueries); + } + + /** + * The basic rules of DLS are queries. This class encapsulates single queries. + */ + static abstract class DlsQuery { + final String queryString; + + DlsQuery(String queryString) { + this.queryString = queryString; + } + + abstract RenderedDlsQuery evaluate(PrivilegesEvaluationContext context) throws PrivilegesEvaluationException; + + @Override + public int hashCode() { + return queryString.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof DlsQuery)) { + return false; + } + DlsQuery other = (DlsQuery) obj; + return Objects.equals(this.queryString, other.queryString); + } + + protected QueryBuilder parseQuery(String queryString, NamedXContentRegistry xContentRegistry) + throws PrivilegesConfigurationValidationException { + try { + XContentParser parser = JsonXContent.jsonXContent.createParser( + xContentRegistry, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + queryString + ); + return AbstractQueryBuilder.parseInnerQueryBuilder(parser); + } catch (Exception e) { + throw new PrivilegesConfigurationValidationException("Invalid DLS query: " + queryString, e); + } + } + + static DlsQuery create(String queryString, NamedXContentRegistry xContentRegistry) + throws PrivilegesConfigurationValidationException { + if (queryString.contains("${")) { + return new DlsQuery.Dynamic(queryString, xContentRegistry); + } else { + return new DlsQuery.Constant(queryString, xContentRegistry); + } + } + + /** + * Represents a DLS query WITHOUT user attribute references like "${user.name}". These queries are already + * pre-parsed and ready for use. 
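As the create() factory above shows, the split between the two variants comes down to whether the template contains a "${...}" reference. A standalone sketch of that split, with plain string replacement standing in for UserAttributes.replaceProperties() and hypothetical class and method names:

```java
import java.util.Map;

// Hypothetical sketch: templates with "${...}" references must be rendered per request,
// everything else can be parsed once when the role configuration is loaded.
public class DlsTemplateSketch {
    static boolean isDynamic(String queryTemplate) {
        return queryTemplate.contains("${");
    }

    // Stands in for UserAttributes.replaceProperties(); only handles simple "${key}" placeholders.
    static String render(String queryTemplate, Map<String, String> attributes) {
        String result = queryTemplate;
        for (Map.Entry<String, String> entry : attributes.entrySet()) {
            result = result.replace("${" + entry.getKey() + "}", entry.getValue());
        }
        return result;
    }

    public static void main(String[] args) {
        String constant = "{\"term\": {\"dept\": \"hr\"}}";
        String dynamic = "{\"term\": {\"owner\": \"${user.name}\"}}";
        System.out.println(isDynamic(constant)); // false: parse once at configuration time
        System.out.println(render(dynamic, Map.of("user.name", "alice")));
        // {"term": {"owner": "alice"}}
    }
}
```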
+ */ + static class Constant extends DlsQuery { + private final RenderedDlsQuery renderedDlsQuery; + + Constant(String queryString, NamedXContentRegistry xContentRegistry) throws PrivilegesConfigurationValidationException { + super(queryString); + this.renderedDlsQuery = new RenderedDlsQuery(parseQuery(queryString, xContentRegistry), queryString); + } + + @Override + RenderedDlsQuery evaluate(PrivilegesEvaluationContext context) { + return renderedDlsQuery; + } + } + + /** + * Represents a DLS query with user attribute references like "${user.name}". These queries are parsed + * during privilege evaluation time, after user attribute interpolation has been performed. + */ + static class Dynamic extends DlsQuery { + private final NamedXContentRegistry xContentRegistry; + + Dynamic(String queryString, NamedXContentRegistry xContentRegistry) { + super(queryString); + this.xContentRegistry = xContentRegistry; + } + + @Override + RenderedDlsQuery evaluate(PrivilegesEvaluationContext context) throws PrivilegesEvaluationException { + String effectiveQueryString = UserAttributes.replaceProperties(this.queryString, context); + try { + return new RenderedDlsQuery(parseQuery(effectiveQueryString, xContentRegistry), effectiveQueryString); + } catch (Exception e) { + throw new PrivilegesEvaluationException("Invalid DLS query: " + effectiveQueryString, e); + } + } + } + } + + /** + * This is a DLS query where any templates (like ${user.name}) have been interpolated and which has been + * succesfully parsed to a QueryBuilder instance. + */ + public static class RenderedDlsQuery { + public static RenderedDlsQuery MATCH_NONE = new RenderedDlsQuery(new MatchNoneQueryBuilder(), "{\"match_none:\" {}}"); + + private final QueryBuilder queryBuilder; + private final String renderedSource; + + RenderedDlsQuery(QueryBuilder queryBuilder, String renderedSource) { + this.queryBuilder = queryBuilder; + this.renderedSource = renderedSource; + } + + public QueryBuilder getQueryBuilder() { + return queryBuilder; + } + + public String getRenderedSource() { + return renderedSource; + } + } + +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/FieldMasking.java b/src/main/java/org/opensearch/security/privileges/dlsfls/FieldMasking.java new file mode 100644 index 0000000000..3f1d492f94 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/FieldMasking.java @@ -0,0 +1,488 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges.dlsfls; + +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.regex.PatternSyntaxException; +import java.util.stream.Collectors; + +import com.google.common.base.Splitter; +import com.google.common.collect.ImmutableList; +import org.apache.commons.lang3.StringUtils; +import org.apache.lucene.util.BytesRef; +import org.bouncycastle.util.encoders.Hex; + +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.common.settings.Settings; +import org.opensearch.security.configuration.Salt; +import org.opensearch.security.privileges.PrivilegesConfigurationValidationException; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.support.ConfigConstants; +import org.opensearch.security.support.WildcardMatcher; + +import com.rfksystems.blake2b.Blake2b; + +/** + * This class converts role configuration into pre-computed, optimized data structures for applying field masking + * to indexed documents. + *

+ * With the exception of the statefulRules property, instances of this class are immutable. The life-cycle of an + * instance of this class corresponds to the life-cycle of the role configuration. If the role configuration is changed, + * a new instance needs to be built. + *

+ * Instances of this class are managed by DlsFlsProcessedConfig. + */ +public class FieldMasking extends AbstractRuleBasedPrivileges { + + private final FieldMasking.Config fieldMaskingConfig; + + public FieldMasking( + SecurityDynamicConfiguration roles, + Map indexMetadata, + FieldMasking.Config fieldMaskingConfig, + Settings settings + ) { + super(roles, indexMetadata, (rolePermissions) -> roleToRule(rolePermissions, fieldMaskingConfig), settings); + this.fieldMaskingConfig = fieldMaskingConfig; + } + + static FieldMaskingRule.SimpleRule roleToRule(RoleV7.Index rolePermissions, FieldMasking.Config fieldMaskingConfig) + throws PrivilegesConfigurationValidationException { + List fmExpressions = rolePermissions.getMasked_fields(); + + if (fmExpressions != null && !fmExpressions.isEmpty()) { + return new FieldMaskingRule.SimpleRule(rolePermissions, fieldMaskingConfig); + } else { + return null; + } + } + + @Override + protected FieldMaskingRule unrestricted() { + return FieldMaskingRule.ALLOW_ALL; + } + + @Override + protected FieldMaskingRule fullyRestricted() { + return new FieldMaskingRule.SimpleRule( + ImmutableList.of(new FieldMaskingRule.Field(FieldMaskingExpression.MASK_ALL, fieldMaskingConfig)) + ); + } + + @Override + protected FieldMaskingRule compile(PrivilegesEvaluationContext context, Collection rules) + throws PrivilegesEvaluationException { + return new FieldMaskingRule.MultiRole(rules); + } + + public static abstract class FieldMaskingRule extends AbstractRuleBasedPrivileges.Rule { + public static final FieldMaskingRule ALLOW_ALL = new SimpleRule(ImmutableList.of()); + + public static FieldMaskingRule of(FieldMasking.Config fieldMaskingConfig, String... rules) + throws PrivilegesConfigurationValidationException { + ImmutableList.Builder patterns = new ImmutableList.Builder<>(); + + for (String rule : rules) { + patterns.add(new Field(new FieldMaskingExpression(rule), fieldMaskingConfig)); + } + + return new SimpleRule(patterns.build()); + } + + public abstract Field get(String field); + + public abstract boolean isAllowAll(); + + public boolean isMasked(String field) { + return get(field) != null; + } + + public boolean isUnrestricted() { + return this.isAllowAll(); + } + + public abstract List getSource(); + + /** + * A rule which was derived directly from exactly one role. 
+ */ + public static class SimpleRule extends FieldMaskingRule { + + final RoleV7.Index sourceIndex; + final ImmutableList expressions; + + SimpleRule(RoleV7.Index sourceIndex, FieldMasking.Config fieldMaskingConfig) throws PrivilegesConfigurationValidationException { + this.sourceIndex = sourceIndex; + this.expressions = parseExpressions(sourceIndex, fieldMaskingConfig); + } + + SimpleRule(ImmutableList expressions) { + this.sourceIndex = null; + this.expressions = expressions; + } + + public Field get(String field) { + return internalGet(stripKeywordSuffix(field)); + } + + private Field internalGet(String field) { + for (Field expression : this.expressions) { + if (expression.getPattern().test(field)) { + return expression; + } + } + + return null; + } + + public boolean isAllowAll() { + return expressions.isEmpty(); + } + + @Override + public String toString() { + if (isAllowAll()) { + return "FM:[]"; + } else { + return "FM:" + expressions; + } + } + + @Override + public List getSource() { + return this.expressions.stream().map(FieldMaskingRule.Field::getSource).collect(Collectors.toList()); + } + + static ImmutableList parseExpressions(RoleV7.Index index, FieldMasking.Config fieldMaskingConfig) + throws PrivilegesConfigurationValidationException { + ImmutableList.Builder result = ImmutableList.builder(); + + for (String source : index.getMasked_fields()) { + result.add(new Field(new FieldMaskingExpression(source), fieldMaskingConfig)); + } + + return result.build(); + } + } + + public static class MultiRole extends FieldMaskingRule { + final ImmutableList parts; + final boolean allowAll; + + MultiRole(Collection parts) { + this.parts = ImmutableList.copyOf(parts); + this.allowAll = this.parts.stream().anyMatch(SimpleRule::isAllowAll); + } + + public Field get(String field) { + field = stripKeywordSuffix(field); + + for (SimpleRule part : parts) { + Field masking = part.get(field); + + if (masking != null) { + return masking; + } + } + + return null; + } + + public boolean isAllowAll() { + return allowAll; + } + + @Override + public String toString() { + if (isAllowAll()) { + return "FM:[]"; + } else { + return "FM:" + parts.stream().map((p) -> p.expressions).collect(Collectors.toList()); + } + } + + @Override + public List getSource() { + return this.parts.stream().flatMap(r -> r.getSource().stream()).collect(Collectors.toList()); + } + } + + /** + * Represents a single field that is supposed to be masked. Combines a single expression with the global + * configuration. + */ + public static class Field { + private final FieldMaskingExpression expression; + + private final String hashAlgorithm; + private final Salt salt; + private final byte[] saltBytes; + + Field(FieldMaskingExpression expression, FieldMasking.Config fieldMaskingConfig) { + this.expression = expression; + this.hashAlgorithm = expression.getAlgoName() != null ? expression.getAlgoName() + : StringUtils.isNotEmpty(fieldMaskingConfig.getDefaultHashAlgorithm()) ? 
fieldMaskingConfig.getDefaultHashAlgorithm() + : null; + this.salt = fieldMaskingConfig.getSalt(); + this.saltBytes = this.salt.getSalt16(); + } + + public WildcardMatcher getPattern() { + return expression.getPattern(); + } + + public byte[] apply(byte[] value) { + if (expression.getRegexReplacements() != null) { + return applyRegexReplacements(value, expression.getRegexReplacements()); + } else if (this.hashAlgorithm != null) { + return customHash(value, this.hashAlgorithm); + } else { + return blake2bHash(value); + } + } + + public String apply(String value) { + return new String(apply(value.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8); + } + + public BytesRef apply(BytesRef value) { + if (value == null) { + return null; + } + + return new BytesRef(apply(BytesRef.deepCopyOf(value).bytes)); + } + + @Override + public String toString() { + return expression.toString(); + } + + String getSource() { + return expression.getSource(); + } + + FieldMaskingExpression getExpression() { + return expression; + } + + private static byte[] customHash(byte[] in, String algorithm) { + try { + MessageDigest digest = MessageDigest.getInstance(algorithm); + return Hex.encode(digest.digest(in)); + } catch (NoSuchAlgorithmException e) { + throw new IllegalArgumentException(e); + } + } + + private byte[] applyRegexReplacements(byte[] value, List regexReplacements) { + String string = new String(value, StandardCharsets.UTF_8); + for (FieldMaskingExpression.RegexReplacement rr : regexReplacements) { + string = rr.getRegex().matcher(string).replaceAll(rr.getReplacement()); + } + return string.getBytes(StandardCharsets.UTF_8); + } + + private byte[] blake2bHash(byte[] in) { + // Salt is passed incorrectly but order of parameters is retained at present to ensure full backwards compatibility + // Tracking with https://github.com/opensearch-project/security/issues/4274 + final Blake2b hash = new Blake2b(null, 32, null, saltBytes); + hash.update(in, 0, in.length); + final byte[] out = new byte[hash.getDigestSize()]; + hash.digest(out, 0); + + return Hex.encode(out); + } + } + + static String stripKeywordSuffix(String field) { + if (field.endsWith(".keyword")) { + return field.substring(0, field.length() - ".keyword".length()); + } else { + return field; + } + } + } + + /** + * Represents a parsed field masking expression from the roles.yml file. + */ + public static class FieldMaskingExpression { + public static final FieldMaskingExpression MASK_ALL = new FieldMaskingExpression(WildcardMatcher.ANY, "*"); + + private final WildcardMatcher pattern; + private final String algoName; + private final List regexReplacements; + private final String source; + + public FieldMaskingExpression(String value) throws PrivilegesConfigurationValidationException { + this.source = value; + + List tokens = Splitter.on("::").splitToList(value); + pattern = WildcardMatcher.from(tokens.get(0)); + + if (tokens.size() == 1) { + algoName = null; + regexReplacements = null; + } else if (tokens.size() == 2) { + regexReplacements = null; + try { + this.algoName = tokens.get(1); + // We try to instantiate the MessageDigest instance already now to make sure that it is valid. + // However, we do not store the instance as MessageDigest instance are NOT thread safe. + // Some MessageDigest implementations allow to be cloned. A possible future optimization would + // be detecting whether the instances can be cloned and then using the clone method for + // construction. 
+ MessageDigest.getInstance(tokens.get(1)); + } catch (NoSuchAlgorithmException e) { + throw new PrivilegesConfigurationValidationException("Invalid algorithm " + tokens.get(1)); + } + } else if (tokens.size() % 2 == 1) { + algoName = null; + regexReplacements = new ArrayList<>((tokens.size() - 1) / 2); + for (int i = 1; i < tokens.size() - 1; i = i + 2) { + regexReplacements.add(new RegexReplacement(tokens.get(i), tokens.get(i + 1))); + } + } else { + throw new PrivilegesConfigurationValidationException( + "A field masking expression must have the form 'field_name', 'field_name::algorithm', 'field_name::regex::replacement' or 'field_name::(regex::replacement)+'" + ); + } + } + + private FieldMaskingExpression(WildcardMatcher pattern, String source) { + this.pattern = pattern; + this.source = source; + this.algoName = null; + this.regexReplacements = null; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof FieldMaskingExpression)) { + return false; + } + FieldMaskingExpression that = (FieldMaskingExpression) o; + return Objects.equals(pattern, that.pattern) + && Objects.equals(algoName, that.algoName) + && Objects.equals(regexReplacements, that.regexReplacements); + } + + @Override + public int hashCode() { + return Objects.hash(pattern, algoName, regexReplacements); + } + + static class RegexReplacement { + private final java.util.regex.Pattern regex; + private final String replacement; + + RegexReplacement(String regex, String replacement) throws PrivilegesConfigurationValidationException { + if (!regex.startsWith("/") || !regex.endsWith("/")) { + throw new PrivilegesConfigurationValidationException("A regular expression needs to be wrapped in /.../"); + } + + try { + this.regex = java.util.regex.Pattern.compile(regex.substring(1).substring(0, regex.length() - 2)); + } catch (PatternSyntaxException e) { + throw new PrivilegesConfigurationValidationException(e.getMessage(), e); + } + + this.replacement = replacement; + } + + java.util.regex.Pattern getRegex() { + return regex; + } + + String getReplacement() { + return replacement; + } + + @Override + public String toString() { + return "/" + regex + "/::" + replacement; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof RegexReplacement)) return false; + RegexReplacement that = (RegexReplacement) o; + return Objects.equals(regex.pattern(), that.regex.pattern()) && Objects.equals(replacement, that.replacement); + } + + @Override + public int hashCode() { + return Objects.hash(regex.pattern(), replacement); + } + } + + @Override + public String toString() { + return source; + } + + String getAlgoName() { + return algoName; + } + + List getRegexReplacements() { + return regexReplacements; + } + + WildcardMatcher getPattern() { + return pattern; + } + + String getSource() { + return source; + } + } + + public static class Config { + public static Config fromSettings(Settings settings) { + return new Config(settings.get(ConfigConstants.SECURITY_MASKED_FIELDS_ALGORITHM_DEFAULT), Salt.from(settings)); + } + + public static final Config DEFAULT = fromSettings(Settings.EMPTY); + + private final String defaultHashAlgorithm; + private final Salt salt; + + Config(String defaultHashAlgorithm, Salt salt) { + this.defaultHashAlgorithm = defaultHashAlgorithm; + this.salt = salt; + } + + public String getDefaultHashAlgorithm() { + return defaultHashAlgorithm; + } + + public Salt getSalt() { + return salt; + } + } + +} diff --git 
a/src/main/java/org/opensearch/security/privileges/dlsfls/FieldPrivileges.java b/src/main/java/org/opensearch/security/privileges/dlsfls/FieldPrivileges.java new file mode 100644 index 0000000000..90434b7e89 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/FieldPrivileges.java @@ -0,0 +1,379 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges.dlsfls; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.regex.PatternSyntaxException; + +import com.google.common.collect.ImmutableList; + +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.common.settings.Settings; +import org.opensearch.security.privileges.PrivilegesConfigurationValidationException; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.support.WildcardMatcher; + +/** + * This class converts role configuration into pre-computed, optimized data structures for checking FLS privileges. + *
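The include/exclude semantics implemented by the FlsRule class further below can be sketched in a few lines. This simplified illustration uses hypothetical names, supports only a trailing "*" glob (the plugin uses WildcardMatcher and additionally accepts the legacy "!" prefix and /regex/ patterns), and lets exclusions win over inclusions, matching the fallback behaviour documented in FlsRule for mixed configurations:

```java
import java.util.List;

// Hypothetical sketch, not the plugin's FlsRule: patterns starting with "~" exclude fields,
// everything else includes them.
public class SimpleFlsRule {
    private final List<String> patterns;

    SimpleFlsRule(List<String> patterns) {
        this.patterns = patterns;
    }

    boolean isAllowed(String field) {
        List<String> exclusions = patterns.stream().filter(p -> p.startsWith("~")).toList();
        if (!exclusions.isEmpty()) {
            // exclusions win; mixed configurations fall back to exclusion-only semantics
            return exclusions.stream().noneMatch(p -> matches(p.substring(1), field));
        }
        if (patterns.isEmpty()) {
            return true; // no FLS configured: everything is visible
        }
        return patterns.stream().anyMatch(p -> matches(p, field));
    }

    // Very small glob: only a trailing "*" is supported here.
    private static boolean matches(String pattern, String field) {
        return pattern.endsWith("*") ? field.startsWith(pattern.substring(0, pattern.length() - 1)) : pattern.equals(field);
    }

    public static void main(String[] args) {
        SimpleFlsRule excluding = new SimpleFlsRule(List.of("~secret*"));
        System.out.println(excluding.isAllowed("secret_ssn")); // false
        System.out.println(excluding.isAllowed("title"));      // true

        SimpleFlsRule including = new SimpleFlsRule(List.of("meta*"));
        System.out.println(including.isAllowed("meta.author")); // true
        System.out.println(including.isAllowed("body"));        // false
    }
}
```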

+ * With the exception of the statefulRules property, instances of this class are immutable. The life-cycle of an + * instance of this class corresponds to the life-cycle of the role configuration. If the role configuration is changed, + * a new instance needs to be built. + *

+ * Instances of this class are managed by DlsFlsProcessedConfig. + */ +public class FieldPrivileges extends AbstractRuleBasedPrivileges { + public FieldPrivileges(SecurityDynamicConfiguration roles, Map indexMetadata, Settings settings) { + super(roles, indexMetadata, FieldPrivileges::roleToRule, settings); + } + + static FlsRule roleToRule(RoleV7.Index rolePermissions) throws PrivilegesConfigurationValidationException { + List flsPatterns = rolePermissions.getFls(); + + if (flsPatterns != null && !flsPatterns.isEmpty()) { + return FlsRule.from(rolePermissions); + } else { + return null; + } + } + + @Override + protected FlsRule unrestricted() { + return FlsRule.ALLOW_ALL; + } + + @Override + protected FlsRule fullyRestricted() { + return FlsRule.DENY_ALL; + } + + @Override + protected FlsRule compile(PrivilegesEvaluationContext context, Collection rules) throws PrivilegesEvaluationException { + return FlsRule.merge(rules); + } + + /** + * Represents a set of FlsPatterns for a specific index. + */ + public static class FlsRule extends AbstractRuleBasedPrivileges.Rule { + static FlsRule of(String... rules) throws PrivilegesConfigurationValidationException { + return from(FlsPattern.parse(Arrays.asList(rules)), ImmutableList.of()); + } + + static FlsRule from(RoleV7.Index role) throws PrivilegesConfigurationValidationException { + return from(FlsPattern.parse(role.getFls()), ImmutableList.of(role)); + } + + static FlsRule from(List flsPatterns, ImmutableList sourceRoles) + throws PrivilegesConfigurationValidationException { + Set flsPatternsIncludingObjectsOnly = new HashSet<>(); + + for (FlsPattern flsPattern : flsPatterns) { + flsPatternsIncludingObjectsOnly.addAll(flsPattern.getParentObjectPatterns()); + } + + // If there are already explicit exclusions on certain object-only inclusions, we can remove these again + flsPatternsIncludingObjectsOnly.removeAll(flsPatterns); + + return new FlsRule(flsPatterns, flsPatternsIncludingObjectsOnly, sourceRoles); + } + + static FlsRule merge(Collection rules) { + if (rules.size() == 1) { + return rules.iterator().next(); + } + + Set patterns = new HashSet<>(); + Set objectOnlyPatterns = new HashSet<>(); + ImmutableList.Builder roles = ImmutableList.builderWithExpectedSize(rules.size()); + + for (FlsRule flsRule : rules) { + patterns.addAll(flsRule.patterns); + objectOnlyPatterns.addAll(flsRule.objectOnlyPatterns); + roles.addAll(flsRule.sourceRole); + } + + objectOnlyPatterns.removeAll(patterns); + + return new FlsRule(patterns, objectOnlyPatterns, roles.build()); + } + + public static final FlsRule ALLOW_ALL = new FlsRule(ImmutableList.of(), ImmutableList.of(), ImmutableList.of()); + public static final FlsRule DENY_ALL = new FlsRule( + ImmutableList.of(FlsPattern.EXCLUDE_ALL), + ImmutableList.of(), + ImmutableList.of() + ); + + final ImmutableList sourceRole; + final ImmutableList patterns; + final ImmutableList effectivePatterns; + final ImmutableList objectOnlyPatterns; + final boolean allowAll; + final boolean excluding; + + FlsRule( + Collection patterns, + Collection flsPatternsIncludingObjectsOnly, + ImmutableList sourceRole + ) { + this.sourceRole = sourceRole; + + Set flsPatternsExcluding = new HashSet<>(patterns.size()); + Set flsPatternsIncluding = new HashSet<>(patterns.size()); + + for (FlsPattern flsPattern : patterns) { + if (flsPattern.isExcluded()) { + flsPatternsExcluding.add(flsPattern); + } else { + flsPatternsIncluding.add(flsPattern); + } + } + + int exclusions = flsPatternsExcluding.size(); + int inclusions = 
flsPatternsIncluding.size(); + + if (exclusions == 0 && inclusions == 0) { + // Empty + this.effectivePatterns = this.patterns = ImmutableList.of(FlsPattern.INCLUDE_ALL); + this.excluding = false; + this.allowAll = true; + } else if (exclusions != 0 && inclusions == 0) { + // Only exclusions + this.effectivePatterns = this.patterns = ImmutableList.copyOf(flsPatternsExcluding); + this.excluding = true; + this.allowAll = false; + } else if (exclusions == 0 && inclusions != 0) { + // Only inclusions + this.effectivePatterns = this.patterns = ImmutableList.copyOf(flsPatternsIncluding); + this.excluding = false; + this.allowAll = flsPatternsIncluding.contains(FlsPattern.INCLUDE_ALL); + } else { + // Mixed inclusions and exclusions + // + // While the docs say that mixing inclusions and exclusions is not supported, the original + // implementation only regarded exclusions and disregarded inclusions if these were mixed. + // We are mirroring this behaviour here. It might make sense to rethink the semantics here, + // though, as there might be semantics which make more sense. From a UX POV, the current behavior + // can be quite confusing. + // + // See: + // https://github.com/opensearch-project/security/blob/e73fc24509363cb1573607c6cf47c98780fc89de/src/main/java/org/opensearch/security/configuration/DlsFlsFilterLeafReader.java#L658-L662 + // https://opensearch.org/docs/latest/security/access-control/field-level-security/ + this.patterns = ImmutableList.copyOf(patterns); + this.effectivePatterns = ImmutableList.copyOf(flsPatternsExcluding); + this.excluding = true; + this.allowAll = false; + } + + this.objectOnlyPatterns = ImmutableList.copyOf(flsPatternsIncludingObjectsOnly); + } + + public boolean isAllowed(String field) { + if (isAllowAll()) { + return true; + } + + field = stripKeywordSuffix(field); + + if (excluding) { + for (FlsPattern pattern : this.effectivePatterns) { + assert pattern.isExcluded(); + if (pattern.getPattern().test(field)) { + return false; + } + } + return true; + } else { + // including + for (FlsPattern pattern : this.effectivePatterns) { + assert !pattern.isExcluded(); + if (pattern.getPattern().test(field)) { + return true; + } + } + return false; + } + } + + public boolean isObjectAllowed(String field) { + if (excluding) { + return isAllowed(field); + } + + for (FlsPattern pattern : this.objectOnlyPatterns) { + if (pattern.getPattern().test(field)) { + return true; + } + } + + return false; + } + + public boolean isAllowAll() { + return allowAll; + } + + @Override + public String toString() { + if (isAllowAll()) { + return "FLS:*"; + } else { + return "FLS:" + patterns; + } + } + + public List getSource() { + return patterns.stream().map(FlsPattern::getSource).collect(ImmutableList.toImmutableList()); + } + + @Override + public boolean isUnrestricted() { + return this.isAllowAll(); + } + + /** + * See https://github.com/opensearch-project/security/pull/2375 + */ + static String stripKeywordSuffix(String field) { + if (field.endsWith(".keyword")) { + return field.substring(0, field.length() - ".keyword".length()); + } else { + return field; + } + } + } + + /** + * Represents a single FLS pattern that is matched again a field name. + *

+ * FLS patterns can look like this: + *

    + *
  • field - just a simple field name, included in the visible fields + *
  • field* - a pattern on a field name, included in the visible fields + *
  • ~field - a simple field name, excluded from the visible fields (the prefix ! is also supported for legacy reasons, but it is undocumented) + *
  • field.field - a field inside another field + *
  • Regular expressions enclosed in /.../ (undocumented, does not pair well with nested objects) + *
  • Any combination of the above + *
+ */ + public static class FlsPattern { + public static final FlsPattern INCLUDE_ALL = new FlsPattern(WildcardMatcher.ANY, false, "*"); + public static final FlsPattern EXCLUDE_ALL = new FlsPattern(WildcardMatcher.ANY, true, "~*"); + + /** + * True if the attribute is supposed to be excluded (i.e., pattern started with ~), false otherwise. + */ + private final boolean excluded; + + /** + * The compiled pattern (excluding leading ~) + */ + private final WildcardMatcher pattern; + + /** + * The original string + */ + private final String source; + + public FlsPattern(String string) throws PrivilegesConfigurationValidationException { + try { + if (string.startsWith("~") || string.startsWith("!")) { + excluded = true; + pattern = WildcardMatcher.from(string.substring(1)); + } else { + pattern = WildcardMatcher.from(string); + excluded = false; + } + + this.source = string; + } catch (PatternSyntaxException e) { + throw new PrivilegesConfigurationValidationException("Invalid FLS pattern " + string, e); + } + } + + FlsPattern(WildcardMatcher pattern, boolean excluded, String source) { + this.pattern = pattern; + this.excluded = excluded; + this.source = source; + } + + public String getSource() { + return source; + } + + public WildcardMatcher getPattern() { + return pattern; + } + + public boolean isExcluded() { + return excluded; + } + + @Override + public String toString() { + return source; + } + + List getParentObjectPatterns() { + if (excluded || source.indexOf('.') == -1 || (source.startsWith("/") && source.endsWith("/"))) { + return Collections.emptyList(); + } + + List subPatterns = new ArrayList<>(); + + for (int pos = source.indexOf('.'); pos != -1; pos = source.indexOf('.', pos + 1)) { + String subString = source.substring(0, pos); + + subPatterns.add(new FlsPattern(WildcardMatcher.from(subString), false, subString)); + } + + return subPatterns; + } + + @Override + public boolean equals(Object o) { + if (o instanceof FlsPattern) { + FlsPattern that = (FlsPattern) o; + return this.source.equals(that.source); + } else { + return false; + } + } + + @Override + public int hashCode() { + return source.hashCode(); + } + + public static List parse(List flsPatternStrings) throws PrivilegesConfigurationValidationException { + List flsPatterns = new ArrayList<>(flsPatternStrings.size()); + + for (String flsPatternSource : flsPatternStrings) { + flsPatterns.add(new FlsPattern(flsPatternSource)); + } + + return flsPatterns; + } + + } + +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/FlsDocumentFilter.java b/src/main/java/org/opensearch/security/privileges/dlsfls/FlsDocumentFilter.java new file mode 100644 index 0000000000..9f36d2ef5c --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/FlsDocumentFilter.java @@ -0,0 +1,207 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges.dlsfls; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.ArrayDeque; +import java.util.Deque; +import java.util.Set; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonToken; + +/** + * Implements document transformation for FLS and field masking using a chained streaming parser and generator. + * This provides optimal throughput while keeping the heap footprint low. + *

+ * This class is supposed to operate on _source documents. It will filter these documents and remove fields disallowed + * by FLS, and mask fields when required for field masking. + *
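The chained parser/generator approach can be shown with a much smaller, self-contained sketch: a flat deny-set of field names instead of FLS rules and masking, and none of the parent-name bookkeeping that FlsDocumentFilter performs (class and method names are hypothetical):

```java
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Set;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

// Hypothetical, much simplified sketch of chained streaming filtering: drop every field whose
// name is in a deny set (at any nesting depth), copy everything else token by token.
public class SourceFilterSketch {
    static byte[] filter(byte[] source, Set<String> deniedFields) throws IOException {
        JsonFactory factory = new JsonFactory();
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (JsonParser parser = factory.createParser(source); JsonGenerator generator = factory.createGenerator(out)) {
            for (JsonToken token = parser.nextToken(); token != null; token = parser.nextToken()) {
                if (token == JsonToken.FIELD_NAME && deniedFields.contains(parser.currentName())) {
                    parser.nextToken();    // advance to the value of the denied field
                    parser.skipChildren(); // skips a whole object/array value; no-op for scalar values
                } else {
                    generator.copyCurrentEvent(parser); // write the current token unchanged
                }
            }
        }
        return out.toByteArray();
    }

    public static void main(String[] args) throws IOException {
        byte[] doc = "{\"public\":1,\"secret\":{\"a\":2},\"other\":\"x\"}".getBytes(StandardCharsets.UTF_8);
        System.out.println(new String(filter(doc, Set.of("secret")), StandardCharsets.UTF_8));
        // {"public":1,"other":"x"}
    }
}
```

The real filter additionally queues each field name until the type of its value is known, so that an object-valued field can still be entered when only some of its children are allowed.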

+ * While FLS applies to attributes of any type, field masking is only available for string valued attributes. + */ +class FlsDocumentFilter { + private static final JsonFactory JSON_FACTORY = new JsonFactory(); + + static byte[] filter( + byte[] bytes, + FieldPrivileges.FlsRule flsRule, + FieldMasking.FieldMaskingRule fieldMaskingRule, + Set metaFields + ) throws IOException { + try (InputStream in = new ByteArrayInputStream(bytes); ByteArrayOutputStream out = new ByteArrayOutputStream()) { + filter(in, out, flsRule, fieldMaskingRule, metaFields); + return out.toByteArray(); + } + } + + static void filter( + InputStream in, + OutputStream out, + FieldPrivileges.FlsRule flsRule, + FieldMasking.FieldMaskingRule fieldMaskingRule, + Set metaFields + ) throws IOException { + try (JsonParser parser = JSON_FACTORY.createParser(in); JsonGenerator generator = JSON_FACTORY.createGenerator(out)) { + new FlsDocumentFilter(parser, generator, flsRule, fieldMaskingRule, metaFields).copy(); + } + } + + private final JsonParser parser; + private final JsonGenerator generator; + private final FieldPrivileges.FlsRule flsRule; + private final FieldMasking.FieldMaskingRule fieldMaskingRule; + + /** + * Names of meta fields. Meta fields will be always kept included in the documents, even if the FLS or + * fieldMaskingRule would forbid them. + */ + private final Set metaFields; + + /** + * A stack of field names. The first element will be the name of the attribute in the root object. Does not include + * fullParentName. + */ + private Deque nameStack = new ArrayDeque<>(); + + FlsDocumentFilter( + JsonParser parser, + JsonGenerator generator, + FieldPrivileges.FlsRule flsRule, + FieldMasking.FieldMaskingRule fieldMaskingRule, + Set metaFields + ) { + this.parser = parser; + this.generator = generator; + this.flsRule = flsRule; + this.fieldMaskingRule = fieldMaskingRule; + this.metaFields = metaFields; + } + + @SuppressWarnings("incomplete-switch") + private void copy() throws IOException { + // queuedFieldName will contain the unqualified name of a field that was encountered, but not yet written. + // It is necessary to queue the field names because it can depend on the type of the following value whether + // the field/value pair will be written: If the value is object-valued, we will also start writing the object + // if we expect the object to contain allowed values, even if the object itself is not fully allowed. + String queuedFieldName = null; + // fullCurrentName contains the qualified name of the current field. Changes for every FIELD_NAME token. Does + // include names of parent objects concatenated by ".". If the current field is named "c" and the parent + // objects are named "a", "b", this will contain "a.b.c". + String fullCurrentName = null; + // fullParentName contains the qualified name of the object containing the current field. Will be null if the + // current field is at the root object of the document. + String fullParentName = null; + + for (JsonToken token = parser.currentToken() != null ? parser.currentToken() : parser.nextToken(); token != null; token = parser + .nextToken()) { + + if (queuedFieldName != null) { + boolean startOfObjectOrArray = (token == JsonToken.START_OBJECT || token == JsonToken.START_ARRAY); + String fullQueuedFieldName = fullParentName == null ? queuedFieldName : fullParentName + "." 
+ queuedFieldName; + queuedFieldName = null; + + if (metaFields.contains(fullQueuedFieldName) + || flsRule.isAllowed(fullQueuedFieldName) + || (startOfObjectOrArray && flsRule.isObjectAllowed(fullQueuedFieldName))) { + generator.writeFieldName(parser.currentName()); + fullCurrentName = fullQueuedFieldName; + } else { + // If the current field name is disallowed by FLS, we will skip the next token. + // If the next token is an object or array start, all the child tokens will be also skipped + if (startOfObjectOrArray) { + parser.skipChildren(); + } + continue; + } + } + + switch (token) { + case FIELD_NAME: + // We do not immediately write field names, because we need to know the type of the value + // when checking FLS rules + queuedFieldName = parser.currentName(); + break; + + case START_OBJECT: + generator.writeStartObject(); + if (fullParentName != null) { + nameStack.add(fullParentName); + } + fullParentName = fullCurrentName; + break; + + case END_OBJECT: + generator.writeEndObject(); + fullCurrentName = fullParentName; + if (nameStack.isEmpty()) { + fullParentName = null; + } else { + fullParentName = nameStack.removeLast(); + } + break; + + case START_ARRAY: + generator.writeStartArray(); + break; + + case END_ARRAY: + generator.writeEndArray(); + break; + + case VALUE_TRUE: + generator.writeBoolean(Boolean.TRUE); + break; + + case VALUE_FALSE: + generator.writeBoolean(Boolean.FALSE); + break; + + case VALUE_NULL: + generator.writeNull(); + break; + + case VALUE_NUMBER_FLOAT: + generator.writeNumber(parser.getDecimalValue()); + break; + + case VALUE_NUMBER_INT: + generator.writeNumber(parser.getBigIntegerValue()); + break; + + case VALUE_STRING: + FieldMasking.FieldMaskingRule.Field field = fieldMaskingRule.get(fullCurrentName); + + if (field != null) { + generator.writeString(field.apply(parser.getText())); + } else { + generator.writeString(parser.getText()); + } + break; + + case VALUE_EMBEDDED_OBJECT: + generator.writeEmbeddedObject(parser.getEmbeddedObject()); + break; + + default: + throw new IllegalStateException("Unexpected token: " + token); + + } + + } + } + +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/FlsStoredFieldVisitor.java b/src/main/java/org/opensearch/security/privileges/dlsfls/FlsStoredFieldVisitor.java new file mode 100644 index 0000000000..e504eed41d --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/FlsStoredFieldVisitor.java @@ -0,0 +1,130 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges.dlsfls; + +import java.io.IOException; +import java.util.Set; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.StoredFieldVisitor; + +import org.opensearch.OpenSearchException; + +/** + * Applies FLS and field masking while reading documents. This does two things: + *

    + *
  • Filter the _source document and remove fields disallowed by FLS, and mask fields when required for field masking + *
  • Filter out other fields disallowed by FLS by using the needsField() method + *
+ */ +public class FlsStoredFieldVisitor extends StoredFieldVisitor { + private static final Logger log = LogManager.getLogger(FlsStoredFieldVisitor.class); + + private final StoredFieldVisitor delegate; + private final FieldPrivileges.FlsRule flsRule; + private final FieldMasking.FieldMaskingRule fieldMaskingRule; + private final Set metaFields; + + public FlsStoredFieldVisitor( + StoredFieldVisitor delegate, + FieldPrivileges.FlsRule flsRule, + FieldMasking.FieldMaskingRule fieldMaskingRule, + Set metaFields + ) { + super(); + this.delegate = delegate; + this.flsRule = flsRule; + this.fieldMaskingRule = fieldMaskingRule; + this.metaFields = metaFields; + + if (log.isDebugEnabled()) { + log.debug("Created FlsStoredFieldVisitor for {}; {}", flsRule, fieldMaskingRule); + } + } + + @Override + public void binaryField(FieldInfo fieldInfo, byte[] value) throws IOException { + if (fieldInfo.name.equals("_source")) { + try { + delegate.binaryField(fieldInfo, FlsDocumentFilter.filter(value, flsRule, fieldMaskingRule, metaFields)); + } catch (IOException e) { + throw new OpenSearchException("Cannot filter source of document", e); + } + } else { + // See https://github.com/opensearch-project/security/pull/4826 + FieldMasking.FieldMaskingRule.Field field = this.fieldMaskingRule.get(fieldInfo.name); + + if (field != null) { + delegate.binaryField(fieldInfo, field.apply(value)); + } else { + delegate.binaryField(fieldInfo, value); + } + } + } + + @Override + public void stringField(FieldInfo fieldInfo, String value) throws IOException { + FieldMasking.FieldMaskingRule.Field field = this.fieldMaskingRule.get(fieldInfo.name); + + if (field != null) { + delegate.stringField(fieldInfo, field.apply(value)); + } else { + delegate.stringField(fieldInfo, value); + } + } + + @Override + public Status needsField(FieldInfo fieldInfo) throws IOException { + return metaFields.contains(fieldInfo.name) || flsRule.isAllowed(fieldInfo.name) ? delegate.needsField(fieldInfo) : Status.NO; + } + + @Override + public int hashCode() { + return delegate.hashCode(); + } + + @Override + public void intField(final FieldInfo fieldInfo, final int value) throws IOException { + delegate.intField(fieldInfo, value); + } + + @Override + public void longField(final FieldInfo fieldInfo, final long value) throws IOException { + delegate.longField(fieldInfo, value); + } + + @Override + public void floatField(final FieldInfo fieldInfo, final float value) throws IOException { + delegate.floatField(fieldInfo, value); + } + + @Override + public void doubleField(final FieldInfo fieldInfo, final double value) throws IOException { + delegate.doubleField(fieldInfo, value); + } + + @Override + public boolean equals(final Object obj) { + return delegate.equals(obj); + } + + @Override + public String toString() { + return delegate.toString(); + } + + public StoredFieldVisitor delegate() { + return this.delegate; + } + +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/IndexToRuleMap.java b/src/main/java/org/opensearch/security/privileges/dlsfls/IndexToRuleMap.java new file mode 100644 index 0000000000..2c359af032 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/IndexToRuleMap.java @@ -0,0 +1,61 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges.dlsfls; + +import java.util.function.Predicate; + +import com.google.common.collect.ImmutableMap; + +/** + * Maps index names to DLS/FLS/FM rules. + *

+ * This only contains index names, not any alias or data stream names. + *

+ * This map should be only used when really necessary, as computing a whole map of indices can be expensive. + * It should be preferred to directly query the privilege status of indices using the getRestriction() methods + * of the sub-classes of AbstractRuleBasedPrivileges. + */ +public class IndexToRuleMap { + private static final IndexToRuleMap UNRESTRICTED = new IndexToRuleMap(ImmutableMap.of()); + + private final ImmutableMap indexMap; + + IndexToRuleMap(ImmutableMap indexMap) { + this.indexMap = indexMap; + } + + public boolean isUnrestricted() { + return this.indexMap.isEmpty() || this.indexMap.values().stream().allMatch(Rule::isUnrestricted); + } + + public ImmutableMap getIndexMap() { + return indexMap; + } + + public boolean containsAny(Predicate predicate) { + if (indexMap.isEmpty()) { + return false; + } + + for (Rule rule : this.indexMap.values()) { + if (predicate.test(rule)) { + return true; + } + } + + return false; + } + + @SuppressWarnings("unchecked") + public static IndexToRuleMap unrestricted() { + return (IndexToRuleMap) UNRESTRICTED; + } +} diff --git a/src/main/java/org/opensearch/security/resolver/IndexResolverReplacer.java b/src/main/java/org/opensearch/security/resolver/IndexResolverReplacer.java index 4a4e714348..ebca3e652e 100644 --- a/src/main/java/org/opensearch/security/resolver/IndexResolverReplacer.java +++ b/src/main/java/org/opensearch/security/resolver/IndexResolverReplacer.java @@ -30,6 +30,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.EnumSet; import java.util.HashSet; import java.util.Iterator; import java.util.List; @@ -37,6 +38,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.Supplier; import java.util.regex.PatternSyntaxException; import java.util.stream.Collectors; @@ -104,13 +106,17 @@ public class IndexResolverReplacer { private static final Set NULL_SET = new HashSet<>(Collections.singleton(null)); private final Logger log = LogManager.getLogger(this.getClass()); private final IndexNameExpressionResolver resolver; - private final ClusterService clusterService; + private final Supplier clusterStateSupplier; private final ClusterInfoHolder clusterInfoHolder; private volatile boolean respectRequestIndicesOptions = false; - public IndexResolverReplacer(IndexNameExpressionResolver resolver, ClusterService clusterService, ClusterInfoHolder clusterInfoHolder) { + public IndexResolverReplacer( + IndexNameExpressionResolver resolver, + Supplier clusterStateSupplier, + ClusterInfoHolder clusterInfoHolder + ) { this.resolver = resolver; - this.clusterService = clusterService; + this.clusterStateSupplier = clusterStateSupplier; this.clusterInfoHolder = clusterInfoHolder; } @@ -236,10 +242,10 @@ private void resolveIndexPatterns( final RemoteClusterService remoteClusterService = OpenSearchSecurityPlugin.GuiceHolder.getRemoteClusterService(); - if (remoteClusterService.isCrossClusterSearchEnabled() && enableCrossClusterResolution) { + if (remoteClusterService != null && remoteClusterService.isCrossClusterSearchEnabled() && enableCrossClusterResolution) { remoteIndices = new HashSet<>(); final Map remoteClusterIndices = OpenSearchSecurityPlugin.GuiceHolder.getRemoteClusterService() - .groupIndices(indicesOptions, original, idx -> resolver.hasIndexAbstraction(idx, clusterService.state())); + .groupIndices(indicesOptions, original, idx -> resolver.hasIndexAbstraction(idx, clusterStateSupplier.get())); final Set remoteClusters = 
remoteClusterIndices.keySet() .stream() .filter(k -> !RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY.equals(k)) @@ -292,7 +298,7 @@ private void resolveIndexPatterns( } else { - final ClusterState state = clusterService.state(); + final ClusterState state = clusterStateSupplier.get(); final Set dateResolvedLocalRequestedPatterns = localRequestedPatterns.stream() .map(resolver::resolveDateMathExpression) .collect(Collectors.toSet()); @@ -425,6 +431,10 @@ public String[] provide(String[] original, Object request, boolean supportsRepla }, false); } + public boolean replace(final TransportRequest request, boolean retainMode, Collection replacements) { + return replace(request, retainMode, replacements.toArray(new String[replacements.size()])); + } + public Resolved resolveRequest(final Object request) { if (log.isDebugEnabled()) { log.debug("Resolve aliases, indices and types from {}", request.getClass().getSimpleName()); @@ -449,6 +459,11 @@ public final static class Resolved { SearchRequest.DEFAULT_INDICES_OPTIONS ); + private static final IndicesOptions EXACT_INDEX_OPTIONS = new IndicesOptions( + EnumSet.of(IndicesOptions.Option.FORBID_ALIASES_TO_MULTIPLE_INDICES), + EnumSet.noneOf(IndicesOptions.WildcardStates.class) + ); + private final Set aliases; private final Set allIndices; private final Set originalRequested; @@ -485,8 +500,12 @@ public Set getAllIndices() { } public Set getAllIndicesResolved(ClusterService clusterService, IndexNameExpressionResolver resolver) { + return getAllIndicesResolved(clusterService::state, resolver); + } + + public Set getAllIndicesResolved(Supplier clusterStateSupplier, IndexNameExpressionResolver resolver) { if (isLocalAll) { - return new HashSet<>(Arrays.asList(resolver.concreteIndexNames(clusterService.state(), indicesOptions, "*"))); + return new HashSet<>(Arrays.asList(resolver.concreteIndexNames(clusterStateSupplier.get(), indicesOptions, "*"))); } else { return allIndices; } @@ -550,6 +569,11 @@ public boolean equals(Object obj) { } else if (!remoteIndices.equals(other.remoteIndices)) return false; return true; } + + public static Resolved ofIndex(String index) { + ImmutableSet indexSet = ImmutableSet.of(index); + return new Resolved(ImmutableSet.of(), indexSet, indexSet, ImmutableSet.of(), EXACT_INDEX_OPTIONS); + } } private List renamedIndices(final RestoreSnapshotRequest request, final List filteredIndices) { diff --git a/src/main/java/org/opensearch/security/securityconf/ConfigModel.java b/src/main/java/org/opensearch/security/securityconf/ConfigModel.java index 33af51257c..7429a2c776 100644 --- a/src/main/java/org/opensearch/security/securityconf/ConfigModel.java +++ b/src/main/java/org/opensearch/security/securityconf/ConfigModel.java @@ -38,7 +38,5 @@ public abstract class ConfigModel { public abstract Set mapSecurityRoles(User user, TransportAddress caller); - public abstract SecurityRoles getSecurityRoles(); - public abstract Set getAllConfiguredTenantNames(); } diff --git a/src/main/java/org/opensearch/security/securityconf/ConfigModelV6.java b/src/main/java/org/opensearch/security/securityconf/ConfigModelV6.java deleted file mode 100644 index e35fb40a24..0000000000 --- a/src/main/java/org/opensearch/security/securityconf/ConfigModelV6.java +++ /dev/null @@ -1,1316 +0,0 @@ -/* - * Copyright 2015-2018 floragunn GmbH - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package org.opensearch.security.securityconf; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Objects; -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import com.google.common.base.Joiner; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Iterables; -import com.google.common.collect.ListMultimap; -import com.google.common.collect.MultimapBuilder.SetMultimapBuilder; -import com.google.common.collect.SetMultimap; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -import org.opensearch.ExceptionsHelper; -import org.opensearch.action.support.IndicesOptions; -import org.opensearch.cluster.metadata.IndexNameExpressionResolver; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.collect.Tuple; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.set.Sets; -import org.opensearch.core.common.transport.TransportAddress; -import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; -import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; -import org.opensearch.security.securityconf.impl.v6.ActionGroupsV6; -import org.opensearch.security.securityconf.impl.v6.RoleMappingsV6; -import org.opensearch.security.securityconf.impl.v6.RoleV6; -import org.opensearch.security.securityconf.impl.v6.RoleV6.Index; -import org.opensearch.security.support.ConfigConstants; -import org.opensearch.security.support.WildcardMatcher; -import org.opensearch.security.user.User; - -import static org.opensearch.cluster.metadata.IndexAbstraction.Type.ALIAS; - -public class ConfigModelV6 extends ConfigModel { - - protected final Logger log = LogManager.getLogger(this.getClass()); - private ConfigConstants.RolesMappingResolution rolesMappingResolution; - private ActionGroupResolver agr = null; - private SecurityRoles securityRoles = null; - private TenantHolder tenantHolder; - private RoleMappingHolder roleMappingHolder; - private SecurityDynamicConfiguration roles; - - public ConfigModelV6( - SecurityDynamicConfiguration roles, - SecurityDynamicConfiguration actiongroups, - SecurityDynamicConfiguration rolesmapping, - DynamicConfigModel dcm, - Settings opensearchSettings - ) { - - this.roles = roles; - - try { - rolesMappingResolution = ConfigConstants.RolesMappingResolution.valueOf( - opensearchSettings.get( - ConfigConstants.SECURITY_ROLES_MAPPING_RESOLUTION, - ConfigConstants.RolesMappingResolution.MAPPING_ONLY.toString() - ).toUpperCase() - ); - } catch (Exception e) { - 
log.error("Cannot apply roles mapping resolution", e); - rolesMappingResolution = ConfigConstants.RolesMappingResolution.MAPPING_ONLY; - } - - agr = reloadActionGroups(actiongroups); - securityRoles = reload(roles); - tenantHolder = new TenantHolder(roles); - roleMappingHolder = new RoleMappingHolder(rolesmapping, dcm.getHostsResolverMode()); - } - - public Set getAllConfiguredTenantNames() { - final Set configuredTenants = new HashSet<>(); - for (Entry securityRole : roles.getCEntries().entrySet()) { - Map tenants = securityRole.getValue().getTenants(); - - if (tenants != null) { - configuredTenants.addAll(tenants.keySet()); - } - - } - - return Collections.unmodifiableSet(configuredTenants); - } - - public SecurityRoles getSecurityRoles() { - return securityRoles; - } - - private static interface ActionGroupResolver { - Set resolvedActions(final List actions); - } - - private ActionGroupResolver reloadActionGroups(SecurityDynamicConfiguration actionGroups) { - return new ActionGroupResolver() { - - private Set getGroupMembers(final String groupname) { - - if (actionGroups == null) { - return Collections.emptySet(); - } - - return Collections.unmodifiableSet(resolve(actionGroups, groupname)); - } - - private Set resolve(final SecurityDynamicConfiguration actionGroups, final String entry) { - - // SG5 format, plain array - // List en = actionGroups.getAsList(DotPath.of(entry)); - // if (en.isEmpty()) { - // try SG6 format including readonly and permissions key - // en = actionGroups.getAsList(DotPath.of(entry + "." + ConfigConstants.CONFIGKEY_ACTION_GROUPS_PERMISSIONS)); - // } - - if (!actionGroups.getCEntries().containsKey(entry)) { - return Collections.emptySet(); - } - - final Set ret = new HashSet(); - - final Object actionGroupAsObject = actionGroups.getCEntries().get(entry); - - if (actionGroupAsObject instanceof List) { - @SuppressWarnings("unchecked") - final List actionGroupPermissions = (List) actionGroupAsObject; - for (final String perm : actionGroupPermissions) { - if (actionGroups.getCEntries().containsKey(perm)) { - ret.addAll(resolve(actionGroups, perm)); - } else { - ret.add(perm); - } - } - - } else if (actionGroupAsObject instanceof ActionGroupsV6) { - for (final String perm : ((ActionGroupsV6) actionGroupAsObject).getPermissions()) { - if (actionGroups.getCEntries().containsKey(perm)) { - ret.addAll(resolve(actionGroups, perm)); - } else { - ret.add(perm); - } - } - } else { - throw new RuntimeException("Unable to handle " + actionGroupAsObject); - } - - return Collections.unmodifiableSet(ret); - } - - @Override - public Set resolvedActions(final List actions) { - final Set resolvedActions = new HashSet(); - for (String string : actions) { - final Set groups = getGroupMembers(string); - if (groups.isEmpty()) { - resolvedActions.add(string); - } else { - resolvedActions.addAll(groups); - } - } - - return Collections.unmodifiableSet(resolvedActions); - } - }; - } - - private SecurityRoles reload(SecurityDynamicConfiguration settings) { - - final Set> futures = new HashSet<>(5000); - final ExecutorService execs = Executors.newFixedThreadPool(10); - - for (Entry securityRole : settings.getCEntries().entrySet()) { - - Future future = execs.submit(new Callable() { - - @Override - public SecurityRole call() throws Exception { - SecurityRole _securityRole = new SecurityRole(securityRole.getKey()); - - if (securityRole.getValue() == null) { - return null; - } - - final Set permittedClusterActions = agr.resolvedActions(securityRole.getValue().getCluster()); - 
_securityRole.addClusterPerms(permittedClusterActions); - - // if(tenants != null) { - for (Entry tenant : securityRole.getValue().getTenants().entrySet()) { - - // if(tenant.equals(user.getName())) { - // continue; - // } - - if ("RW".equalsIgnoreCase(tenant.getValue())) { - _securityRole.addTenant(new Tenant(tenant.getKey(), true)); - } else { - _securityRole.addTenant(new Tenant(tenant.getKey(), false)); - // if(_securityRole.tenants.stream().filter(t->t.tenant.equals(tenant)).count() > 0) { //RW outperforms RO - // _securityRole.addTenant(new Tenant(tenant, false)); - // } - } - } - // } - - // final Map permittedAliasesIndices = - // securityRoleSettings.getGroups(DotPath.of("indices")); - - for (final Entry permittedAliasesIndex : securityRole.getValue().getIndices().entrySet()) { - - // final String resolvedRole = securityRole; - // final String indexPattern = permittedAliasesIndex; - - final String dls = permittedAliasesIndex.getValue().get_dls_(); - final List fls = permittedAliasesIndex.getValue().get_fls_(); - final List maskedFields = permittedAliasesIndex.getValue().get_masked_fields_(); - - IndexPattern _indexPattern = new IndexPattern(permittedAliasesIndex.getKey()); - _indexPattern.setDlsQuery(dls); - _indexPattern.addFlsFields(fls); - _indexPattern.addMaskedFields(maskedFields); - - for (Entry> type : permittedAliasesIndex.getValue().getTypes().entrySet()) { - TypePerm typePerm = new TypePerm(type.getKey()); - final List perms = type.getValue(); - typePerm.addPerms(agr.resolvedActions(perms)); - _indexPattern.addTypePerms(typePerm); - } - - _securityRole.addIndexPattern(_indexPattern); - - } - - return _securityRole; - } - }); - - futures.add(future); - } - - execs.shutdown(); - try { - execs.awaitTermination(30, TimeUnit.SECONDS); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - log.error("Thread interrupted (1) while loading roles"); - return null; - } - - try { - SecurityRoles _securityRoles = new SecurityRoles(futures.size()); - for (Future future : futures) { - _securityRoles.addSecurityRole(future.get()); - } - - return _securityRoles; - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - log.error("Thread interrupted (2) while loading roles"); - return null; - } catch (ExecutionException e) { - log.error("Error while updating roles: {}", e.getCause(), e.getCause()); - throw ExceptionsHelper.convertToOpenSearchException(e); - } - } - - // beans - - public static class SecurityRoles implements org.opensearch.security.securityconf.SecurityRoles { - - protected final Logger log = LogManager.getLogger(this.getClass()); - - final Set roles; - - private SecurityRoles(int roleCount) { - roles = new HashSet<>(roleCount); - } - - private SecurityRoles addSecurityRole(SecurityRole securityRole) { - if (securityRole != null) { - this.roles.add(securityRole); - } - return this; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((roles == null) ? 
0 : roles.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - SecurityRoles other = (SecurityRoles) obj; - if (roles == null) { - if (other.roles != null) return false; - } else if (!roles.equals(other.roles)) return false; - return true; - } - - @Override - public String toString() { - return "roles=" + roles; - } - - public Set getRoles() { - return Collections.unmodifiableSet(roles); - } - - public Set getRoleNames() { - return getRoles().stream().map(r -> r.getName()).collect(Collectors.toSet()); - } - - public SecurityRoles filter(Set keep) { - final SecurityRoles retVal = new SecurityRoles(roles.size()); - for (SecurityRole sr : roles) { - if (keep.contains(sr.getName())) { - retVal.addSecurityRole(sr); - } - } - return retVal; - } - - @Override - public EvaluatedDlsFlsConfig getDlsFls( - User user, - boolean dfmEmptyOverwritesAll, - IndexNameExpressionResolver resolver, - ClusterService cs, - NamedXContentRegistry namedXContentRegistry - ) { - - final Map> dlsQueries = new HashMap>(); - final Map> flsFields = new HashMap>(); - final Map> maskedFieldsMap = new HashMap>(); - - for (SecurityRole sr : roles) { - for (IndexPattern ip : sr.getIpatterns()) { - final Set fls = ip.getFls(); - final String dls = ip.getDlsQuery(user); - final String indexPattern = ip.getUnresolvedIndexPattern(user); - final Set maskedFields = ip.getMaskedFields(); - Set concreteIndices = new HashSet<>(); - - if ((dls != null && dls.length() > 0) - || (fls != null && fls.size() > 0) - || (maskedFields != null && maskedFields.size() > 0)) { - concreteIndices = ip.getResolvedIndexPattern(user, resolver, cs); - } - - if (dls != null && dls.length() > 0) { - - Set dlsQuery = dlsQueries.get(indexPattern); - if (dlsQuery != null) { - dlsQuery.add(dls); - } else { - dlsQueries.put(indexPattern, new HashSet<>(Arrays.asList(dls))); - } - - for (String concreteIndex : concreteIndices) { - dlsQuery = dlsQueries.get(concreteIndex); - if (dlsQuery != null) { - dlsQuery.add(dls); - } else { - dlsQueries.put(concreteIndex, new HashSet<>(Arrays.asList(dls))); - } - } - - } - - if (fls != null && fls.size() > 0) { - - Set flsField = flsFields.get(indexPattern); - if (flsField != null) { - flsField.addAll(fls); - } else { - flsFields.put(indexPattern, new HashSet<>(fls)); - } - - for (String concreteIndex : concreteIndices) { - flsField = flsFields.get(concreteIndex); - if (flsField != null) { - flsField.addAll(fls); - } else { - flsFields.put(concreteIndex, new HashSet<>(fls)); - } - } - } - - if (maskedFields != null && maskedFields.size() > 0) { - - if (maskedFieldsMap.containsKey(indexPattern)) { - maskedFieldsMap.get(indexPattern).addAll(Sets.newHashSet(maskedFields)); - } else { - maskedFieldsMap.put(indexPattern, new HashSet()); - maskedFieldsMap.get(indexPattern).addAll(Sets.newHashSet(maskedFields)); - } - - for (String concreteIndex : concreteIndices) { - if (maskedFieldsMap.containsKey(concreteIndex)) { - maskedFieldsMap.get(concreteIndex).addAll(Sets.newHashSet(maskedFields)); - } else { - maskedFieldsMap.put(concreteIndex, new HashSet()); - maskedFieldsMap.get(concreteIndex).addAll(Sets.newHashSet(maskedFields)); - } - } - } - } - } - - return new EvaluatedDlsFlsConfig(dlsQueries, flsFields, maskedFieldsMap); - } - - public boolean hasExplicitIndexPermission( - Resolved resolved, - User user, - String[] actions, - IndexNameExpressionResolver resolver, - ClusterService 
cs - ) { - final Set indicesForRequest = new HashSet<>(resolved.getAllIndicesResolved(cs, resolver)); - if (indicesForRequest.isEmpty()) { - // If no indices could be found on the request there is no way to check for the explicit permissions - return false; - } - - final Set explicitlyAllowedIndices = roles.stream() - .map(role -> role.getAllResolvedPermittedIndices(resolved, user, actions, resolver, cs, true)) - .flatMap(Collection::stream) - .collect(Collectors.toSet()); - - if (log.isDebugEnabled()) { - log.debug( - "ExplicitIndexPermission check indices for request {}, explicitly allowed indices {}", - indicesForRequest.toString(), - explicitlyAllowedIndices.toString() - ); - } - - indicesForRequest.removeAll(explicitlyAllowedIndices); - return indicesForRequest.isEmpty(); - } - - // opensearchDashboards special only, terms eval - public Set getAllPermittedIndicesForDashboards( - Resolved resolved, - User user, - String[] actions, - IndexNameExpressionResolver resolver, - ClusterService cs - ) { - Set retVal = new HashSet<>(); - for (SecurityRole sr : roles) { - retVal.addAll(sr.getAllResolvedPermittedIndices(Resolved._LOCAL_ALL, user, actions, resolver, cs, false)); - retVal.addAll(resolved.getRemoteIndices()); - } - return Collections.unmodifiableSet(retVal); - } - - // dnfof only - public Set reduce(Resolved resolved, User user, String[] actions, IndexNameExpressionResolver resolver, ClusterService cs) { - Set retVal = new HashSet<>(); - for (SecurityRole sr : roles) { - retVal.addAll(sr.getAllResolvedPermittedIndices(resolved, user, actions, resolver, cs, false)); - } - if (log.isDebugEnabled()) { - log.debug("Reduced requested resolved indices {} to permitted indices {}.", resolved, retVal.toString()); - } - return Collections.unmodifiableSet(retVal); - } - - // return true on success - public boolean get(Resolved resolved, User user, String[] actions, IndexNameExpressionResolver resolver, ClusterService cs) { - for (SecurityRole sr : roles) { - if (ConfigModelV6.impliesTypePerm(sr.getIpatterns(), resolved, user, actions, resolver, cs)) { - return true; - } - } - return false; - } - - @Override - public boolean impliesClusterPermissionPermission(String action) { - return roles.stream().filter(r -> r.impliesClusterPermission(action)).count() > 0; - } - - @Override - public boolean hasExplicitClusterPermissionPermission(String action) { - return roles.stream().map(r -> { - final WildcardMatcher m = WildcardMatcher.from(r.clusterPerms); - return m == WildcardMatcher.ANY ? 
WildcardMatcher.NONE : m; - }).filter(m -> m.test(action)).count() > 0; - } - - // rolespan - public boolean impliesTypePermGlobal( - Resolved resolved, - User user, - String[] actions, - IndexNameExpressionResolver resolver, - ClusterService cs - ) { - Set ipatterns = new HashSet(); - roles.stream().forEach(p -> ipatterns.addAll(p.getIpatterns())); - return ConfigModelV6.impliesTypePerm(ipatterns, resolved, user, actions, resolver, cs); - } - - @Override - public boolean isPermittedOnSystemIndex(String indexName) { - boolean isPatternMatched = false; - boolean isPermitted = false; - for (SecurityRole role : roles) { - for (IndexPattern ip : role.getIpatterns()) { - WildcardMatcher wildcardMatcher = WildcardMatcher.from(ip.indexPattern); - if (wildcardMatcher.test(indexName)) { - isPatternMatched = true; - } - for (TypePerm tp : ip.getTypePerms()) { - if (tp.perms.contains(ConfigConstants.SYSTEM_INDEX_PERMISSION)) { - isPermitted = true; - } - } - } - } - return isPatternMatched && isPermitted; - } - } - - public static class SecurityRole { - - private final String name; - private final Set tenants = new HashSet<>(); - private final Set ipatterns = new HashSet<>(); - private final Set clusterPerms = new HashSet<>(); - - private SecurityRole(String name) { - super(); - this.name = Objects.requireNonNull(name); - } - - private boolean impliesClusterPermission(String action) { - return WildcardMatcher.from(clusterPerms).test(action); - } - - // get indices which are permitted for the given types and actions - // dnfof + opensearchDashboards special only - private Set getAllResolvedPermittedIndices( - Resolved resolved, - User user, - String[] actions, - IndexNameExpressionResolver resolver, - ClusterService cs, - boolean matchExplicitly - ) { - - final Set retVal = new HashSet<>(); - for (IndexPattern p : ipatterns) { - // what if we cannot resolve one (for create purposes) - boolean patternMatch = false; - final Set tperms = p.getTypePerms(); - for (TypePerm tp : tperms) { - // if matchExplicitly is true we don't want to match against `*` pattern - WildcardMatcher matcher = matchExplicitly && (tp.getPerms() == WildcardMatcher.ANY) - ? 
WildcardMatcher.NONE - : tp.getTypeMatcher(); - if (matcher.matchAny(resolved.getTypes())) { - patternMatch = tp.getPerms().matchAll(actions); - } - } - if (patternMatch) { - // resolved but can contain patterns for nonexistent indices - final WildcardMatcher permitted = WildcardMatcher.from(p.getResolvedIndexPattern(user, resolver, cs)); // maybe they do - // not exist - final Set res = new HashSet<>(); - if (!resolved.isLocalAll() && !resolved.getAllIndices().contains("*") && !resolved.getAllIndices().contains("_all")) { - // resolved but can contain patterns for nonexistent indices - resolved.getAllIndices().stream().filter(permitted).forEach(res::add); - } else { - // we want all indices so just return what's permitted - - // #557 - // final String[] allIndices = resolver.concreteIndexNames(cs.state(), IndicesOptions.lenientExpandOpen(), "*"); - Arrays.stream(cs.state().metadata().getConcreteAllOpenIndices()).filter(permitted).forEach(res::add); - } - retVal.addAll(res); - } - } - - // all that we want and all thats permitted of them - return Collections.unmodifiableSet(retVal); - } - - private SecurityRole addTenant(Tenant tenant) { - if (tenant != null) { - this.tenants.add(tenant); - } - return this; - } - - private SecurityRole addIndexPattern(IndexPattern indexPattern) { - if (indexPattern != null) { - this.ipatterns.add(indexPattern); - } - return this; - } - - private SecurityRole addClusterPerms(Collection clusterPerms) { - if (clusterPerms != null) { - this.clusterPerms.addAll(clusterPerms); - } - return this; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((clusterPerms == null) ? 0 : clusterPerms.hashCode()); - result = prime * result + ((ipatterns == null) ? 0 : ipatterns.hashCode()); - result = prime * result + ((name == null) ? 0 : name.hashCode()); - result = prime * result + ((tenants == null) ? 
0 : tenants.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - SecurityRole other = (SecurityRole) obj; - if (clusterPerms == null) { - if (other.clusterPerms != null) return false; - } else if (!clusterPerms.equals(other.clusterPerms)) return false; - if (ipatterns == null) { - if (other.ipatterns != null) return false; - } else if (!ipatterns.equals(other.ipatterns)) return false; - if (name == null) { - if (other.name != null) return false; - } else if (!name.equals(other.name)) return false; - if (tenants == null) { - if (other.tenants != null) return false; - } else if (!tenants.equals(other.tenants)) return false; - return true; - } - - @Override - public String toString() { - return System.lineSeparator() - + " " - + name - + System.lineSeparator() - + " tenants=" - + tenants - + System.lineSeparator() - + " ipatterns=" - + ipatterns - + System.lineSeparator() - + " clusterPerms=" - + clusterPerms; - } - - public Set getTenants(User user) { - // TODO filter out user tenants - return Collections.unmodifiableSet(tenants); - } - - public Set getIpatterns() { - return Collections.unmodifiableSet(ipatterns); - } - - public Set getClusterPerms() { - return Collections.unmodifiableSet(clusterPerms); - } - - public String getName() { - return name; - } - - } - - // sg roles - public static class IndexPattern { - private final String indexPattern; - private String dlsQuery; - private final Set fls = new HashSet<>(); - private final Set maskedFields = new HashSet<>(); - private final Set typePerms = new HashSet<>(); - - public IndexPattern(String indexPattern) { - super(); - this.indexPattern = Objects.requireNonNull(indexPattern); - } - - public IndexPattern addFlsFields(List flsFields) { - if (flsFields != null) { - this.fls.addAll(flsFields); - } - return this; - } - - public IndexPattern addMaskedFields(List maskedFields) { - if (maskedFields != null) { - this.maskedFields.addAll(maskedFields); - } - return this; - } - - public IndexPattern addTypePerms(TypePerm typePerm) { - if (typePerm != null) { - this.typePerms.add(typePerm); - } - return this; - } - - public IndexPattern setDlsQuery(String dlsQuery) { - if (dlsQuery != null) { - this.dlsQuery = dlsQuery; - } - return this; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((dlsQuery == null) ? 0 : dlsQuery.hashCode()); - result = prime * result + ((fls == null) ? 0 : fls.hashCode()); - result = prime * result + ((maskedFields == null) ? 0 : maskedFields.hashCode()); - result = prime * result + ((indexPattern == null) ? 0 : indexPattern.hashCode()); - result = prime * result + ((typePerms == null) ? 
0 : typePerms.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - IndexPattern other = (IndexPattern) obj; - if (dlsQuery == null) { - if (other.dlsQuery != null) return false; - } else if (!dlsQuery.equals(other.dlsQuery)) return false; - if (fls == null) { - if (other.fls != null) return false; - } else if (!fls.equals(other.fls)) return false; - if (maskedFields == null) { - if (other.maskedFields != null) return false; - } else if (!maskedFields.equals(other.maskedFields)) return false; - if (indexPattern == null) { - if (other.indexPattern != null) return false; - } else if (!indexPattern.equals(other.indexPattern)) return false; - if (typePerms == null) { - if (other.typePerms != null) return false; - } else if (!typePerms.equals(other.typePerms)) return false; - return true; - } - - @Override - public String toString() { - return System.lineSeparator() - + " indexPattern=" - + indexPattern - + System.lineSeparator() - + " dlsQuery=" - + dlsQuery - + System.lineSeparator() - + " fls=" - + fls - + System.lineSeparator() - + " typePerms=" - + typePerms; - } - - public String getUnresolvedIndexPattern(User user) { - return replaceProperties(indexPattern, user); - } - - private Set getResolvedIndexPattern(User user, IndexNameExpressionResolver resolver, ClusterService cs) { - String unresolved = getUnresolvedIndexPattern(user); - WildcardMatcher matcher = WildcardMatcher.from(unresolved); - String[] resolved = null; - if (!(matcher instanceof WildcardMatcher.Exact)) { - final String[] aliasesForPermittedPattern = cs.state() - .getMetadata() - .getIndicesLookup() - .entrySet() - .stream() - .filter(e -> e.getValue().getType() == ALIAS) - .filter(e -> matcher.test(e.getKey())) - .map(e -> e.getKey()) - .toArray(String[]::new); - - if (aliasesForPermittedPattern.length > 0) { - resolved = resolver.concreteIndexNames(cs.state(), IndicesOptions.lenientExpandOpen(), aliasesForPermittedPattern); - } - } - - if (resolved == null && !unresolved.isEmpty()) { - resolved = resolver.concreteIndexNames(cs.state(), IndicesOptions.lenientExpandOpen(), unresolved); - } - if (resolved == null || resolved.length == 0) { - return ImmutableSet.of(unresolved); - } else { - return ImmutableSet.builder().addAll(Arrays.asList(resolved)).add(unresolved).build(); - } - } - - public String getDlsQuery(User user) { - return replaceProperties(dlsQuery, user); - } - - public Set getFls() { - return Collections.unmodifiableSet(fls); - } - - public Set getMaskedFields() { - return Collections.unmodifiableSet(maskedFields); - } - - public Set getTypePerms() { - return Collections.unmodifiableSet(typePerms); - } - - } - - public static class TypePerm { - private final WildcardMatcher typeMatcher; - private final Set perms = new HashSet<>(); - - private TypePerm(String typePattern) { - this.typeMatcher = WildcardMatcher.ANY; - } - - private TypePerm addPerms(Collection perms) { - if (perms != null) { - this.perms.addAll(perms); - } - return this; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((perms == null) ? 0 : perms.hashCode()); - result = prime * result + ((typeMatcher == null) ? 
0 : typeMatcher.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - TypePerm other = (TypePerm) obj; - if (perms == null) { - if (other.perms != null) return false; - } else if (!perms.equals(other.perms)) return false; - if (typeMatcher == null) { - if (other.typeMatcher != null) return false; - } else if (!typeMatcher.equals(other.typeMatcher)) return false; - return true; - } - - @Override - public String toString() { - return System.lineSeparator() - + " typePattern=" - + typeMatcher - + System.lineSeparator() - + " perms=" - + perms; - } - - public WildcardMatcher getTypeMatcher() { - return typeMatcher; - } - - public WildcardMatcher getPerms() { - return WildcardMatcher.from(perms); - } - - } - - public static class Tenant { - private final String tenant; - private final boolean readWrite; - - private Tenant(String tenant, boolean readWrite) { - super(); - this.tenant = tenant; - this.readWrite = readWrite; - } - - public String getTenant() { - return tenant; - } - - public boolean isReadWrite() { - return readWrite; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + (readWrite ? 1231 : 1237); - result = prime * result + ((tenant == null) ? 0 : tenant.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - Tenant other = (Tenant) obj; - if (readWrite != other.readWrite) return false; - if (tenant == null) { - if (other.tenant != null) return false; - } else if (!tenant.equals(other.tenant)) return false; - return true; - } - - @Override - public String toString() { - return System.lineSeparator() - + " tenant=" - + tenant - + System.lineSeparator() - + " readWrite=" - + readWrite; - } - } - - private static String replaceProperties(String orig, User user) { - - if (user == null || orig == null) { - return orig; - } - - orig = orig.replace("${user.name}", user.getName()).replace("${user_name}", user.getName()); - orig = replaceRoles(orig, user); - for (Entry entry : user.getCustomAttributesMap().entrySet()) { - if (entry == null || entry.getKey() == null || entry.getValue() == null) { - continue; - } - orig = orig.replace("${" + entry.getKey() + "}", entry.getValue()); - orig = orig.replace("${" + entry.getKey().replace('.', '_') + "}", entry.getValue()); - } - return orig; - } - - private static String replaceRoles(final String orig, final User user) { - String retVal = orig; - if (orig.contains("${user.roles}") || orig.contains("${user_roles}")) { - final String commaSeparatedRoles = toQuotedCommaSeparatedString(user.getRoles()); - retVal = orig.replace("${user.roles}", commaSeparatedRoles).replace("${user_roles}", commaSeparatedRoles); - } - return retVal; - } - - private static String toQuotedCommaSeparatedString(final Set roles) { - return Joiner.on(',').join(Iterables.transform(roles, s -> { - return new StringBuilder(s.length() + 2).append('"').append(s).append('"').toString(); - })); - } - - private static final class IndexMatcherAndTypePermissions { - private static final Logger log = LogManager.getLogger(IndexMatcherAndTypePermissions.class); - - private final WildcardMatcher matcher; - private final Set typePerms; - - public IndexMatcherAndTypePermissions(Set pattern, Set typePerms) { - this.matcher = 
WildcardMatcher.from(pattern); - this.typePerms = typePerms; - } - - private static String b2s(boolean matches) { - return matches ? "matches" : "does not match"; - } - - public boolean matches(String index, String type, String action) { - final boolean isDebugEnabled = log.isDebugEnabled(); - boolean matchIndex = matcher.test(index); - if (isDebugEnabled) { - log.debug("index {} {} index pattern {}", index, b2s(matchIndex), matcher); - } - if (matchIndex) { - return typePerms.stream().anyMatch(tp -> { - boolean matchType = tp.getTypeMatcher().test(type); - if (isDebugEnabled) { - log.debug("type {} {} type pattern {}", type, b2s(matchType), tp.getTypeMatcher()); - } - if (matchType) { - boolean matchAction = tp.getPerms().test(action); - if (isDebugEnabled) { - log.debug("action {} {} action pattern {}", action, b2s(matchAction), tp.getPerms()); - } - return matchAction; - } - return false; - }); - } - return false; - } - } - - private static boolean impliesTypePerm( - Set ipatterns, - Resolved resolved, - User user, - String[] requestedActions, - IndexNameExpressionResolver resolver, - ClusterService cs - ) { - IndexMatcherAndTypePermissions[] indexMatcherAndTypePermissions; - if (resolved.isLocalAll()) { - // Only let localAll pass if there is an explicit privilege for a * index pattern - indexMatcherAndTypePermissions = ipatterns.stream() - .filter(indexPattern -> "*".equals(indexPattern.getUnresolvedIndexPattern(user))) - .map(p -> new IndexMatcherAndTypePermissions(p.getResolvedIndexPattern(user, resolver, cs), p.getTypePerms())) - .toArray(IndexMatcherAndTypePermissions[]::new); - } else { - indexMatcherAndTypePermissions = ipatterns.stream() - .map(p -> new IndexMatcherAndTypePermissions(p.getResolvedIndexPattern(user, resolver, cs), p.getTypePerms())) - .toArray(IndexMatcherAndTypePermissions[]::new); - } - - return resolved.getAllIndices() - .stream() - .allMatch( - index -> resolved.getTypes() - .stream() - .allMatch( - type -> Arrays.stream(requestedActions) - .allMatch( - action -> Arrays.stream(indexMatcherAndTypePermissions) - .anyMatch(ipatp -> ipatp.matches(index, type, action)) - ) - ) - ); - } - - // ####### - - private class TenantHolder { - - private SetMultimap> tenantsMM = null; - - public TenantHolder(SecurityDynamicConfiguration roles) { - final Set>>>> futures = new HashSet<>(roles.getCEntries().size()); - - final ExecutorService execs = Executors.newFixedThreadPool(10); - - for (Entry securityRole : roles.getCEntries().entrySet()) { - - if (securityRole.getValue() == null) { - continue; - } - - Future>>> future = execs.submit( - new Callable>>>() { - @Override - public Tuple>> call() throws Exception { - final Set> tuples = new HashSet<>(); - final Map tenants = securityRole.getValue().getTenants(); - - if (tenants != null) { - - for (String tenant : tenants.keySet()) { - - if ("RW".equalsIgnoreCase(tenants.get(tenant))) { - // RW - tuples.add(new Tuple(tenant, true)); - } else { - // RO - // if(!tenantsMM.containsValue(value)) { //RW outperforms RO - tuples.add(new Tuple(tenant, false)); - // } - } - } - } - - return new Tuple>>(securityRole.getKey(), tuples); - } - } - ); - - futures.add(future); - - } - - execs.shutdown(); - try { - execs.awaitTermination(30, TimeUnit.SECONDS); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - log.error("Thread interrupted (1) while loading roles"); - return; - } - - try { - final SetMultimap> tenantsMM_ = SetMultimapBuilder.hashKeys(futures.size()) - .hashSetValues(16) - .build(); - - for (Future>>> 
future : futures) { - Tuple>> result = future.get(); - tenantsMM_.putAll(result.v1(), result.v2()); - } - - tenantsMM = tenantsMM_; - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - log.error("Thread interrupted (2) while loading roles"); - return; - } catch (ExecutionException e) { - log.error("Error while updating roles: {}", e.getCause(), e.getCause()); - throw ExceptionsHelper.convertToOpenSearchException(e); - } - - } - - public Map mapTenants(final User user, Set roles) { - - if (user == null || tenantsMM == null) { - return Collections.emptyMap(); - } - - final Map result = new HashMap<>(roles.size()); - result.put(user.getName(), true); - - tenantsMM.entries() - .stream() - .filter(e -> roles.contains(e.getKey())) - .filter(e -> !user.getName().equals(e.getValue().v1())) - .forEach(e -> { - final String tenant = e.getValue().v1(); - final boolean rw = e.getValue().v2(); - - if (rw || !result.containsKey(tenant)) { // RW outperforms RO - result.put(tenant, rw); - } - }); - - return Collections.unmodifiableMap(result); - } - } - - private class RoleMappingHolder { - - private ListMultimap users; - private ListMultimap, String> abars; - private ListMultimap bars; - private ListMultimap hosts; - private final String hostResolverMode; - - private List userMatchers; - private List barMatchers; - private List hostMatchers; - - private RoleMappingHolder(final SecurityDynamicConfiguration rolesMapping, final String hostResolverMode) { - - this.hostResolverMode = hostResolverMode; - - if (rolesMapping != null) { - - users = ArrayListMultimap.create(); - abars = ArrayListMultimap.create(); - bars = ArrayListMultimap.create(); - hosts = ArrayListMultimap.create(); - - for (final Entry roleMap : rolesMapping.getCEntries().entrySet()) { - final String roleMapKey = roleMap.getKey(); - final RoleMappingsV6 roleMapValue = roleMap.getValue(); - - for (String u : roleMapValue.getUsers()) { - users.put(u, roleMapKey); - } - - final Set abar = new HashSet<>(roleMapValue.getAndBackendroles()); - - if (!abar.isEmpty()) { - abars.put(WildcardMatcher.matchers(abar), roleMapKey); - } - - for (String bar : roleMapValue.getBackendroles()) { - bars.put(bar, roleMapKey); - } - - for (String host : roleMapValue.getHosts()) { - hosts.put(host, roleMapKey); - } - } - - userMatchers = WildcardMatcher.matchers(users.keySet()); - barMatchers = WildcardMatcher.matchers(bars.keySet()); - hostMatchers = WildcardMatcher.matchers(hosts.keySet()); - } - } - - private Set map(final User user, final TransportAddress caller) { - - if (user == null || users == null || abars == null || bars == null || hosts == null) { - return Collections.emptySet(); - } - - final Set securityRoles = new HashSet<>(); - - if (rolesMappingResolution == ConfigConstants.RolesMappingResolution.BOTH - || rolesMappingResolution == ConfigConstants.RolesMappingResolution.BACKENDROLES_ONLY) { - if (log.isDebugEnabled()) { - log.debug("Pass backendroles from {}", user); - } - securityRoles.addAll(user.getRoles()); - } - - if (((rolesMappingResolution == ConfigConstants.RolesMappingResolution.BOTH - || rolesMappingResolution == ConfigConstants.RolesMappingResolution.MAPPING_ONLY))) { - - for (String p : WildcardMatcher.getAllMatchingPatterns(userMatchers, user.getName())) { - securityRoles.addAll(users.get(p)); - } - - for (String p : WildcardMatcher.getAllMatchingPatterns(barMatchers, user.getRoles())) { - securityRoles.addAll(bars.get(p)); - } - - for (List patterns : abars.keySet()) { - if (patterns.stream().allMatch(p -> 
p.matchAny(user.getRoles()))) { - securityRoles.addAll(abars.get(patterns)); - } - } - - if (caller != null) { - // IPV4 or IPv6 (compressed and without scope identifiers) - final String ipAddress = caller.getAddress(); - - final List hostMatchers = WildcardMatcher.matchers(hosts.keySet()); - for (String p : WildcardMatcher.getAllMatchingPatterns(hostMatchers, ipAddress)) { - securityRoles.addAll(hosts.get(p)); - } - - if (caller.address() != null - && (hostResolverMode.equalsIgnoreCase("ip-hostname") || hostResolverMode.equalsIgnoreCase("ip-hostname-lookup"))) { - final String hostName = caller.address().getHostString(); - - for (String p : WildcardMatcher.getAllMatchingPatterns(hostMatchers, hostName)) { - securityRoles.addAll(hosts.get(p)); - } - } - - if (caller.address() != null && hostResolverMode.equalsIgnoreCase("ip-hostname-lookup")) { - - final String resolvedHostName = caller.address().getHostName(); - - for (String p : WildcardMatcher.getAllMatchingPatterns(hostMatchers, resolvedHostName)) { - securityRoles.addAll(hosts.get(p)); - } - } - } - } - - return Collections.unmodifiableSet(securityRoles); - - } - } - - public Map mapTenants(User user, Set roles) { - return tenantHolder.mapTenants(user, roles); - } - - public Set mapSecurityRoles(User user, TransportAddress caller) { - return roleMappingHolder.map(user, caller); - } -} diff --git a/src/main/java/org/opensearch/security/securityconf/ConfigModelV7.java b/src/main/java/org/opensearch/security/securityconf/ConfigModelV7.java index e3f2ff7c85..84c32e2f97 100644 --- a/src/main/java/org/opensearch/security/securityconf/ConfigModelV7.java +++ b/src/main/java/org/opensearch/security/securityconf/ConfigModelV7.java @@ -17,15 +17,12 @@ package org.opensearch.security.securityconf; -import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Objects; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; @@ -33,12 +30,10 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; -import java.util.function.Function; import java.util.regex.Pattern; import java.util.stream.Collectors; import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.ImmutableSet; import com.google.common.collect.ListMultimap; import com.google.common.collect.MultimapBuilder.SetMultimapBuilder; import com.google.common.collect.SetMultimap; @@ -46,35 +41,24 @@ import org.apache.logging.log4j.Logger; import org.opensearch.ExceptionsHelper; -import org.opensearch.action.support.IndicesOptions; -import org.opensearch.cluster.metadata.IndexNameExpressionResolver; -import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.collect.Tuple; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.set.Sets; import org.opensearch.core.common.transport.TransportAddress; -import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.security.privileges.UserAttributes; -import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; import org.opensearch.security.securityconf.impl.v7.ActionGroupsV7; import org.opensearch.security.securityconf.impl.v7.RoleMappingsV7; import 
org.opensearch.security.securityconf.impl.v7.RoleV7; -import org.opensearch.security.securityconf.impl.v7.RoleV7.Index; import org.opensearch.security.securityconf.impl.v7.TenantV7; import org.opensearch.security.support.ConfigConstants; import org.opensearch.security.support.WildcardMatcher; import org.opensearch.security.user.User; -import static org.opensearch.cluster.metadata.IndexAbstraction.Type.ALIAS; -import static org.opensearch.cluster.metadata.IndexAbstraction.Type.DATA_STREAM; - public class ConfigModelV7 extends ConfigModel { protected final Logger log = LogManager.getLogger(this.getClass()); private ConfigConstants.RolesMappingResolution rolesMappingResolution; private FlattenedActionGroups actionGroups; - private SecurityRoles securityRoles = null; private TenantHolder tenantHolder; private RoleMappingHolder roleMappingHolder; private SecurityDynamicConfiguration roles; @@ -105,7 +89,6 @@ public ConfigModelV7( } actionGroups = actiongroups != null ? new FlattenedActionGroups(actiongroups) : FlattenedActionGroups.EMPTY; - securityRoles = reload(roles); tenantHolder = new TenantHolder(roles, tenants); roleMappingHolder = new RoleMappingHolder(rolemappings, dcm.getHostsResolverMode()); } @@ -114,906 +97,6 @@ public Set getAllConfiguredTenantNames() { return Collections.unmodifiableSet(tenants.getCEntries().keySet()); } - public SecurityRoles getSecurityRoles() { - return securityRoles; - } - - private SecurityRoles reload(SecurityDynamicConfiguration settings) { - - final Set> futures = new HashSet<>(5000); - final ExecutorService execs = Executors.newFixedThreadPool(10); - - for (Entry securityRole : settings.getCEntries().entrySet()) { - - Future future = execs.submit(new Callable() { - - @Override - public SecurityRole call() throws Exception { - SecurityRole.Builder _securityRole = new SecurityRole.Builder(securityRole.getKey()); - - if (securityRole.getValue() == null) { - return null; - } - - final Set permittedClusterActions = actionGroups.resolve(securityRole.getValue().getCluster_permissions()); - _securityRole.addClusterPerms(permittedClusterActions); - - /*for(RoleV7.Tenant tenant: securityRole.getValue().getTenant_permissions()) { - - //if(tenant.equals(user.getName())) { - // continue; - //} - - if(isTenantsRw(tenant)) { - _securityRole.addTenant(new Tenant(tenant.getKey(), true)); - } else { - _securityRole.addTenant(new Tenant(tenant.getKey(), false)); - } - }*/ - - for (final Index permittedAliasesIndex : securityRole.getValue().getIndex_permissions()) { - - final String dls = permittedAliasesIndex.getDls(); - final List fls = permittedAliasesIndex.getFls(); - final List maskedFields = permittedAliasesIndex.getMasked_fields(); - - for (String pat : permittedAliasesIndex.getIndex_patterns()) { - IndexPattern _indexPattern = new IndexPattern(pat); - _indexPattern.setDlsQuery(dls); - _indexPattern.addFlsFields(fls); - _indexPattern.addMaskedFields(maskedFields); - _indexPattern.addPerm(actionGroups.resolve(permittedAliasesIndex.getAllowed_actions())); - - /*for(Entry> type: permittedAliasesIndex.getValue().getTypes(-).entrySet()) { - TypePerm typePerm = new TypePerm(type.getKey()); - final List perms = type.getValue(); - typePerm.addPerms(agr.resolvedActions(perms)); - _indexPattern.addTypePerms(typePerm); - }*/ - - _securityRole.addIndexPattern(_indexPattern); - - } - - } - - return _securityRole.build(); - } - }); - - futures.add(future); - } - - execs.shutdown(); - try { - execs.awaitTermination(30, TimeUnit.SECONDS); - } catch (InterruptedException e) { - 
Thread.currentThread().interrupt(); - log.error("Thread interrupted (1) while loading roles"); - return null; - } - - try { - SecurityRoles _securityRoles = new SecurityRoles(futures.size()); - for (Future future : futures) { - _securityRoles.addSecurityRole(future.get()); - } - - return _securityRoles; - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - log.error("Thread interrupted (2) while loading roles"); - return null; - } catch (ExecutionException e) { - log.error("Error while updating roles: {}", e.getCause(), e.getCause()); - throw ExceptionsHelper.convertToOpenSearchException(e); - } - } - - // beans - - public static class SecurityRoles implements org.opensearch.security.securityconf.SecurityRoles { - - protected final Logger log = LogManager.getLogger(this.getClass()); - - final Set roles; - - private SecurityRoles(int roleCount) { - roles = new HashSet<>(roleCount); - } - - private SecurityRoles addSecurityRole(SecurityRole securityRole) { - if (securityRole != null) { - this.roles.add(securityRole); - } - return this; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((roles == null) ? 0 : roles.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - SecurityRoles other = (SecurityRoles) obj; - if (roles == null) { - if (other.roles != null) return false; - } else if (!roles.equals(other.roles)) return false; - return true; - } - - @Override - public String toString() { - return "roles=" + roles; - } - - public Set getRoles() { - return Collections.unmodifiableSet(roles); - } - - public Set getRoleNames() { - return getRoles().stream().map(r -> r.getName()).collect(Collectors.toSet()); - } - - public SecurityRoles filter(Set keep) { - final SecurityRoles retVal = new SecurityRoles(roles.size()); - for (SecurityRole sr : roles) { - if (keep.contains(sr.getName())) { - retVal.addSecurityRole(sr); - } - } - return retVal; - } - - @Override - public EvaluatedDlsFlsConfig getDlsFls( - User user, - boolean dfmEmptyOverwritesAll, - IndexNameExpressionResolver resolver, - ClusterService cs, - NamedXContentRegistry namedXContentRegistry - ) { - - if (!containsDlsFlsConfig()) { - if (log.isDebugEnabled()) { - log.debug("No fls or dls found for {} in {} security roles", user, roles.size()); - } - - return EvaluatedDlsFlsConfig.EMPTY; - } - - Map> dlsQueriesByIndex = new HashMap>(); - Map> flsFields = new HashMap>(); - Map> maskedFieldsMap = new HashMap>(); - - // we capture all concrete indices that do not have any - // DLS/FLS/Masked Fields restrictions. If the dfm_empty_overwrites_all - // switch is enabled, this trumps any restrictions on those indices - // that may be imposed by other roles. 
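The precedence described in the comment above amounts to a set subtraction: once an index is reachable through at least one role without a DLS/FLS/masked-field restriction, it is removed from the per-index restriction map, so the unrestricted grant wins. A small sketch with made-up index names (java.util collections assumed; not part of this patch):

Map<String, Set<String>> dlsQueriesByIndex = new HashMap<>();
// Role A attaches a DLS query to logs-2023 and logs-2024:
dlsQueriesByIndex.computeIfAbsent("logs-2023", k -> new HashSet<>()).add("{\"term\":{\"dept\":\"a\"}}");
dlsQueriesByIndex.computeIfAbsent("logs-2024", k -> new HashSet<>()).add("{\"term\":{\"dept\":\"a\"}}");
// Role B grants logs-2024 without any DLS query:
Set<String> noDlsConcreteIndices = Set.of("logs-2024");
// With dfm_empty_overwrites_all enabled, the unrestricted grant removes the restriction:
dlsQueriesByIndex.keySet().removeAll(noDlsConcreteIndices);
// dlsQueriesByIndex now only contains logs-2023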
- Set noDlsConcreteIndices = new HashSet<>(); - Set noFlsConcreteIndices = new HashSet<>(); - Set noMaskedFieldConcreteIndices = new HashSet<>(); - - for (SecurityRole role : roles) { - for (IndexPattern ip : role.getIpatterns()) { - final Set concreteIndices = ip.concreteIndexNames(user, resolver, cs, true); - String dls = ip.getDlsQuery(user); - - if (dls != null && dls.length() > 0) { - - for (String concreteIndex : concreteIndices) { - dlsQueriesByIndex.computeIfAbsent(concreteIndex, (key) -> new HashSet()).add(dls); - } - } else if (dfmEmptyOverwritesAll) { - noDlsConcreteIndices.addAll(concreteIndices); - } - - Set fls = ip.getFls(); - - if (fls != null && fls.size() > 0) { - - for (String concreteIndex : concreteIndices) { - if (flsFields.containsKey(concreteIndex)) { - flsFields.get(concreteIndex).addAll(Sets.newHashSet(fls)); - } else { - flsFields.put(concreteIndex, new HashSet()); - flsFields.get(concreteIndex).addAll(Sets.newHashSet(fls)); - } - } - } else if (dfmEmptyOverwritesAll) { - noFlsConcreteIndices.addAll(concreteIndices); - } - - Set maskedFields = ip.getMaskedFields(); - - if (maskedFields != null && maskedFields.size() > 0) { - - for (String concreteIndex : concreteIndices) { - if (maskedFieldsMap.containsKey(concreteIndex)) { - maskedFieldsMap.get(concreteIndex).addAll(Sets.newHashSet(maskedFields)); - } else { - maskedFieldsMap.put(concreteIndex, new HashSet()); - maskedFieldsMap.get(concreteIndex).addAll(Sets.newHashSet(maskedFields)); - } - } - } else if (dfmEmptyOverwritesAll) { - noMaskedFieldConcreteIndices.addAll(concreteIndices); - } - } - } - if (dfmEmptyOverwritesAll) { - if (log.isDebugEnabled()) { - log.debug( - "Index patterns with no dls queries attached: {} - They will be removed from {}", - noDlsConcreteIndices, - dlsQueriesByIndex.keySet() - ); - log.debug( - "Index patterns with no fls fields attached: {} - They will be removed from {}", - noFlsConcreteIndices, - flsFields.keySet() - ); - log.debug( - "Index patterns with no masked fields attached: {} - They will be removed from {}", - noMaskedFieldConcreteIndices, - maskedFieldsMap.keySet() - ); - } - // removing the indices that do not have D/M/F restrictions - // from the keySet will also modify the underlying map - dlsQueriesByIndex.keySet().removeAll(noDlsConcreteIndices); - flsFields.keySet().removeAll(noFlsConcreteIndices); - maskedFieldsMap.keySet().removeAll(noMaskedFieldConcreteIndices); - } - - return new EvaluatedDlsFlsConfig(dlsQueriesByIndex, flsFields, maskedFieldsMap); - } - - // opensearchDashboards special only, terms eval - public Set getAllPermittedIndicesForDashboards( - Resolved resolved, - User user, - String[] actions, - IndexNameExpressionResolver resolver, - ClusterService cs - ) { - Set retVal = new HashSet<>(); - for (SecurityRole sr : roles) { - retVal.addAll(sr.getAllResolvedPermittedIndices(Resolved._LOCAL_ALL, user, actions, resolver, cs, Function.identity())); - retVal.addAll(resolved.getRemoteIndices()); - } - return Collections.unmodifiableSet(retVal); - } - - // dnfof only - public Set reduce(Resolved resolved, User user, String[] actions, IndexNameExpressionResolver resolver, ClusterService cs) { - Set retVal = new HashSet<>(); - for (SecurityRole sr : roles) { - retVal.addAll(sr.getAllResolvedPermittedIndices(resolved, user, actions, resolver, cs, Function.identity())); - } - if (log.isDebugEnabled()) { - log.debug("Reduced requested resolved indices {} to permitted indices {}.", resolved, retVal.toString()); - } - return Collections.unmodifiableSet(retVal); - 
} - - // return true on success - public boolean get(Resolved resolved, User user, String[] actions, IndexNameExpressionResolver resolver, ClusterService cs) { - for (SecurityRole sr : roles) { - if (ConfigModelV7.impliesTypePerm(sr.getIpatterns(), resolved, user, actions, resolver, cs)) { - return true; - } - } - return false; - } - - @Override - public boolean impliesClusterPermissionPermission(String action) { - return roles.stream().filter(r -> r.impliesClusterPermission(action)).count() > 0; - } - - @Override - public boolean hasExplicitClusterPermissionPermission(String action) { - return roles.stream().map(r -> matchExplicitly(r.clusterPerms)).filter(m -> m.test(action)).count() > 0; - } - - private static WildcardMatcher matchExplicitly(final WildcardMatcher matcher) { - return matcher == WildcardMatcher.ANY ? WildcardMatcher.NONE : matcher; - } - - @Override - public boolean hasExplicitIndexPermission( - final Resolved resolved, - final User user, - final String[] actions, - final IndexNameExpressionResolver resolver, - final ClusterService cs - ) { - - final Set indicesForRequest = new HashSet<>(resolved.getAllIndicesResolved(cs, resolver)); - if (indicesForRequest.isEmpty()) { - // If no indices could be found on the request there is no way to check for the explicit permissions - return false; - } - - final Set explicitlyAllowedIndices = roles.stream() - .map(role -> role.getAllResolvedPermittedIndices(resolved, user, actions, resolver, cs, SecurityRoles::matchExplicitly)) - .flatMap(Collection::stream) - .collect(Collectors.toSet()); - - if (log.isDebugEnabled()) { - log.debug( - "ExplicitIndexPermission check indices for request {}, explicitly allowed indices {}", - indicesForRequest.toString(), - explicitlyAllowedIndices.toString() - ); - } - - indicesForRequest.removeAll(explicitlyAllowedIndices); - return indicesForRequest.isEmpty(); - } - - // rolespan - public boolean impliesTypePermGlobal( - Resolved resolved, - User user, - String[] actions, - IndexNameExpressionResolver resolver, - ClusterService cs - ) { - Set ipatterns = new HashSet(); - roles.stream().forEach(p -> ipatterns.addAll(p.getIpatterns())); - return ConfigModelV7.impliesTypePerm(ipatterns, resolved, user, actions, resolver, cs); - } - - private boolean containsDlsFlsConfig() { - for (SecurityRole role : roles) { - for (IndexPattern ip : role.getIpatterns()) { - if (ip.hasDlsQuery() || ip.hasFlsFields() || ip.hasMaskedFields()) { - return true; - } - } - } - - return false; - } - - @Override - public boolean isPermittedOnSystemIndex(String indexName) { - boolean isPatternMatched = false; - boolean isPermitted = false; - for (SecurityRole role : roles) { - for (IndexPattern ip : role.getIpatterns()) { - WildcardMatcher wildcardMatcher = WildcardMatcher.from(ip.indexPattern); - if (wildcardMatcher.test(indexName)) { - isPatternMatched = true; - } - if (ip.perms.contains(ConfigConstants.SYSTEM_INDEX_PERMISSION)) { - isPermitted = true; - } - } - } - return isPatternMatched && isPermitted; - } - } - - public static class SecurityRole { - private final String name; - private final Set ipatterns; - private final WildcardMatcher clusterPerms; - - public static final class Builder { - private final String name; - private final Set clusterPerms = new HashSet<>(); - private final Set ipatterns = new HashSet<>(); - - public Builder(String name) { - this.name = Objects.requireNonNull(name); - } - - public Builder addIndexPattern(IndexPattern indexPattern) { - this.ipatterns.add(indexPattern); - return this; - } - - 
public Builder addClusterPerms(Collection clusterPerms) { - if (clusterPerms != null) { - this.clusterPerms.addAll(clusterPerms); - } - return this; - } - - public SecurityRole build() { - return new SecurityRole(name, ipatterns, WildcardMatcher.from(clusterPerms)); - } - } - - private SecurityRole(String name, Set ipatterns, WildcardMatcher clusterPerms) { - this.name = Objects.requireNonNull(name); - this.ipatterns = ipatterns; - this.clusterPerms = clusterPerms; - } - - private boolean impliesClusterPermission(String action) { - return clusterPerms.test(action); - } - - // get indices which are permitted for the given types and actions - // dnfof + opensearchDashboards special only - private Set getAllResolvedPermittedIndices( - Resolved resolved, - User user, - String[] actions, - IndexNameExpressionResolver resolver, - ClusterService cs, - Function matcherModification - ) { - - final Set retVal = new HashSet<>(); - for (IndexPattern p : ipatterns) { - // what if we cannot resolve one (for create purposes) - final boolean patternMatch = matcherModification.apply(p.getPerms()).matchAll(actions); - - // final Set tperms = p.getTypePerms(); - // for (TypePerm tp : tperms) { - // if (WildcardMatcher.matchAny(tp.typePattern, resolved.getTypes(-).toArray(new String[0]))) { - // patternMatch = WildcardMatcher.matchAll(tp.perms.toArray(new String[0]), actions); - // } - // } - if (patternMatch) { - // resolved but can contain patterns for nonexistent indices - final WildcardMatcher permitted = WildcardMatcher.from(p.attemptResolveIndexNames(user, resolver, cs)); // maybe they do - // not exist - final Set res = new HashSet<>(); - if (!resolved.isLocalAll() && !resolved.getAllIndices().contains("*") && !resolved.getAllIndices().contains("_all")) { - // resolved but can contain patterns for nonexistent indices - resolved.getAllIndices().stream().filter(permitted).forEach(res::add); - } else { - // we want all indices so just return what's permitted - - // #557 - // final String[] allIndices = resolver.concreteIndexNames(cs.state(), IndicesOptions.lenientExpandOpen(), "*"); - final String[] allIndices = cs.state().metadata().getConcreteAllOpenIndices(); - Arrays.stream(allIndices).filter(permitted).forEach(res::add); - } - retVal.addAll(res); - } - } - - // all that we want and all thats permitted of them - return Collections.unmodifiableSet(retVal); - } - - /*private SecurityRole addTenant(Tenant tenant) { - if (tenant != null) { - this.tenants.add(tenant); - } - return this; - }*/ - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((clusterPerms == null) ? 0 : clusterPerms.hashCode()); - result = prime * result + ((ipatterns == null) ? 0 : ipatterns.hashCode()); - result = prime * result + ((name == null) ? 0 : name.hashCode()); - // result = prime * result + ((tenants == null) ? 
0 : tenants.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - SecurityRole other = (SecurityRole) obj; - if (clusterPerms == null) { - if (other.clusterPerms != null) return false; - } else if (!clusterPerms.equals(other.clusterPerms)) return false; - if (ipatterns == null) { - if (other.ipatterns != null) return false; - } else if (!ipatterns.equals(other.ipatterns)) return false; - if (name == null) { - if (other.name != null) return false; - } else if (!name.equals(other.name)) return false; - // if (tenants == null) { - // if (other.tenants != null) - // return false; - // } else if (!tenants.equals(other.tenants)) - // return false; - return true; - } - - @Override - public String toString() { - return System.lineSeparator() - + " " - + name - + System.lineSeparator() - + " ipatterns=" - + ipatterns - + System.lineSeparator() - + " clusterPerms=" - + clusterPerms; - } - - // public Set getTenants(User user) { - // //TODO filter out user tenants - // return Collections.unmodifiableSet(tenants); - // } - - public Set getIpatterns() { - return Collections.unmodifiableSet(ipatterns); - } - - public String getName() { - return name; - } - - } - - // sg roles - public static class IndexPattern { - private final String indexPattern; - private String dlsQuery; - private final Set fls = new HashSet<>(); - private final Set maskedFields = new HashSet<>(); - private final Set perms = new HashSet<>(); - - public IndexPattern(String indexPattern) { - super(); - this.indexPattern = Objects.requireNonNull(indexPattern); - } - - public IndexPattern addFlsFields(List flsFields) { - if (flsFields != null) { - this.fls.addAll(flsFields); - } - return this; - } - - public IndexPattern addMaskedFields(List maskedFields) { - if (maskedFields != null) { - this.maskedFields.addAll(maskedFields); - } - return this; - } - - public IndexPattern addPerm(Set perms) { - if (perms != null) { - this.perms.addAll(perms); - } - return this; - } - - public IndexPattern setDlsQuery(String dlsQuery) { - if (dlsQuery != null) { - this.dlsQuery = dlsQuery; - } - return this; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((dlsQuery == null) ? 0 : dlsQuery.hashCode()); - result = prime * result + ((fls == null) ? 0 : fls.hashCode()); - result = prime * result + ((maskedFields == null) ? 0 : maskedFields.hashCode()); - result = prime * result + ((indexPattern == null) ? 0 : indexPattern.hashCode()); - result = prime * result + ((perms == null) ? 
0 : perms.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - IndexPattern other = (IndexPattern) obj; - if (dlsQuery == null) { - if (other.dlsQuery != null) return false; - } else if (!dlsQuery.equals(other.dlsQuery)) return false; - if (fls == null) { - if (other.fls != null) return false; - } else if (!fls.equals(other.fls)) return false; - if (maskedFields == null) { - if (other.maskedFields != null) return false; - } else if (!maskedFields.equals(other.maskedFields)) return false; - if (indexPattern == null) { - if (other.indexPattern != null) return false; - } else if (!indexPattern.equals(other.indexPattern)) return false; - if (perms == null) { - if (other.perms != null) return false; - } else if (!perms.equals(other.perms)) return false; - return true; - } - - @Override - public String toString() { - return System.lineSeparator() - + " indexPattern=" - + indexPattern - + System.lineSeparator() - + " dlsQuery=" - + dlsQuery - + System.lineSeparator() - + " fls=" - + fls - + System.lineSeparator() - + " perms=" - + perms; - } - - public String getUnresolvedIndexPattern(User user) { - return UserAttributes.replaceProperties(indexPattern, user); - } - - /** Finds the indices accessible to the user and resolves them to concrete names */ - public Set concreteIndexNames( - final User user, - final IndexNameExpressionResolver resolver, - final ClusterService cs, - final boolean includeClosed - ) { - return getResolvedIndexPattern(user, resolver, cs, false, includeClosed); - } - - /** Finds the indices accessible to the user and resolves them to concrete names */ - public Set concreteIndexNames(final User user, final IndexNameExpressionResolver resolver, final ClusterService cs) { - return getResolvedIndexPattern(user, resolver, cs, false, false); - } - - /** Finds the indices accessible to the user and attempts to resolve them to names, also includes any unresolved names */ - public Set attemptResolveIndexNames(final User user, final IndexNameExpressionResolver resolver, final ClusterService cs) { - return getResolvedIndexPattern(user, resolver, cs, true, false); - } - - public Set getResolvedIndexPattern( - final User user, - final IndexNameExpressionResolver resolver, - final ClusterService cs, - final boolean appendUnresolved, - final boolean includeClosed - ) { - final String unresolved = getUnresolvedIndexPattern(user); - final ImmutableSet.Builder resolvedIndices = new ImmutableSet.Builder<>(); - final IndicesOptions expansionMode = includeClosed ? 
IndicesOptions.lenientExpand() : IndicesOptions.lenientExpandOpen(); - - final WildcardMatcher matcher = WildcardMatcher.from(unresolved); - boolean includeDataStreams = true; - if (!(matcher instanceof WildcardMatcher.Exact)) { - final String[] aliasesAndDataStreamsForPermittedPattern = cs.state() - .getMetadata() - .getIndicesLookup() - .entrySet() - .stream() - .filter(e -> (e.getValue().getType() == ALIAS) || (e.getValue().getType() == DATA_STREAM)) - .filter(e -> matcher.test(e.getKey())) - .map(e -> e.getKey()) - .toArray(String[]::new); - if (aliasesAndDataStreamsForPermittedPattern.length > 0) { - final String[] resolvedAliasesAndDataStreamIndices = resolver.concreteIndexNames( - cs.state(), - expansionMode, - includeDataStreams, - aliasesAndDataStreamsForPermittedPattern - ); - resolvedIndices.addAll(Arrays.asList(resolvedAliasesAndDataStreamIndices)); - } - } - - if (!(unresolved == null || unresolved.isBlank())) { - final String[] resolvedIndicesFromPattern = resolver.concreteIndexNames( - cs.state(), - expansionMode, - includeDataStreams, - unresolved - ); - resolvedIndices.addAll(Arrays.asList(resolvedIndicesFromPattern)); - } - - if (appendUnresolved || resolvedIndices.build().isEmpty()) { - resolvedIndices.add(unresolved); - } - return resolvedIndices.build(); - } - - public String getDlsQuery(User user) { - return UserAttributes.replaceProperties(dlsQuery, user); - } - - public boolean hasDlsQuery() { - return dlsQuery != null && !dlsQuery.isEmpty(); - } - - public Set getFls() { - return Collections.unmodifiableSet(fls); - } - - public boolean hasFlsFields() { - return fls != null && !fls.isEmpty(); - } - - public Set getMaskedFields() { - return Collections.unmodifiableSet(maskedFields); - } - - public boolean hasMaskedFields() { - return maskedFields != null && !maskedFields.isEmpty(); - } - - public WildcardMatcher getPerms() { - return WildcardMatcher.from(perms); - } - - } - - /*public static class TypePerm { - private final String typePattern; - private final Set perms = new HashSet<>(); - - private TypePerm(String typePattern) { - super(); - this.typePattern = Objects.requireNonNull(typePattern); - /*if(IGNORED_TYPES.contains(typePattern)) { - throw new RuntimeException("typepattern '"+typePattern+"' not allowed"); - } - } - - private TypePerm addPerms(Collection perms) { - if (perms != null) { - this.perms.addAll(perms); - } - return this; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((perms == null) ? 0 : perms.hashCode()); - result = prime * result + ((typePattern == null) ? 
0 : typePattern.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - TypePerm other = (TypePerm) obj; - if (perms == null) { - if (other.perms != null) - return false; - } else if (!perms.equals(other.perms)) - return false; - if (typePattern == null) { - if (other.typePattern != null) - return false; - } else if (!typePattern.equals(other.typePattern)) - return false; - return true; - } - - @Override - public String toString() { - return System.lineSeparator() + " typePattern=" + typePattern + System.lineSeparator() + " perms=" + perms; - } - - public String getTypePattern() { - return typePattern; - } - - public Set getPerms() { - return Collections.unmodifiableSet(perms); - } - - }*/ - - public static class Tenant { - private final String tenant; - private final boolean readWrite; - - private Tenant(String tenant, boolean readWrite) { - super(); - this.tenant = tenant; - this.readWrite = readWrite; - } - - public String getTenant() { - return tenant; - } - - public boolean isReadWrite() { - return readWrite; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + (readWrite ? 1231 : 1237); - result = prime * result + ((tenant == null) ? 0 : tenant.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - Tenant other = (Tenant) obj; - if (readWrite != other.readWrite) return false; - if (tenant == null) { - if (other.tenant != null) return false; - } else if (!tenant.equals(other.tenant)) return false; - return true; - } - - @Override - public String toString() { - return System.lineSeparator() - + " tenant=" - + tenant - + System.lineSeparator() - + " readWrite=" - + readWrite; - } - } - - private static final class IndexMatcherAndPermissions { - private WildcardMatcher matcher; - private WildcardMatcher perms; - - public IndexMatcherAndPermissions(Set patterns, Set perms) { - this.matcher = WildcardMatcher.from(patterns); - this.perms = WildcardMatcher.from(perms); - } - - public boolean matches(String index, String action) { - return matcher.test(index) && perms.test(action); - } - } - - private static boolean impliesTypePerm( - Set ipatterns, - Resolved resolved, - User user, - String[] requestedActions, - IndexNameExpressionResolver resolver, - ClusterService cs - ) { - Set resolvedRequestedIndices = resolved.getAllIndices(); - IndexMatcherAndPermissions[] indexMatcherAndPermissions; - if (resolved.isLocalAll()) { - indexMatcherAndPermissions = ipatterns.stream() - .filter(indexPattern -> "*".equals(indexPattern.getUnresolvedIndexPattern(user))) - .map(p -> new IndexMatcherAndPermissions(p.attemptResolveIndexNames(user, resolver, cs), p.perms)) - .toArray(IndexMatcherAndPermissions[]::new); - } else { - indexMatcherAndPermissions = ipatterns.stream() - .map(p -> new IndexMatcherAndPermissions(p.attemptResolveIndexNames(user, resolver, cs), p.perms)) - .toArray(IndexMatcherAndPermissions[]::new); - } - return resolvedRequestedIndices.stream() - .allMatch( - index -> Arrays.stream(requestedActions) - .allMatch(action -> Arrays.stream(indexMatcherAndPermissions).anyMatch(ipap -> ipap.matches(index, action))) - ); - } - private class TenantHolder { private SetMultimap> tenantsMM = null; diff --git 
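Editor's note on the block removed above: the legacy ConfigModelV7 machinery (IndexMatcherAndPermissions plus impliesTypePerm) granted a request only when every resolved index, combined with every requested action, was covered by at least one role's (index pattern, permission pattern) pair. The following condensed, self-contained sketch restates just that rule; LegacyTypePermSketch, glob and the other names are illustrative stand-ins (a plain Predicate replaces the plugin's WildcardMatcher), not the removed API and not the replacement implementation.

import java.util.List;
import java.util.Set;
import java.util.function.Predicate;
import java.util.regex.Pattern;

final class LegacyTypePermSketch {

    // Stand-in for WildcardMatcher: compiles simple '*'/'?' globs into a predicate.
    static Predicate<String> glob(Set<String> patterns) {
        List<Pattern> compiled = patterns.stream()
            .map(p -> Pattern.compile(("\\Q" + p + "\\E").replace("*", "\\E.*\\Q").replace("?", "\\E.\\Q")))
            .toList();
        return candidate -> compiled.stream().anyMatch(p -> p.matcher(candidate).matches());
    }

    // One role index pattern: which indices it covers and which actions it grants.
    record IndexMatcherAndPermissions(Predicate<String> indices, Predicate<String> actions) {
        boolean matches(String index, String action) {
            return indices.test(index) && actions.test(action);
        }
    }

    // The rule of the deleted impliesTypePerm: every requested index/action pair
    // must be covered by at least one of the role patterns.
    static boolean implies(List<IndexMatcherAndPermissions> rolePatterns,
                           Set<String> requestedIndices, Set<String> requestedActions) {
        return requestedIndices.stream().allMatch(
            index -> requestedActions.stream().allMatch(
                action -> rolePatterns.stream().anyMatch(p -> p.matches(index, action))));
    }

    public static void main(String[] args) {
        var rolePatterns = List.of(
            new IndexMatcherAndPermissions(glob(Set.of("logs-*")), glob(Set.of("indices:data/read/*"))));
        System.out.println(implies(rolePatterns, Set.of("logs-2024"), Set.of("indices:data/read/search"))); // true
        System.out.println(implies(rolePatterns, Set.of("other"), Set.of("indices:data/read/search")));     // false
    }
}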
a/src/main/java/org/opensearch/security/securityconf/DynamicConfigFactory.java b/src/main/java/org/opensearch/security/securityconf/DynamicConfigFactory.java index f7105e2386..fdc66fdb31 100644 --- a/src/main/java/org/opensearch/security/securityconf/DynamicConfigFactory.java +++ b/src/main/java/org/opensearch/security/securityconf/DynamicConfigFactory.java @@ -105,7 +105,7 @@ private void loadStaticConfig() throws IOException { staticTenants = SecurityDynamicConfiguration.fromNode(staticTenantsJsonNode, CType.TENANTS, 2, 0, 0); } - public final static SecurityDynamicConfiguration addStatics(SecurityDynamicConfiguration original) { + public final static SecurityDynamicConfiguration addStatics(SecurityDynamicConfiguration original) { if (original.getCType() == CType.ACTIONGROUPS && !staticActionGroups.getCEntries().isEmpty()) { original.add(staticActionGroups.deepClone()); } diff --git a/src/main/java/org/opensearch/security/securityconf/DynamicConfigModelV6.java b/src/main/java/org/opensearch/security/securityconf/DynamicConfigModelV6.java deleted file mode 100644 index c7edaf938c..0000000000 --- a/src/main/java/org/opensearch/security/securityconf/DynamicConfigModelV6.java +++ /dev/null @@ -1,485 +0,0 @@ -/* - * Copyright 2015-2017 floragunn GmbH - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.security.securityconf; - -import java.net.InetAddress; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map.Entry; -import java.util.Set; -import java.util.SortedSet; -import java.util.TreeSet; - -import com.google.common.base.Strings; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.Multimap; -import com.google.common.collect.Multimaps; - -import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.security.auth.AuthDomain; -import org.opensearch.security.auth.AuthFailureListener; -import org.opensearch.security.auth.AuthenticationBackend; -import org.opensearch.security.auth.AuthorizationBackend; -import org.opensearch.security.auth.Destroyable; -import org.opensearch.security.auth.HTTPAuthenticator; -import org.opensearch.security.auth.blocking.ClientBlockRegistry; -import org.opensearch.security.auth.internal.InternalAuthenticationBackend; -import org.opensearch.security.securityconf.impl.DashboardSignInOption; -import org.opensearch.security.securityconf.impl.v6.ConfigV6; -import org.opensearch.security.securityconf.impl.v6.ConfigV6.Authc; -import org.opensearch.security.securityconf.impl.v6.ConfigV6.AuthcDomain; -import org.opensearch.security.securityconf.impl.v6.ConfigV6.Authz; -import org.opensearch.security.securityconf.impl.v6.ConfigV6.AuthzDomain; -import org.opensearch.security.support.ReflectionHelper; - -public class DynamicConfigModelV6 extends DynamicConfigModel { - - private final ConfigV6 config; - private final Settings opensearchSettings; - private final Path configPath; - private SortedSet restAuthDomains; - private Set restAuthorizers; - private List destroyableComponents; - private final InternalAuthenticationBackend iab; - - private List ipAuthFailureListeners; - private Multimap authBackendFailureListeners; - private List> ipClientBlockRegistries; - private Multimap> authBackendClientBlockRegistries; - - public DynamicConfigModelV6(ConfigV6 config, Settings opensearchSettings, Path configPath, InternalAuthenticationBackend iab) { - super(); - this.config = config; - this.opensearchSettings = opensearchSettings; - this.configPath = configPath; - this.iab = iab; - buildAAA(); - } - - @Override - public SortedSet getRestAuthDomains() { - return Collections.unmodifiableSortedSet(restAuthDomains); - } - - @Override - public Set getRestAuthorizers() { - return Collections.unmodifiableSet(restAuthorizers); - } - - @Override - public boolean isAnonymousAuthenticationEnabled() { - return config.dynamic.http.anonymous_auth_enabled; - } - - @Override - public boolean isXffEnabled() { - return config.dynamic.http.xff.enabled; - } - - @Override - public String getInternalProxies() { - return config.dynamic.http.xff.internalProxies; - } - - @Override - public String getRemoteIpHeader() { - return config.dynamic.http.xff.remoteIpHeader; - } - - @Override - public boolean isRestAuthDisabled() { - return config.dynamic.disable_rest_auth; - } - - @Override - public boolean isInterTransportAuthDisabled() { - return config.dynamic.disable_intertransport_auth; - } - - @Override - public boolean isRespectRequestIndicesEnabled() { - return config.dynamic.respect_request_indices_options; - } - - @Override - public String getDashboardsServerUsername() { - return config.dynamic.kibana.server_username; - } - - @Override - 
public String getDashboardsOpenSearchRole() { - return config.dynamic.kibana.opendistro_role; - } - - @Override - public String getDashboardsIndexname() { - return config.dynamic.kibana.index; - } - - @Override - public boolean isDashboardsMultitenancyEnabled() { - return config.dynamic.kibana.multitenancy_enabled; - } - - @Override - public boolean isDashboardsPrivateTenantEnabled() { - return config.dynamic.kibana.private_tenant_enabled; - } - - @Override - public String getDashboardsDefaultTenant() { - return config.dynamic.kibana.default_tenant; - } - - @Override - public boolean isDnfofEnabled() { - return config.dynamic.do_not_fail_on_forbidden || config.dynamic.kibana.do_not_fail_on_forbidden; - } - - @Override - public boolean isMultiRolespanEnabled() { - return config.dynamic.multi_rolespan_enabled; - } - - @Override - public String getFilteredAliasMode() { - return config.dynamic.filtered_alias_mode; - } - - @Override - public boolean isDnfofForEmptyResultsEnabled() { - return config.dynamic.do_not_fail_on_forbidden_empty; - } - - @Override - public String getHostsResolverMode() { - return config.dynamic.hosts_resolver_mode; - } - - @Override - public List getIpAuthFailureListeners() { - return Collections.unmodifiableList(ipAuthFailureListeners); - } - - @Override - public Multimap getAuthBackendFailureListeners() { - return Multimaps.unmodifiableMultimap(authBackendFailureListeners); - } - - @Override - public List> getIpClientBlockRegistries() { - return Collections.unmodifiableList(ipClientBlockRegistries); - } - - @Override - public Multimap> getAuthBackendClientBlockRegistries() { - return Multimaps.unmodifiableMultimap(authBackendClientBlockRegistries); - } - - @Override - public List getSignInOptions() { - return config.dynamic.kibana.sign_in_options; - } - - @Override - public Settings getDynamicOnBehalfOfSettings() { - return Settings.EMPTY; - } - - private void buildAAA() { - - final SortedSet restAuthDomains0 = new TreeSet<>(); - final Set restAuthorizers0 = new HashSet<>(); - final List destroyableComponents0 = new LinkedList<>(); - final List ipAuthFailureListeners0 = new ArrayList<>(); - final Multimap authBackendFailureListeners0 = ArrayListMultimap.create(); - final List> ipClientBlockRegistries0 = new ArrayList<>(); - final Multimap> authBackendClientBlockRegistries0 = ArrayListMultimap.create(); - - final Authz authzDyn = config.dynamic.authz; - - for (final Entry ad : authzDyn.getDomains().entrySet()) { - final boolean enabled = ad.getValue().enabled; - final boolean httpEnabled = enabled && ad.getValue().http_enabled; - - if (httpEnabled) { - try { - - final String authzBackendClazz = ad.getValue().authorization_backend.type; - final AuthorizationBackend authorizationBackend; - - if (authzBackendClazz.equals(InternalAuthenticationBackend.class.getName()) // NOSONAR - || authzBackendClazz.equals("internal") - || authzBackendClazz.equals("intern")) { - authorizationBackend = iab; - ReflectionHelper.addLoadedModule(InternalAuthenticationBackend.class); - } else { - authorizationBackend = newInstance( - authzBackendClazz, - "z", - Settings.builder() - .put(opensearchSettings) - // .putProperties(ads.getAsStringMap(DotPath.of("authorization_backend.config")), - // DynamicConfiguration.checkKeyFunction()).build(), configPath); - .put( - Settings.builder() - .loadFromSource(ad.getValue().authorization_backend.configAsJson(), XContentType.JSON) - .build() - ) - .build(), - configPath - ); - } - - if (httpEnabled) { - restAuthorizers0.add(authorizationBackend); - } - 
- if (authorizationBackend instanceof Destroyable) { - destroyableComponents0.add((Destroyable) authorizationBackend); - } - } catch (final Exception e) { - log.error("Unable to initialize AuthorizationBackend {} due to {}", ad, e.toString(), e); - } - } - } - - final Authc authcDyn = config.dynamic.authc; - - for (final Entry ad : authcDyn.getDomains().entrySet()) { - final boolean enabled = ad.getValue().enabled; - final boolean httpEnabled = enabled && ad.getValue().http_enabled; - - if (httpEnabled) { - try { - AuthenticationBackend authenticationBackend; - final String authBackendClazz = ad.getValue().authentication_backend.type; - if (authBackendClazz.equals(InternalAuthenticationBackend.class.getName()) // NOSONAR - || authBackendClazz.equals("internal") - || authBackendClazz.equals("intern")) { - authenticationBackend = iab; - ReflectionHelper.addLoadedModule(InternalAuthenticationBackend.class); - } else { - authenticationBackend = newInstance( - authBackendClazz, - "c", - Settings.builder() - .put(opensearchSettings) - // .putProperties(ads.getAsStringMap(DotPath.of("authentication_backend.config")), - // DynamicConfiguration.checkKeyFunction()).build() - .put( - Settings.builder() - .loadFromSource(ad.getValue().authentication_backend.configAsJson(), XContentType.JSON) - .build() - ) - .build(), - configPath - ); - } - - String httpAuthenticatorType = ad.getValue().http_authenticator.type; // no default - HTTPAuthenticator httpAuthenticator = httpAuthenticatorType == null - ? null - : (HTTPAuthenticator) newInstance( - httpAuthenticatorType, - "h", - Settings.builder() - .put(opensearchSettings) - // .putProperties(ads.getAsStringMap(DotPath.of("http_authenticator.config")), - // DynamicConfiguration.checkKeyFunction()).build(), - .put( - Settings.builder() - .loadFromSource(ad.getValue().http_authenticator.configAsJson(), XContentType.JSON) - .build() - ) - .build() - - , - configPath - ); - - final AuthDomain _ad = new AuthDomain( - authenticationBackend, - httpAuthenticator, - ad.getValue().http_authenticator.challenge, - ad.getValue().order - ); - - if (httpEnabled && _ad.getHttpAuthenticator() != null) { - restAuthDomains0.add(_ad); - } - - if (httpAuthenticator instanceof Destroyable) { - destroyableComponents0.add((Destroyable) httpAuthenticator); - } - - if (authenticationBackend instanceof Destroyable) { - destroyableComponents0.add((Destroyable) authenticationBackend); - } - - } catch (final Exception e) { - log.error("Unable to initialize auth domain {} due to {}", ad, e.toString(), e); - } - - } - } - - List originalDestroyableComponents = destroyableComponents; - - restAuthDomains = Collections.unmodifiableSortedSet(restAuthDomains0); - restAuthorizers = Collections.unmodifiableSet(restAuthorizers0); - - destroyableComponents = Collections.unmodifiableList(destroyableComponents0); - - if (originalDestroyableComponents != null) { - destroyDestroyables(originalDestroyableComponents); - } - - originalDestroyableComponents = null; - - createAuthFailureListeners( - ipAuthFailureListeners0, - authBackendFailureListeners0, - ipClientBlockRegistries0, - authBackendClientBlockRegistries0, - destroyableComponents0 - ); - - ipAuthFailureListeners = Collections.unmodifiableList(ipAuthFailureListeners0); - ipClientBlockRegistries = Collections.unmodifiableList(ipClientBlockRegistries0); - authBackendClientBlockRegistries = Multimaps.unmodifiableMultimap(authBackendClientBlockRegistries0); - authBackendFailureListeners = 
Multimaps.unmodifiableMultimap(authBackendFailureListeners0); - - } - - private void destroyDestroyables(List destroyableComponents) { - for (Destroyable destroyable : destroyableComponents) { - try { - destroyable.destroy(); - } catch (Exception e) { - log.error("Error while destroying " + destroyable, e); - } - } - } - - private T newInstance(final String clazzOrShortcut, String type, final Settings settings, final Path configPath) { - - String clazz = clazzOrShortcut; - - if (authImplMap.containsKey(clazz + "_" + type)) { - clazz = authImplMap.get(clazz + "_" + type); - } - - return ReflectionHelper.instantiateAAA(clazz, settings, configPath); - } - - private String translateShortcutToClassName(final String clazzOrShortcut, final String type) { - - if (authImplMap.containsKey(clazzOrShortcut + "_" + type)) { - return authImplMap.get(clazzOrShortcut + "_" + type); - } else { - return clazzOrShortcut; - } - } - - private void createAuthFailureListeners( - List ipAuthFailureListeners, - Multimap authBackendFailureListeners, - List> ipClientBlockRegistries, - Multimap> authBackendUserClientBlockRegistries, - List destroyableComponents0 - ) { - - for (Entry entry : config.dynamic.auth_failure_listeners.getListeners().entrySet()) { - - Settings entrySettings = Settings.builder() - .put(opensearchSettings) - .put(Settings.builder().loadFromSource(entry.getValue().asJson(), XContentType.JSON).build()) - .build(); - - String type = entry.getValue().type; - String authenticationBackend = entry.getValue().authentication_backend; - - AuthFailureListener authFailureListener = newInstance(type, "authFailureListener", entrySettings, configPath); - - if (Strings.isNullOrEmpty(authenticationBackend)) { - ipAuthFailureListeners.add(authFailureListener); - - if (authFailureListener instanceof ClientBlockRegistry) { - if (InetAddress.class.isAssignableFrom(((ClientBlockRegistry) authFailureListener).getClientIdType())) { - @SuppressWarnings("unchecked") - ClientBlockRegistry clientBlockRegistry = (ClientBlockRegistry) authFailureListener; - - ipClientBlockRegistries.add(clientBlockRegistry); - } else { - log.error( - "Illegal ClientIdType for AuthFailureListener" - + entry.getKey() - + ": " - + ((ClientBlockRegistry) authFailureListener).getClientIdType() - + "; must be InetAddress." - ); - } - } - - } else { - - authenticationBackend = translateShortcutToClassName(authenticationBackend, "c"); - - authBackendFailureListeners.put(authenticationBackend, authFailureListener); - - if (authFailureListener instanceof ClientBlockRegistry) { - if (String.class.isAssignableFrom(((ClientBlockRegistry) authFailureListener).getClientIdType())) { - @SuppressWarnings("unchecked") - ClientBlockRegistry clientBlockRegistry = (ClientBlockRegistry) authFailureListener; - - authBackendUserClientBlockRegistries.put(authenticationBackend, clientBlockRegistry); - } else { - log.error( - "Illegal ClientIdType for AuthFailureListener" - + entry.getKey() - + ": " - + ((ClientBlockRegistry) authFailureListener).getClientIdType() - + "; must be InetAddress." 
- ); - } - } - } - - if (authFailureListener instanceof Destroyable) { - destroyableComponents0.add((Destroyable) authFailureListener); - } - } - - } -} diff --git a/src/main/java/org/opensearch/security/securityconf/EvaluatedDlsFlsConfig.java b/src/main/java/org/opensearch/security/securityconf/EvaluatedDlsFlsConfig.java deleted file mode 100644 index aa22e8729f..0000000000 --- a/src/main/java/org/opensearch/security/securityconf/EvaluatedDlsFlsConfig.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.security.securityconf; - -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; -import org.opensearch.security.support.WildcardMatcher; - -public class EvaluatedDlsFlsConfig { - public static EvaluatedDlsFlsConfig EMPTY = new EvaluatedDlsFlsConfig( - Collections.emptyMap(), - Collections.emptyMap(), - Collections.emptyMap() - ); - - private final Map> dlsQueriesByIndex; - private final Map> flsByIndex; - private final Map> fieldMaskingByIndex; - - public EvaluatedDlsFlsConfig( - Map> dlsQueriesByIndex, - Map> flsByIndex, - Map> fieldMaskingByIndex - ) { - this.dlsQueriesByIndex = Collections.unmodifiableMap(dlsQueriesByIndex); - this.flsByIndex = Collections.unmodifiableMap(flsByIndex); - this.fieldMaskingByIndex = Collections.unmodifiableMap(fieldMaskingByIndex); - } - - public Map> getDlsQueriesByIndex() { - return dlsQueriesByIndex; - } - - public Map> getFlsByIndex() { - return flsByIndex; - } - - public Map> getFieldMaskingByIndex() { - return fieldMaskingByIndex; - } - - public Set getAllQueries() { - int mapSize = dlsQueriesByIndex.size(); - - if (mapSize == 0) { - return Collections.emptySet(); - } else if (mapSize == 1) { - return dlsQueriesByIndex.values().iterator().next(); - } else { - Set result = new HashSet<>(); - - for (Set queries : dlsQueriesByIndex.values()) { - result.addAll(queries); - } - - return result; - } - } - - public boolean hasFls() { - return !flsByIndex.isEmpty(); - } - - public boolean hasFieldMasking() { - return !fieldMaskingByIndex.isEmpty(); - } - - public boolean hasDls() { - return !dlsQueriesByIndex.isEmpty(); - } - - public boolean isEmpty() { - return fieldMaskingByIndex.isEmpty() && flsByIndex.isEmpty() && dlsQueriesByIndex.isEmpty(); - } - - public EvaluatedDlsFlsConfig filter(Resolved indices) { - if (indices.isAllIndicesEmpty()) { - return EMPTY; - } else if (this.isEmpty() || indices.isLocalAll()) { - return this; - } else { - Set allIndices = indices.getAllIndices(); - - return new EvaluatedDlsFlsConfig( - filter(dlsQueriesByIndex, allIndices), - filter(flsByIndex, allIndices), - filter(fieldMaskingByIndex, allIndices) - ); - } - } - - public EvaluatedDlsFlsConfig withoutDls() { - if (!hasDls()) { - return this; - } else { - return new EvaluatedDlsFlsConfig(Collections.emptyMap(), flsByIndex, fieldMaskingByIndex); - } - } - - private Map> filter(Map> map, Set allIndices) { - if (allIndices.isEmpty() || map.isEmpty()) { - return map; - } - - HashMap> result = new HashMap<>(map.size()); - - for (Map.Entry> entry : map.entrySet()) { - if (WildcardMatcher.from(entry.getKey(), 
false).matchAny(allIndices)) { - result.put(entry.getKey(), entry.getValue()); - } - } - - return result; - } - - @Override - public String toString() { - return "EvaluatedDlsFlsConfig [dlsQueriesByIndex=" - + dlsQueriesByIndex - + ", flsByIndex=" - + flsByIndex - + ", fieldMaskingByIndex=" - + fieldMaskingByIndex - + "]"; - } - -} diff --git a/src/main/java/org/opensearch/security/securityconf/SecurityRoles.java b/src/main/java/org/opensearch/security/securityconf/SecurityRoles.java deleted file mode 100644 index fb25e1a21f..0000000000 --- a/src/main/java/org/opensearch/security/securityconf/SecurityRoles.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2015-2017 floragunn GmbH - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.security.securityconf; - -import java.util.Set; - -import org.opensearch.cluster.metadata.IndexNameExpressionResolver; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; -import org.opensearch.security.user.User; - -public interface SecurityRoles { - - boolean impliesClusterPermissionPermission(String action0); - - boolean hasExplicitClusterPermissionPermission(String action); - - /** - * Determines if the actions are explicitly granted for indices - * @return if all indices in the request have an explicit grant for all actions - */ - boolean hasExplicitIndexPermission( - Resolved resolved, - User user, - String[] actions, - IndexNameExpressionResolver resolver, - ClusterService cs - ); - - Set getRoleNames(); - - Set reduce( - Resolved requestedResolved, - User user, - String[] strings, - IndexNameExpressionResolver resolver, - ClusterService clusterService - ); - - boolean impliesTypePermGlobal( - Resolved requestedResolved, - User user, - String[] allIndexPermsRequiredA, - IndexNameExpressionResolver resolver, - ClusterService clusterService - ); - - boolean get( - Resolved requestedResolved, - User user, - String[] allIndexPermsRequiredA, - IndexNameExpressionResolver resolver, - ClusterService clusterService - ); - - EvaluatedDlsFlsConfig getDlsFls( - User user, - boolean dfmEmptyOverwritesAll, - IndexNameExpressionResolver resolver, - ClusterService clusterService, - NamedXContentRegistry namedXContentRegistry - ); - - Set getAllPermittedIndicesForDashboards( - Resolved resolved, - User user, - String[] actions, - IndexNameExpressionResolver resolver, - ClusterService cs - ); - - SecurityRoles filter(Set roles); - - boolean isPermittedOnSystemIndex(String indexName); -} diff --git a/src/main/java/org/opensearch/security/securityconf/impl/SecurityDynamicConfiguration.java 
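Editor's note on the EvaluatedDlsFlsConfig deletion above: its private filter step returned the per-index-pattern map unchanged when either the map or the set of requested indices was empty, and otherwise kept only the entries whose index-pattern key matched at least one concretely requested index. A minimal, self-contained restatement of that filtering step follows; DlsFlsFilterSketch, filterByRequestedIndices and the toy matcher in main are hypothetical stand-ins for illustration, not the removed class's API.

import java.util.Map;
import java.util.Set;
import java.util.function.BiPredicate;
import java.util.stream.Collectors;

final class DlsFlsFilterSketch {

    // Keeps only entries whose index-pattern key matches at least one requested index.
    // The BiPredicate stands in for WildcardMatcher.from(pattern).matchAny(indices).
    static Map<String, Set<String>> filterByRequestedIndices(
            Map<String, Set<String>> restrictionsByIndexPattern,
            Set<String> requestedIndices,
            BiPredicate<String, Set<String>> patternMatchesAny) {
        if (requestedIndices.isEmpty() || restrictionsByIndexPattern.isEmpty()) {
            return restrictionsByIndexPattern;
        }
        return restrictionsByIndexPattern.entrySet().stream()
            .filter(e -> patternMatchesAny.test(e.getKey(), requestedIndices))
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    }

    public static void main(String[] args) {
        // Toy matcher: exact match or a single trailing '*' wildcard.
        BiPredicate<String, Set<String>> matcher = (pattern, indices) -> indices.stream()
            .anyMatch(i -> pattern.endsWith("*")
                ? i.startsWith(pattern.substring(0, pattern.length() - 1))
                : i.equals(pattern));

        Map<String, Set<String>> dlsByPattern = Map.of(
            "logs-*", Set.of("{\"term\":{\"dept\":\"a\"}}"),
            "hr", Set.of("{\"match_all\":{}}"));
        System.out.println(filterByRequestedIndices(dlsByPattern, Set.of("logs-2024"), matcher).keySet()); // [logs-*]
    }
}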
b/src/main/java/org/opensearch/security/securityconf/impl/SecurityDynamicConfiguration.java index d99c9ad2d0..1237c917bf 100644 --- a/src/main/java/org/opensearch/security/securityconf/impl/SecurityDynamicConfiguration.java +++ b/src/main/java/org/opensearch/security/securityconf/impl/SecurityDynamicConfiguration.java @@ -243,6 +243,20 @@ public static SecurityDynamicConfiguration fromNode(JsonNode json, CType< ); } + /** + * For testing only + */ + public static SecurityDynamicConfiguration fromYaml(String yaml, CType ctype) throws JsonProcessingException { + Class implementationClass = ctype.getConfigClass(); + SecurityDynamicConfiguration result = DefaultObjectMapper.YAML_MAPPER.readValue( + yaml, + DefaultObjectMapper.getTypeFactory().constructParametricType(SecurityDynamicConfiguration.class, implementationClass) + ); + result.ctype = ctype; + result.version = 2; + return result; + } + // for Jackson private SecurityDynamicConfiguration() { super(); @@ -416,6 +430,18 @@ public Class getImplementingClass() { } @SuppressWarnings("unchecked") + @JsonIgnore + public SecurityDynamicConfiguration clone() { + SecurityDynamicConfiguration result = new SecurityDynamicConfiguration(); + result.version = this.version; + result.ctype = this.ctype; + result.primaryTerm = this.primaryTerm; + result.seqNo = this.seqNo; + result._meta = this._meta; + result.centries.putAll(this.centries); + return result; + } + @JsonIgnore public SecurityDynamicConfiguration deepClone() { try { @@ -431,6 +457,7 @@ public SecurityDynamicConfiguration deepClone() { return result; } else { // We are on a pre-v7 config version. This can be only if we skipped auto conversion. So, we do here the same. + @SuppressWarnings("unchecked") SecurityDynamicConfiguration result = (SecurityDynamicConfiguration) fromJsonWithoutAutoConversion( DefaultObjectMapper.writeValueAsString(this, false), ctypeUnsafe, diff --git a/src/main/java/org/opensearch/security/support/MapUtils.java b/src/main/java/org/opensearch/security/support/MapUtils.java deleted file mode 100644 index f530917824..0000000000 --- a/src/main/java/org/opensearch/security/support/MapUtils.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright 2015-2018 _floragunn_ GmbH - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.security.support; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -public class MapUtils { - - public static void deepTraverseMap(final Map map, final Callback cb) { - deepTraverseMap(map, cb, null); - } - - private static void deepTraverseMap(final Map map, final Callback cb, final List stack) { - final List localStack; - if (stack == null) { - localStack = new ArrayList(30); - } else { - localStack = stack; - } - for (Map.Entry entry : map.entrySet()) { - if (entry.getValue() != null && entry.getValue() instanceof Map) { - @SuppressWarnings("unchecked") - final Map inner = (Map) entry.getValue(); - localStack.add(entry.getKey()); - deepTraverseMap(inner, cb, localStack); - if (!localStack.isEmpty()) { - localStack.remove(localStack.size() - 1); - } - } else { - cb.call(entry.getKey(), map, Collections.unmodifiableList(localStack)); - } - } - } - - public static interface Callback { - public void call(String key, Map map, List stack); - } -} diff --git a/src/main/java/org/opensearch/security/support/SecurityUtils.java b/src/main/java/org/opensearch/security/support/SecurityUtils.java index 4c278e3b37..5686f0076e 100644 --- a/src/main/java/org/opensearch/security/support/SecurityUtils.java +++ b/src/main/java/org/opensearch/security/support/SecurityUtils.java @@ -29,8 +29,6 @@ import java.nio.charset.StandardCharsets; import java.util.Base64; import java.util.Locale; -import java.util.Map; -import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -68,25 +66,6 @@ private static Locale forEN() { return Locale.getDefault(); } - public static String evalMap(final Map> map, final String index) { - - if (map == null) { - return null; - } - - // TODO: check what to do with _all - /*if (map.get(index) != null) { - return index; - } else if (map.get("*") != null) { - return "*"; - } - if (map.get("_all") != null) { - return "_all"; - }*/ - - return map.keySet().stream().filter(key -> WildcardMatcher.from(key).test(index)).findAny().orElse(null); - } - public static String replaceEnvVars(String in, Settings settings) { if (in == null || in.isEmpty()) { return in; diff --git a/src/main/java/org/opensearch/security/support/WildcardMatcher.java b/src/main/java/org/opensearch/security/support/WildcardMatcher.java index d811a73730..537e2d473c 100644 --- a/src/main/java/org/opensearch/security/support/WildcardMatcher.java +++ b/src/main/java/org/opensearch/security/support/WildcardMatcher.java @@ -28,6 +28,7 @@ import java.util.Arrays; import java.util.Collection; +import java.util.Iterator; import java.util.List; import java.util.Objects; import java.util.Optional; @@ -282,6 +283,55 @@ public Optional findFirst(final String candidate) { return Optional.ofNullable(test(candidate) ? 
this : null); } + public Iterable iterateMatching(Iterable candidates) { + return iterateMatching(candidates, Function.identity()); + } + + public Iterable iterateMatching(Iterable candidates, Function toStringFunction) { + return new Iterable() { + + @Override + public Iterator iterator() { + Iterator delegate = candidates.iterator(); + + return new Iterator() { + private E next; + + @Override + public boolean hasNext() { + if (next == null) { + init(); + } + + return next != null; + } + + @Override + public E next() { + if (next == null) { + init(); + } + + E result = next; + next = null; + return result; + } + + private void init() { + while (delegate.hasNext()) { + E candidate = delegate.next(); + + if (test(toStringFunction.apply(candidate))) { + next = candidate; + break; + } + } + } + }; + } + }; + } + public static List matchers(Collection patterns) { return patterns.stream().map(p -> WildcardMatcher.from(p, true)).collect(Collectors.toList()); } @@ -294,6 +344,10 @@ public static List getAllMatchingPatterns(final Collection p.matchAny(candidates)).map(Objects::toString).collect(Collectors.toList()); } + public static boolean isExact(String pattern) { + return pattern == null || !(pattern.contains("*") || pattern.contains("?") || (pattern.startsWith("/") && pattern.endsWith("/"))); + } + // // --- Implementation specializations --- // diff --git a/src/main/java/org/opensearch/security/transport/SecurityInterceptor.java b/src/main/java/org/opensearch/security/transport/SecurityInterceptor.java index 9741014fda..7be544c9cd 100644 --- a/src/main/java/org/opensearch/security/transport/SecurityInterceptor.java +++ b/src/main/java/org/opensearch/security/transport/SecurityInterceptor.java @@ -57,6 +57,7 @@ import org.opensearch.security.auditlog.AuditLog.Origin; import org.opensearch.security.auth.BackendRegistry; import org.opensearch.security.configuration.ClusterInfoHolder; +import org.opensearch.security.privileges.dlsfls.DlsFlsLegacyHeaders; import org.opensearch.security.ssl.SslExceptionHandler; import org.opensearch.security.ssl.transport.PrincipalExtractor; import org.opensearch.security.ssl.transport.SSLConfig; @@ -150,6 +151,7 @@ public void sendRequestDecorate( final String origCCSTransientDls = getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_CCS); final String origCCSTransientFls = getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_CCS); final String origCCSTransientMf = getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_CCS); + final DlsFlsLegacyHeaders dlsFlsLegacyHeaders = getThreadContext().getTransient(DlsFlsLegacyHeaders.TRANSIENT_HEADER); final boolean isDebugEnabled = log.isDebugEnabled(); @@ -183,6 +185,10 @@ public void sendRequestDecorate( ) ); + if (dlsFlsLegacyHeaders != null) { + dlsFlsLegacyHeaders.performHeaderDecoration(connection, request, headerMap); + } + if (OpenSearchSecurityPlugin.GuiceHolder.getRemoteClusterService().isCrossClusterSearchEnabled() && clusterInfoHolder.isInitialized() && (action.equals(ClusterSearchShardsAction.NAME) || action.equals(SearchAction.NAME)) diff --git a/src/test/java/org/opensearch/security/IntegrationTests.java b/src/test/java/org/opensearch/security/IntegrationTests.java index 03a5dd3092..6fc953ef59 100644 --- a/src/test/java/org/opensearch/security/IntegrationTests.java +++ b/src/test/java/org/opensearch/security/IntegrationTests.java @@ -400,7 +400,7 @@ public void testRegexExcludes() throws Exception { @Test public void 
testMultiRoleSpan() throws Exception { - setup(); + setup(Settings.EMPTY, new DynamicSecurityConfig().setConfig("config_multirolespan.yml"), Settings.EMPTY); final RestHelper rh = nonSslRestHelper(); try (Client tc = getClient()) { @@ -411,24 +411,8 @@ public void testMultiRoleSpan() throws Exception { } HttpResponse res = rh.executeGetRequest("/mindex_1,mindex_2/_search", encodeBasicHeader("mindex12", "nagilum")); - assertThat(res.getStatusCode(), is(HttpStatus.SC_FORBIDDEN)); - Assert.assertFalse(res.getBody().contains("\"content\":1")); - Assert.assertFalse(res.getBody().contains("\"content\":2")); - - try (Client tc = getClient()) { - tc.index( - new IndexRequest(".opendistro_security").id("config") - .setRefreshPolicy(RefreshPolicy.IMMEDIATE) - .source("config", FileHelper.readYamlContent("config_multirolespan.yml")) - ).actionGet(); - - ConfigUpdateResponse cur = tc.execute(ConfigUpdateAction.INSTANCE, new ConfigUpdateRequest(new String[] { "config" })) - .actionGet(); - assertThat(cur.getNodes().size(), is(clusterInfo.numNodes)); - } - - res = rh.executeGetRequest("/mindex_1,mindex_2/_search", encodeBasicHeader("mindex12", "nagilum")); assertThat(res.getStatusCode(), is(HttpStatus.SC_OK)); + Assert.assertEquals(HttpStatus.SC_OK, res.getStatusCode()); Assert.assertTrue(res.getBody().contains("\"content\":1")); Assert.assertTrue(res.getBody().contains("\"content\":2")); diff --git a/src/test/java/org/opensearch/security/privileges/PrivilegesEvaluatorTest.java b/src/test/java/org/opensearch/security/privileges/PrivilegesEvaluatorTest.java deleted file mode 100644 index 706f5bcecd..0000000000 --- a/src/test/java/org/opensearch/security/privileges/PrivilegesEvaluatorTest.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.security.privileges; - -import org.apache.http.Header; -import org.apache.http.HttpStatus; -import org.junit.Before; -import org.junit.Test; - -import org.opensearch.common.settings.Settings; -import org.opensearch.security.test.DynamicSecurityConfig; -import org.opensearch.security.test.SingleClusterTest; -import org.opensearch.security.test.helper.rest.RestHelper; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.is; - -public class PrivilegesEvaluatorTest extends SingleClusterTest { - private static final Header NegativeLookaheadUserHeader = encodeBasicHeader("negative_lookahead_user", "negative_lookahead_user"); - private static final Header NegatedRegexUserHeader = encodeBasicHeader("negated_regex_user", "negated_regex_user"); - - @Before - public void setupSettingsIndexPattern() throws Exception { - Settings settings = Settings.builder().build(); - setup( - Settings.EMPTY, - new DynamicSecurityConfig().setSecurityRoles("roles_index_patterns.yml") - .setSecurityInternalUsers("internal_users_index_patterns.yml") - .setSecurityRolesMapping("roles_mapping_index_patterns.yml"), - settings, - true - ); - } - - @Test - public void testNegativeLookaheadPattern() throws Exception { - - RestHelper rh = nonSslRestHelper(); - RestHelper.HttpResponse response = rh.executeGetRequest("*/_search", NegativeLookaheadUserHeader); - assertThat(response.getStatusCode(), is(HttpStatus.SC_FORBIDDEN)); - response = rh.executeGetRequest("r*/_search", NegativeLookaheadUserHeader); - assertThat(response.getStatusCode(), is(HttpStatus.SC_OK)); - } - - @Test - public void testRegexPattern() throws Exception { - RestHelper rh = nonSslRestHelper(); - RestHelper.HttpResponse response = rh.executeGetRequest("*/_search", NegatedRegexUserHeader); - assertThat(response.getStatusCode(), is(HttpStatus.SC_FORBIDDEN)); - response = rh.executeGetRequest("r*/_search", NegatedRegexUserHeader); - assertThat(response.getStatusCode(), is(HttpStatus.SC_OK)); - } -} diff --git a/src/test/java/org/opensearch/security/privileges/RestLayerPrivilegesEvaluatorTest.java b/src/test/java/org/opensearch/security/privileges/RestLayerPrivilegesEvaluatorTest.java index c374a10c24..da35226d62 100644 --- a/src/test/java/org/opensearch/security/privileges/RestLayerPrivilegesEvaluatorTest.java +++ b/src/test/java/org/opensearch/security/privileges/RestLayerPrivilegesEvaluatorTest.java @@ -11,28 +11,33 @@ package org.opensearch.security.privileges; -import java.util.Collections; import java.util.Set; +import java.util.TreeMap; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.config.Configurator; -import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.opensearch.OpenSearchSecurityException; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.metadata.Metadata; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.security.auditlog.NullAuditLog; import org.opensearch.security.securityconf.ConfigModel; -import org.opensearch.security.securityconf.SecurityRoles; +import 
org.opensearch.security.securityconf.DynamicConfigModel; +import org.opensearch.security.securityconf.impl.CType; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; import org.opensearch.security.user.User; -import org.opensearch.threadpool.ThreadPool; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; @@ -41,12 +46,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThrows; -import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoInteractions; import static org.mockito.Mockito.when; import static org.mockito.Mockito.withSettings; @@ -56,11 +56,9 @@ public class RestLayerPrivilegesEvaluatorTest { @Mock(strictness = Mock.Strictness.LENIENT) private ClusterService clusterService; @Mock - private ThreadPool threadPool; - @Mock private ConfigModel configModel; - - private RestLayerPrivilegesEvaluator privilegesEvaluator; + @Mock + private DynamicConfigModel dynamicConfigModel; private static final User TEST_USER = new User("test_user"); @@ -71,16 +69,14 @@ private void setLoggingLevel(final Level level) { @Before public void setUp() { - when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); - when(clusterService.localNode()).thenReturn(mock(DiscoveryNode.class, withSettings().strictness(Strictness.LENIENT))); - privilegesEvaluator = new RestLayerPrivilegesEvaluator( - clusterService, - threadPool - ); - privilegesEvaluator.onConfigModelChanged(configModel); // Defaults to the mocked config model - verify(threadPool).getThreadContext(); // Called during construction of RestLayerPrivilegesEvaluator + when(configModel.mapSecurityRoles(TEST_USER, null)).thenReturn(Set.of("test_role")); setLoggingLevel(Level.DEBUG); // Enable debug logging scenarios for verification + ClusterState clusterState = mock(ClusterState.class); + when(clusterService.state()).thenReturn(clusterState); + Metadata metadata = mock(Metadata.class); + when(clusterState.metadata()).thenReturn(metadata); + when(metadata.getIndicesLookup()).thenReturn(new TreeMap<>()); } @After @@ -89,96 +85,89 @@ public void after() { } @Test - public void testEvaluate_Initialized_Success() { + public void testEvaluate_Initialized_Success() throws Exception { String action = "action"; - SecurityRoles securityRoles = mock(SecurityRoles.class); - when(configModel.getSecurityRoles()).thenReturn(securityRoles); - when(configModel.getSecurityRoles().filter(Collections.emptySet())).thenReturn(securityRoles); - when(securityRoles.impliesClusterPermissionPermission(action)).thenReturn(false); + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - any", CType.ROLES); + + PrivilegesEvaluator privilegesEvaluator = createPrivilegesEvaluator(roles); + RestLayerPrivilegesEvaluator restPrivilegesEvaluator = new RestLayerPrivilegesEvaluator(privilegesEvaluator); - PrivilegesEvaluatorResponse response = privilegesEvaluator.evaluate(TEST_USER, Set.of(action)); + PrivilegesEvaluatorResponse response = restPrivilegesEvaluator.evaluate(TEST_USER, "route_name", Set.of(action)); assertThat(response.isAllowed(), equalTo(false)); assertThat(response.getMissingPrivileges(), 
equalTo(Set.of(action))); - assertThat(response.getResolvedSecurityRoles(), Matchers.empty()); - verify(configModel, times(3)).getSecurityRoles(); } @Test public void testEvaluate_NotInitialized_NullModel_ExceptionThrown() { - // Null out the config model - privilegesEvaluator.onConfigModelChanged(null); - final OpenSearchSecurityException exception = assertThrows( - OpenSearchSecurityException.class, - () -> privilegesEvaluator.evaluate(TEST_USER, null) - ); - assertThat(exception.getMessage(), equalTo("OpenSearch Security is not initialized.")); - verify(configModel, never()).getSecurityRoles(); - } - - @Test - public void testEvaluate_NotInitialized_NoSecurityRoles_ExceptionThrown() { + PrivilegesEvaluator privilegesEvaluator = createPrivilegesEvaluator(null); + RestLayerPrivilegesEvaluator restPrivilegesEvaluator = new RestLayerPrivilegesEvaluator(privilegesEvaluator); final OpenSearchSecurityException exception = assertThrows( OpenSearchSecurityException.class, - () -> privilegesEvaluator.evaluate(TEST_USER, null) + () -> restPrivilegesEvaluator.evaluate(TEST_USER, "route_name", null) ); assertThat(exception.getMessage(), equalTo("OpenSearch Security is not initialized.")); - verify(configModel).getSecurityRoles(); } @Test - public void testMapRoles_ReturnsMappedRoles() { - final User user = mock(User.class); - final Set mappedRoles = Collections.singleton("role1"); - when(configModel.mapSecurityRoles(any(), any())).thenReturn(mappedRoles); - - final Set result = privilegesEvaluator.mapRoles(user, null); - - assertThat(result, equalTo(mappedRoles)); - verifyNoInteractions(user); - verify(configModel).mapSecurityRoles(user, null); - } - - @Test - public void testEvaluate_Successful_NewPermission() { + public void testEvaluate_Successful_NewPermission() throws Exception { String action = "hw:greet"; - SecurityRoles securityRoles = mock(SecurityRoles.class); - when(configModel.getSecurityRoles()).thenReturn(securityRoles); - when(configModel.getSecurityRoles().filter(Collections.emptySet())).thenReturn(securityRoles); - when(securityRoles.impliesClusterPermissionPermission(action)).thenReturn(true); - - PrivilegesEvaluatorResponse response = privilegesEvaluator.evaluate(TEST_USER, Set.of(action)); - + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - hw:greet", CType.ROLES); + PrivilegesEvaluator privilegesEvaluator = createPrivilegesEvaluator(roles); + RestLayerPrivilegesEvaluator restPrivilegesEvaluator = new RestLayerPrivilegesEvaluator(privilegesEvaluator); + PrivilegesEvaluatorResponse response = restPrivilegesEvaluator.evaluate(TEST_USER, "route_name", Set.of(action)); assertThat(response.allowed, equalTo(true)); - verify(securityRoles).impliesClusterPermissionPermission(action); } @Test - public void testEvaluate_Successful_LegacyPermission() { + public void testEvaluate_Successful_LegacyPermission() throws Exception { String action = "cluster:admin/opensearch/hw/greet"; - SecurityRoles securityRoles = mock(SecurityRoles.class); - when(configModel.getSecurityRoles()).thenReturn(securityRoles); - when(configModel.getSecurityRoles().filter(Collections.emptySet())).thenReturn(securityRoles); - when(securityRoles.impliesClusterPermissionPermission(action)).thenReturn(true); - - PrivilegesEvaluatorResponse response = privilegesEvaluator.evaluate(TEST_USER, Set.of(action)); - + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // 
+ " - cluster:admin/opensearch/hw/greet", CType.ROLES); + PrivilegesEvaluator privilegesEvaluator = createPrivilegesEvaluator(roles); + RestLayerPrivilegesEvaluator restPrivilegesEvaluator = new RestLayerPrivilegesEvaluator(privilegesEvaluator); + PrivilegesEvaluatorResponse response = restPrivilegesEvaluator.evaluate(TEST_USER, "route_name", Set.of(action)); assertThat(response.allowed, equalTo(true)); - verify(securityRoles).impliesClusterPermissionPermission(action); - verify(configModel, times(3)).getSecurityRoles(); } @Test - public void testEvaluate_Unsuccessful() { + public void testEvaluate_Unsuccessful() throws Exception { String action = "action"; - SecurityRoles securityRoles = mock(SecurityRoles.class); - when(configModel.getSecurityRoles()).thenReturn(securityRoles); - when(configModel.getSecurityRoles().filter(Collections.emptySet())).thenReturn(securityRoles); - when(securityRoles.impliesClusterPermissionPermission(action)).thenReturn(false); + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - other_action", CType.ROLES); + PrivilegesEvaluator privilegesEvaluator = createPrivilegesEvaluator(roles); + RestLayerPrivilegesEvaluator restPrivilegesEvaluator = new RestLayerPrivilegesEvaluator(privilegesEvaluator); + PrivilegesEvaluatorResponse response = restPrivilegesEvaluator.evaluate(TEST_USER, "route_name", Set.of(action)); + assertThat(response.allowed, equalTo(false)); + } - PrivilegesEvaluatorResponse response = privilegesEvaluator.evaluate(TEST_USER, Set.of(action)); + PrivilegesEvaluator createPrivilegesEvaluator(SecurityDynamicConfiguration roles) { + PrivilegesEvaluator privilegesEvaluator = new PrivilegesEvaluator( + clusterService, + () -> clusterService.state(), + null, + new ThreadContext(Settings.EMPTY), + null, + new IndexNameExpressionResolver(new ThreadContext(Settings.EMPTY)), + new NullAuditLog(), + Settings.EMPTY, + null, + null, + null, + null + ); + privilegesEvaluator.onConfigModelChanged(configModel); // Defaults to the mocked config model + privilegesEvaluator.onDynamicConfigModelChanged(dynamicConfigModel); - assertThat(response.allowed, equalTo(false)); - verify(securityRoles).impliesClusterPermissionPermission(action); + if (roles != null) { + privilegesEvaluator.updateConfiguration(SecurityDynamicConfiguration.empty(CType.ACTIONGROUPS), roles); + } + return privilegesEvaluator; } } diff --git a/src/test/java/org/opensearch/security/privileges/SystemIndexAccessEvaluatorTest.java b/src/test/java/org/opensearch/security/privileges/SystemIndexAccessEvaluatorTest.java index 58e811fa24..878033fd5c 100644 --- a/src/test/java/org/opensearch/security/privileges/SystemIndexAccessEvaluatorTest.java +++ b/src/test/java/org/opensearch/security/privileges/SystemIndexAccessEvaluatorTest.java @@ -11,13 +11,13 @@ package org.opensearch.security.privileges; -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; +import java.util.Arrays; import java.util.List; import java.util.Set; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import com.fasterxml.jackson.core.JsonProcessingException; import org.apache.logging.log4j.Logger; import org.junit.After; import org.junit.Test; @@ -27,6 +27,7 @@ import org.opensearch.action.get.MultiGetRequest; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.support.IndicesOptions; +import 
org.opensearch.cluster.metadata.IndexAbstraction; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; @@ -34,10 +35,13 @@ import org.opensearch.security.auditlog.AuditLog; import org.opensearch.security.resolver.IndexResolverReplacer; import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; -import org.opensearch.security.securityconf.ConfigModelV7; -import org.opensearch.security.securityconf.SecurityRoles; +import org.opensearch.security.securityconf.FlattenedActionGroups; +import org.opensearch.security.securityconf.impl.CType; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; import org.opensearch.security.support.ConfigConstants; import org.opensearch.security.user.User; +import org.opensearch.security.util.MockIndexMetadataBuilder; import org.opensearch.tasks.Task; import org.mockito.Mock; @@ -46,9 +50,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.opensearch.security.support.ConfigConstants.SYSTEM_INDEX_PERMISSION; -import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoInteractions; @@ -81,12 +83,12 @@ public class SystemIndexAccessEvaluatorTest { private static final String TEST_INDEX = ".test"; private static final String SECURITY_INDEX = ConfigConstants.OPENDISTRO_SECURITY_DEFAULT_CONFIG_INDEX; - @Mock - SecurityRoles securityRoles; + ImmutableMap indexMetadata = MockIndexMetadataBuilder.indices(TEST_INDEX, TEST_SYSTEM_INDEX, SECURITY_INDEX) + .build(); User user; - IndexNameExpressionResolver indexNameExpressionResolver; + ActionPrivileges actionPrivileges; private ThreadContext createThreadContext() { return new ThreadContext(Settings.EMPTY); @@ -105,29 +107,29 @@ public void setup( ThreadContext threadContext = createThreadContext(); indexNameExpressionResolver = createIndexNameExpressionResolver(threadContext); - // create a security role - ConfigModelV7.IndexPattern ip = spy(new ConfigModelV7.IndexPattern(index)); - ConfigModelV7.SecurityRole.Builder _securityRole = new ConfigModelV7.SecurityRole.Builder("role_a"); - ip.addPerm(createIndexPatternWithSystemIndexPermission ? Set.of("*", SYSTEM_INDEX_PERMISSION) : Set.of("*")); - _securityRole.addIndexPattern(ip); - _securityRole.addClusterPerms(List.of("*")); - ConfigModelV7.SecurityRole secRole = _securityRole.build(); - try { - // create an instance of Security Role - Constructor constructor = ConfigModelV7.SecurityRoles.class.getDeclaredConstructor(int.class); - constructor.setAccessible(true); - securityRoles = constructor.newInstance(1); - - // add security role to Security Roles - Method addSecurityRoleMethod = ConfigModelV7.SecurityRoles.class.getDeclaredMethod( - "addSecurityRole", - ConfigModelV7.SecurityRole.class + SecurityDynamicConfiguration rolesConfig = SecurityDynamicConfiguration.fromMap( + ImmutableMap.of( + "role_a", + ImmutableMap.of( + "index_permissions", + Arrays.asList( + ImmutableMap.of( + "index_patterns", + Arrays.asList(index), + "allowed_actions", + createIndexPatternWithSystemIndexPermission ? 
Set.of("*", SYSTEM_INDEX_PERMISSION) : Set.of("*") + ) + ), + "cluster_permissions", + Arrays.asList("*") + ) + ), + CType.ROLES ); - addSecurityRoleMethod.setAccessible(true); - addSecurityRoleMethod.invoke(securityRoles, secRole); - } catch (NoSuchMethodException | InvocationTargetException | InstantiationException | IllegalAccessException e) { + this.actionPrivileges = new ActionPrivileges(rolesConfig, FlattenedActionGroups.EMPTY, () -> indexMetadata, Settings.EMPTY); + } catch (JsonProcessingException e) { throw new RuntimeException(e); } @@ -150,8 +152,19 @@ public void setup( when(log.isDebugEnabled()).thenReturn(true); when(log.isInfoEnabled()).thenReturn(true); + } - doReturn(ImmutableSet.of(index)).when(ip).getResolvedIndexPattern(user, indexNameExpressionResolver, cs, true, false); + PrivilegesEvaluationContext ctx(String action) { + return new PrivilegesEvaluationContext( + user, + ImmutableSet.of("role_a"), + action, + request, + null, + null, + indexNameExpressionResolver, + null + ); } @After @@ -171,10 +184,9 @@ public void testUnprotectedActionOnRegularIndex_systemIndexDisabled() { UNPROTECTED_ACTION, resolved, presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs + ctx(UNPROTECTED_ACTION), + actionPrivileges, + user ); verifyNoInteractions(presponse); assertThat(response, is(presponse)); @@ -193,10 +205,9 @@ public void testUnprotectedActionOnRegularIndex_systemIndexPermissionDisabled() UNPROTECTED_ACTION, resolved, presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs + ctx(UNPROTECTED_ACTION), + actionPrivileges, + user ); verifyNoInteractions(presponse); assertThat(response, is(presponse)); @@ -214,10 +225,9 @@ public void testUnprotectedActionOnRegularIndex_systemIndexPermissionEnabled() { UNPROTECTED_ACTION, resolved, presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs + ctx(UNPROTECTED_ACTION), + actionPrivileges, + user ); verifyNoInteractions(presponse); assertThat(response, is(presponse)); @@ -235,10 +245,9 @@ public void testUnprotectedActionOnSystemIndex_systemIndexDisabled() { UNPROTECTED_ACTION, resolved, presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs + ctx(UNPROTECTED_ACTION), + actionPrivileges, + user ); verifyNoInteractions(presponse); assertThat(response, is(presponse)); @@ -256,10 +265,9 @@ public void testUnprotectedActionOnSystemIndex_systemIndexPermissionDisabled() { UNPROTECTED_ACTION, resolved, presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs + ctx(UNPROTECTED_ACTION), + actionPrivileges, + user ); verifyNoInteractions(presponse); assertThat(response, is(presponse)); @@ -277,17 +285,21 @@ public void testUnprotectedActionOnSystemIndex_systemIndexPermissionEnabled_With UNPROTECTED_ACTION, resolved, presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs + ctx(UNPROTECTED_ACTION), + actionPrivileges, + user ); verify(presponse).markComplete(); assertThat(response, is(presponse)); verify(auditLog).logSecurityIndexAttempt(request, UNPROTECTED_ACTION, null); verify(log).isInfoEnabled(); - verify(log).info("No {} permission for user roles {} to System Indices {}", UNPROTECTED_ACTION, securityRoles, TEST_SYSTEM_INDEX); + verify(log).info( + "No {} permission for user roles {} to System Indices {}", + UNPROTECTED_ACTION, + user.getSecurityRoles(), + TEST_SYSTEM_INDEX + ); } @Test @@ -302,10 +314,9 @@ public void testUnprotectedActionOnSystemIndex_systemIndexPermissionEnabled_With UNPROTECTED_ACTION, resolved, presponse, - securityRoles, - 
user, - indexNameExpressionResolver, - cs + ctx(UNPROTECTED_ACTION), + actionPrivileges, + user ); assertThat(response, is(presponse)); // unprotected action is not allowed on a system index @@ -321,29 +332,9 @@ public void testDisableCacheOrRealtimeOnSystemIndex_systemIndexDisabled() { final Resolved resolved = createResolved(TEST_SYSTEM_INDEX); // Action - evaluator.evaluate(request, null, UNPROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); - evaluator.evaluate( - searchRequest, - null, - UNPROTECTED_ACTION, - resolved, - presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs - ); - evaluator.evaluate( - realtimeRequest, - null, - UNPROTECTED_ACTION, - resolved, - presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs - ); + evaluator.evaluate(request, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); + evaluator.evaluate(searchRequest, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); + evaluator.evaluate(realtimeRequest, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); verifyNoInteractions(presponse); } @@ -357,29 +348,9 @@ public void testDisableCacheOrRealtimeOnSystemIndex_systemIndexPermissionDisable final Resolved resolved = createResolved(TEST_SYSTEM_INDEX); // Action - evaluator.evaluate(request, null, UNPROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); - evaluator.evaluate( - searchRequest, - null, - UNPROTECTED_ACTION, - resolved, - presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs - ); - evaluator.evaluate( - realtimeRequest, - null, - UNPROTECTED_ACTION, - resolved, - presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs - ); + evaluator.evaluate(request, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); + evaluator.evaluate(searchRequest, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); + evaluator.evaluate(realtimeRequest, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); verify(searchRequest).requestCache(Boolean.FALSE); verify(realtimeRequest).realtime(Boolean.FALSE); @@ -398,29 +369,9 @@ public void testDisableCacheOrRealtimeOnSystemIndex_systemIndexPermissionEnabled final Resolved resolved = createResolved(TEST_SYSTEM_INDEX); // Action - evaluator.evaluate(request, null, UNPROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); - evaluator.evaluate( - searchRequest, - null, - UNPROTECTED_ACTION, - resolved, - presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs - ); - evaluator.evaluate( - realtimeRequest, - null, - UNPROTECTED_ACTION, - resolved, - presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs - ); + evaluator.evaluate(request, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); + evaluator.evaluate(searchRequest, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); + evaluator.evaluate(realtimeRequest, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); verify(searchRequest).requestCache(Boolean.FALSE); verify(realtimeRequest).realtime(Boolean.FALSE); @@ -437,7 +388,7 @@ public void 
testDisableCacheOrRealtimeOnSystemIndex_systemIndexPermissionEnabled verify(log, times(3)).info( "No {} permission for user roles {} to System Indices {}", UNPROTECTED_ACTION, - securityRoles, + user.getSecurityRoles(), TEST_SYSTEM_INDEX ); verify(log).debug("Disable search request cache for this request"); @@ -453,29 +404,9 @@ public void testDisableCacheOrRealtimeOnSystemIndex_systemIndexPermissionEnabled final Resolved resolved = createResolved(TEST_SYSTEM_INDEX); // Action - evaluator.evaluate(request, null, UNPROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); - evaluator.evaluate( - searchRequest, - null, - UNPROTECTED_ACTION, - resolved, - presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs - ); - evaluator.evaluate( - realtimeRequest, - null, - UNPROTECTED_ACTION, - resolved, - presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs - ); + evaluator.evaluate(request, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); + evaluator.evaluate(searchRequest, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); + evaluator.evaluate(realtimeRequest, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); verify(searchRequest).requestCache(Boolean.FALSE); verify(realtimeRequest).realtime(Boolean.FALSE); @@ -491,7 +422,7 @@ public void testProtectedActionLocalAll_systemIndexDisabled() { final Resolved resolved = Resolved._LOCAL_ALL; // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); verify(auditLog).logSecurityIndexAttempt(request, PROTECTED_ACTION, task); assertThat(presponse.allowed, is(false)); @@ -505,7 +436,7 @@ public void testProtectedActionLocalAll_systemIndexPermissionDisabled() { final Resolved resolved = Resolved._LOCAL_ALL; // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); verify(auditLog).logSecurityIndexAttempt(request, PROTECTED_ACTION, task); assertThat(presponse.allowed, is(false)); @@ -519,7 +450,7 @@ public void testProtectedActionLocalAll_systemIndexPermissionEnabled() { final Resolved resolved = Resolved._LOCAL_ALL; // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); verify(auditLog).logSecurityIndexAttempt(request, PROTECTED_ACTION, task); assertThat(presponse.allowed, is(false)); @@ -533,7 +464,7 @@ public void testProtectedActionOnRegularIndex_systemIndexDisabled() { final Resolved resolved = createResolved(TEST_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); assertThat(presponse.allowed, is(false)); } @@ -544,7 +475,7 @@ public void testProtectedActionOnRegularIndex_systemIndexPermissionDisabled() { final Resolved 
resolved = createResolved(TEST_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); assertThat(presponse.allowed, is(false)); } @@ -555,7 +486,7 @@ public void testProtectedActionOnRegularIndex_systemIndexPermissionEnabled() { final Resolved resolved = createResolved(TEST_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); assertThat(presponse.allowed, is(false)); } @@ -566,7 +497,7 @@ public void testProtectedActionOnSystemIndex_systemIndexDisabled() { final Resolved resolved = createResolved(TEST_SYSTEM_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); assertThat(presponse.allowed, is(false)); } @@ -577,7 +508,7 @@ public void testProtectedActionOnSystemIndex_systemIndexPermissionDisabled() { final Resolved resolved = createResolved(TEST_SYSTEM_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); verify(auditLog).logSecurityIndexAttempt(request, PROTECTED_ACTION, task); assertThat(presponse.allowed, is(false)); @@ -591,13 +522,18 @@ public void testProtectedActionOnSystemIndex_systemIndexPermissionEnabled_withou final Resolved resolved = createResolved(TEST_SYSTEM_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); verify(auditLog).logSecurityIndexAttempt(request, PROTECTED_ACTION, task); assertThat(presponse.allowed, is(false)); verify(presponse).markComplete(); verify(log).isInfoEnabled(); - verify(log).info("No {} permission for user roles {} to System Indices {}", PROTECTED_ACTION, securityRoles, TEST_SYSTEM_INDEX); + verify(log).info( + "No {} permission for user roles {} to System Indices {}", + PROTECTED_ACTION, + user.getSecurityRoles(), + TEST_SYSTEM_INDEX + ); } @Test @@ -607,7 +543,7 @@ public void testProtectedActionOnSystemIndex_systemIndexPermissionEnabled_withSy final Resolved resolved = createResolved(TEST_SYSTEM_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); assertThat(presponse.allowed, is(false)); } @@ -618,7 +554,7 @@ public void testProtectedActionOnProtectedSystemIndex_systemIndexDisabled() { final Resolved resolved = createResolved(SECURITY_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, 
ctx(PROTECTED_ACTION), actionPrivileges, user); verify(auditLog).logSecurityIndexAttempt(request, PROTECTED_ACTION, task); assertThat(presponse.allowed, is(false)); @@ -633,7 +569,7 @@ public void testProtectedActionOnProtectedSystemIndex_systemIndexPermissionDisab final Resolved resolved = createResolved(SECURITY_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); verify(auditLog).logSecurityIndexAttempt(request, PROTECTED_ACTION, task); assertThat(presponse.allowed, is(false)); @@ -668,14 +604,19 @@ private void testSecurityIndexAccess(String action) { final Resolved resolved = createResolved(SECURITY_INDEX); // Action - evaluator.evaluate(request, task, action, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, action, resolved, presponse, ctx(action), actionPrivileges, user); verify(auditLog).logSecurityIndexAttempt(request, action, task); assertThat(presponse.allowed, is(false)); verify(presponse).markComplete(); verify(log).isInfoEnabled(); - verify(log).info("{} not permitted for a regular user {} on protected system indices {}", action, securityRoles, SECURITY_INDEX); + verify(log).info( + "{} not permitted for a regular user {} on protected system indices {}", + action, + user.getSecurityRoles(), + SECURITY_INDEX + ); } private Resolved createResolved(final String... indexes) { diff --git a/src/test/java/org/opensearch/security/securityconf/impl/v7/IndexPatternTests.java b/src/test/java/org/opensearch/security/securityconf/impl/v7/IndexPatternTests.java deleted file mode 100644 index 4ea364f663..0000000000 --- a/src/test/java/org/opensearch/security/securityconf/impl/v7/IndexPatternTests.java +++ /dev/null @@ -1,260 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.security.securityconf.impl.v7; - -import java.util.Arrays; -import java.util.Set; -import java.util.TreeMap; - -import com.google.common.collect.ImmutableSet; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; - -import org.opensearch.action.support.IndicesOptions; -import org.opensearch.cluster.ClusterState; -import org.opensearch.cluster.metadata.IndexAbstraction; -import org.opensearch.cluster.metadata.IndexAbstraction.Type; -import org.opensearch.cluster.metadata.IndexNameExpressionResolver; -import org.opensearch.cluster.metadata.Metadata; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.security.securityconf.ConfigModelV7.IndexPattern; -import org.opensearch.security.user.User; - -import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; -import org.mockito.quality.Strictness; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.contains; -import static org.junit.Assert.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; -import static org.mockito.Mockito.withSettings; - -@RunWith(MockitoJUnitRunner.class) -public class IndexPatternTests { - - @Mock - private User user; - @Mock - private IndexNameExpressionResolver resolver; - @Mock - private ClusterService clusterService; - - private IndexPattern ip; - - @Before - public void before() { - ip = spy(new IndexPattern("defaultPattern")); - } - - @After - public void after() { - verifyNoMoreInteractions(user, resolver, clusterService); - } - - @Test - public void testCtor() { - assertThrows(NullPointerException.class, () -> new IndexPattern(null)); - } - - /** Ensure that concreteIndexNames sends correct parameters are sent to getResolvedIndexPattern */ - @Test - public void testConcreteIndexNamesOverload() { - doReturn(ImmutableSet.of("darn")).when(ip).getResolvedIndexPattern(user, resolver, clusterService, false, false); - - final Set results = ip.concreteIndexNames(user, resolver, clusterService); - - assertThat(results, contains("darn")); - - verify(ip).getResolvedIndexPattern(user, resolver, clusterService, false, false); - verify(ip).concreteIndexNames(user, resolver, clusterService); - verifyNoMoreInteractions(ip); - } - - /** Ensure that attemptResolveIndexNames sends correct parameters are sent to getResolvedIndexPattern */ - @Test - public void testAttemptResolveIndexNamesOverload() { - doReturn(ImmutableSet.of("yarn")).when(ip).getResolvedIndexPattern(user, resolver, clusterService, true, false); - - final Set results = ip.attemptResolveIndexNames(user, resolver, clusterService); - - assertThat(results, contains("yarn")); - - verify(ip).getResolvedIndexPattern(user, resolver, clusterService, true, false); - verify(ip).attemptResolveIndexNames(user, resolver, clusterService); - verifyNoMoreInteractions(ip); - } - - /** Verify concreteIndexNames when there are no matches */ - @Test - public void testExactNameWithNoMatches() { - doReturn("index-17").when(ip).getUnresolvedIndexPattern(user); - when(clusterService.state()).thenReturn(mock(ClusterState.class)); - 
when(resolver.concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-17"))).thenReturn( - new String[] {} - ); - - final Set results = ip.concreteIndexNames(user, resolver, clusterService); - - assertThat(results, contains("index-17")); - - verify(clusterService).state(); - verify(ip).getUnresolvedIndexPattern(user); - verify(resolver).concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-17")); - } - - /** Verify concreteIndexNames on exact name matches */ - @Test - public void testExactName() { - doReturn("index-17").when(ip).getUnresolvedIndexPattern(user); - when(clusterService.state()).thenReturn(mock(ClusterState.class)); - when(resolver.concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-17"))).thenReturn( - new String[] { "resolved-index-17" } - ); - - final Set results = ip.concreteIndexNames(user, resolver, clusterService); - - assertThat(results, contains("resolved-index-17")); - - verify(clusterService).state(); - verify(ip).getUnresolvedIndexPattern(user); - verify(resolver).concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-17")); - } - - /** Verify concreteIndexNames on exact name matches */ - @Test - public void testExactNameOnClosedIndex() { - doReturn("index-17").when(ip).getUnresolvedIndexPattern(user); - when(clusterService.state()).thenReturn(mock(ClusterState.class)); - when(resolver.concreteIndexNames(any(), eq(IndicesOptions.lenientExpand()), eq(true), eq("index-17"))).thenReturn( - new String[] { "resolved-index-17" } - ); - - final Set results = ip.concreteIndexNames(user, resolver, clusterService, true); - - assertThat(results, contains("resolved-index-17")); - - verify(clusterService).state(); - verify(ip).getUnresolvedIndexPattern(user); - verify(resolver).concreteIndexNames(any(), eq(IndicesOptions.lenientExpand()), eq(true), eq("index-17")); - } - - /** Verify concreteIndexNames on multiple matches */ - @Test - public void testMultipleConcreteIndices() { - doReturn("index-1*").when(ip).getUnresolvedIndexPattern(user); - doReturn(createClusterState()).when(clusterService).state(); - when(resolver.concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-1*"))).thenReturn( - new String[] { "resolved-index-17", "resolved-index-18" } - ); - - final Set results = ip.concreteIndexNames(user, resolver, clusterService); - - assertThat(results, contains("resolved-index-17", "resolved-index-18")); - - verify(clusterService, times(2)).state(); - verify(ip).getUnresolvedIndexPattern(user); - verify(resolver).concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-1*")); - } - - /** Verify concreteIndexNames when there is an alias */ - @Test - public void testMultipleConcreteIndicesWithOneAlias() { - doReturn("index-1*").when(ip).getUnresolvedIndexPattern(user); - - doReturn( - createClusterState( - new IndexShorthand("index-100", Type.ALIAS), // Name and type match - new IndexShorthand("19", Type.ALIAS) // Type matches/wrong name - ) - ).when(clusterService).state(); - when(resolver.concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-100"))).thenReturn( - new String[] { "resolved-index-100" } - ); - when(resolver.concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-1*"))).thenReturn( - new String[] { "resolved-index-17", "resolved-index-18" } - ); - - final Set results = ip.concreteIndexNames(user, resolver, 
clusterService); - - assertThat(results, contains("resolved-index-100", "resolved-index-17", "resolved-index-18")); - - verify(clusterService, times(3)).state(); - verify(ip).getUnresolvedIndexPattern(user); - verify(resolver).concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-100")); - verify(resolver).concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-1*")); - } - - /** Verify attemptResolveIndexNames with multiple aliases */ - @Test - public void testMultipleConcreteAliasedAndUnresolved() { - doReturn("index-1*").when(ip).getUnresolvedIndexPattern(user); - doReturn( - createClusterState( - new IndexShorthand("index-100", Type.ALIAS), // Name and type match - new IndexShorthand("index-101", Type.ALIAS), // Name and type match - new IndexShorthand("19", Type.ALIAS) // Type matches/wrong name - ) - ).when(clusterService).state(); - when(resolver.concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-100"), eq("index-101"))) - .thenReturn(new String[] { "resolved-index-100", "resolved-index-101" }); - when(resolver.concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-1*"))).thenReturn( - new String[] { "resolved-index-17", "resolved-index-18" } - ); - - final Set results = ip.attemptResolveIndexNames(user, resolver, clusterService); - - assertThat(results, contains("resolved-index-100", "resolved-index-101", "resolved-index-17", "resolved-index-18", "index-1*")); - - verify(clusterService, times(3)).state(); - verify(ip).getUnresolvedIndexPattern(user); - verify(resolver).concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-100"), eq("index-101")); - verify(resolver).concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-1*")); - } - - private ClusterState createClusterState(final IndexShorthand... indices) { - final TreeMap indexMap = new TreeMap(); - Arrays.stream(indices).forEach(indexShorthand -> { - final IndexAbstraction indexAbstraction = mock(IndexAbstraction.class); - when(indexAbstraction.getType()).thenReturn(indexShorthand.type); - indexMap.put(indexShorthand.name, indexAbstraction); - }); - - final Metadata mockMetadata = mock(Metadata.class, withSettings().strictness(Strictness.LENIENT)); - when(mockMetadata.getIndicesLookup()).thenReturn(indexMap); - - final ClusterState mockClusterState = mock(ClusterState.class, withSettings().strictness(Strictness.LENIENT)); - when(mockClusterState.getMetadata()).thenReturn(mockMetadata); - - return mockClusterState; - } - - private class IndexShorthand { - public final String name; - public final Type type; - - public IndexShorthand(final String name, final Type type) { - this.name = name; - this.type = type; - } - } -} diff --git a/src/test/java/org/opensearch/security/util/MockIndexMetadataBuilder.java b/src/test/java/org/opensearch/security/util/MockIndexMetadataBuilder.java new file mode 100644 index 0000000000..e8af0f1384 --- /dev/null +++ b/src/test/java/org/opensearch/security/util/MockIndexMetadataBuilder.java @@ -0,0 +1,194 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.util; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import com.google.common.collect.ImmutableMap; + +import org.opensearch.Version; +import org.opensearch.cluster.metadata.AliasMetadata; +import org.opensearch.cluster.metadata.DataStream; +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.index.Index; + +/** + * Creates mocks of org.opensearch.cluster.metadata.IndexAbstraction maps. Useful for unit testing code which + * operates on index metadata. + * + * TODO: This is the evil twin of the same class in the integrationTest module. Possibly tests depending on this + * should be moved to the integrationTest module? + */ +public class MockIndexMetadataBuilder { + + private final static Settings INDEX_SETTINGS = Settings.builder() + .put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), Version.CURRENT) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) + .build(); + + private Map nameToIndexAbstractionMap = new HashMap<>(); + private Map nameToIndexMetadataMap = new HashMap<>(); + private Map> indicesToAliases = new HashMap<>(); + private Map> aliasesToIndices = new HashMap<>(); + + public static MockIndexMetadataBuilder indices(String... indices) { + MockIndexMetadataBuilder builder = new MockIndexMetadataBuilder(); + + for (String index : indices) { + builder.index(index); + } + + return builder; + } + + public static MockIndexMetadataBuilder dataStreams(String... dataStreams) { + MockIndexMetadataBuilder builder = new MockIndexMetadataBuilder(); + + for (String dataStream : dataStreams) { + builder.dataStream(dataStream); + } + + return builder; + } + + public ImmutableMap build() { + Map aliasMetadataMap = new HashMap<>(); + + for (Map.Entry> aliasEntry : this.aliasesToIndices.entrySet()) { + String alias = aliasEntry.getKey(); + AliasMetadata aliasMetadata = AliasMetadata.builder(alias).build(); + aliasMetadataMap.put(alias, aliasMetadata); + } + + for (Map.Entry> indexEntry : this.indicesToAliases.entrySet()) { + String index = indexEntry.getKey(); + Set aliases = indexEntry.getValue(); + + IndexMetadata.Builder indexMetadataBuilder = IndexMetadata.builder(index).settings(INDEX_SETTINGS); + + for (String alias : aliases) { + indexMetadataBuilder.putAlias(aliasMetadataMap.get(alias)); + } + + IndexMetadata indexMetadata = indexMetadataBuilder.build(); + nameToIndexMetadataMap.put(index, indexMetadata); + nameToIndexAbstractionMap.put(index, new IndexAbstraction.Index(indexMetadata)); + } + + for (Map.Entry> aliasEntry : this.aliasesToIndices.entrySet()) { + String alias = aliasEntry.getKey(); + Set indices = aliasEntry.getValue(); + AliasMetadata aliasMetadata = aliasMetadataMap.get(alias); + + String firstIndex = indices.iterator().next(); + indices.remove(firstIndex); + + IndexMetadata firstIndexMetadata = nameToIndexMetadataMap.get(firstIndex); + IndexAbstraction.Alias indexAbstraction = new IndexAbstraction.Alias(aliasMetadata, firstIndexMetadata); + + for (String index : indices) { + indexAbstraction.getIndices().add(nameToIndexMetadataMap.get(index)); + } + + nameToIndexAbstractionMap.put(alias, indexAbstraction); + } + + return ImmutableMap.copyOf(this.nameToIndexAbstractionMap); + } + + public 
MockIndexMetadataBuilder index(String index) { + if (!this.indicesToAliases.containsKey(index)) { + this.indicesToAliases.put(index, new HashSet<>()); + } + return this; + } + + public AliasBuilder alias(String alias) { + return new AliasBuilder(alias); + } + + public MockIndexMetadataBuilder dataStream(String dataStream) { + return dataStream(dataStream, 3); + } + + public MockIndexMetadataBuilder dataStream(String dataStream, int generations) { + List backingIndices = new ArrayList<>(); + + for (int i = 1; i <= generations; i++) { + String backingIndexName = DataStream.getDefaultBackingIndexName(dataStream, i); + backingIndices.add(new Index(backingIndexName, backingIndexName)); + } + + DataStream dataStreamMetadata = new DataStream(dataStream, new DataStream.TimestampField("@timestamp"), backingIndices); + IndexAbstraction.DataStream dataStreamIndexAbstraction = new IndexAbstraction.DataStream( + dataStreamMetadata, + backingIndices.stream().map(i -> getIndexMetadata(i.getName())).collect(Collectors.toList()) + ); + this.nameToIndexAbstractionMap.put(dataStream, dataStreamIndexAbstraction); + + for (Index backingIndex : backingIndices) { + this.nameToIndexAbstractionMap.put( + backingIndex.getName(), + new IndexAbstraction.Index(getIndexMetadata(backingIndex.getName()), dataStreamIndexAbstraction) + ); + } + + return this; + } + + private IndexMetadata getIndexMetadata(String index) { + IndexMetadata result = this.nameToIndexMetadataMap.get(index); + + if (result == null) { + result = IndexMetadata.builder(index) + .settings(Settings.builder().put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + this.nameToIndexMetadataMap.put(index, result); + } + + return result; + } + + public class AliasBuilder { + private String alias; + + private AliasBuilder(String alias) { + this.alias = alias; + } + + public MockIndexMetadataBuilder of(String firstIndex, String... moreIndices) { + MockIndexMetadataBuilder.this.indicesToAliases.computeIfAbsent(firstIndex, (k) -> new HashSet<>()).add(this.alias); + + Set indices = new HashSet<>(); + indices.add(firstIndex); + + for (String index : moreIndices) { + MockIndexMetadataBuilder.this.indicesToAliases.computeIfAbsent(index, (k) -> new HashSet<>()).add(this.alias); + indices.add(index); + } + + MockIndexMetadataBuilder.this.aliasesToIndices.put(this.alias, indices); + + return MockIndexMetadataBuilder.this; + } + } +}
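
For reviewers, a minimal usage sketch of how the new MockIndexMetadataBuilder is meant to feed ActionPrivileges, based only on the builder API and the ActionPrivileges constructor call that appear in this diff. The index, alias, and role names, the wrapper class name, and the restored generic type parameters (this rendering of the diff drops them) are illustrative assumptions, not part of the change itself:

// Hypothetical sketch, not part of this PR; names and generics are assumed for illustration.
package org.opensearch.security.privileges;

import com.google.common.collect.ImmutableMap;

import org.opensearch.cluster.metadata.IndexAbstraction;
import org.opensearch.common.settings.Settings;
import org.opensearch.security.securityconf.FlattenedActionGroups;
import org.opensearch.security.securityconf.impl.CType;
import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration;
import org.opensearch.security.securityconf.impl.v7.RoleV7;
import org.opensearch.security.util.MockIndexMetadataBuilder;

public class MockIndexMetadataBuilderUsageSketch {

    static ActionPrivileges exampleActionPrivileges() throws Exception {
        // Mocked cluster metadata: two plain indices plus one alias spanning both,
        // built with the fluent API added in MockIndexMetadataBuilder above.
        ImmutableMap<String, IndexAbstraction> indexMetadata = MockIndexMetadataBuilder.indices("index_a", "index_b")
            .alias("alias_ab")
            .of("index_a", "index_b")
            .build();

        // A roles configuration parsed from YAML, mirroring the style used in the tests above.
        SecurityDynamicConfiguration<RoleV7> roles = SecurityDynamicConfiguration.fromYaml(
            "test_role:\n"
                + "  index_permissions:\n"
                + "  - index_patterns:\n"
                + "    - 'index_*'\n"
                + "    allowed_actions:\n"
                + "    - 'indices:data/read/search'\n",
            CType.ROLES
        );

        // ActionPrivileges takes the index metadata as a supplier, so tests can swap the
        // mocked cluster view without rebuilding the privileges object.
        return new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, () -> indexMetadata, Settings.EMPTY);
    }
}

The resulting ActionPrivileges instance can then be handed to evaluators such as SystemIndexAccessEvaluator, as the refactored tests in this diff do, replacing the previous reflection-based SecurityRoles setup.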