Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[stag] GOV-1295 Remove unwanted logging #2311

Merged
merged 12 commits into the base branch from the source branch on
Aug 28, 2023
35 changes: 35 additions & 0 deletions addons/policies/bootstrap_heka_policies.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
{
"entities":
[
{
"typeName": "AuthPolicy",
"attributes":
{
"name": "DENY_DATA_ACCESS_GUEST",
"qualifiedName": "DENY_DATA_ACCESS_GUEST",
"description": "deny data access for guest users",
"policyCategory": "bootstrap",
"policySubCategory": "default",
"policyServiceName": "heka",
"policyType": "deny",
"policyPriority": 1,
"policyUsers": [],
"policyGroups": [],
"policyRoles":
[
"$guest"
],
"policyResourceCategory": "ENTITY",
"policyResources":
[
"entity:*",
"entity-type:*"
],
"policyActions":
[
"select"
]
}
}
]
}
Original file line number Diff line number Diff line change
Expand Up @@ -111,12 +111,12 @@
<!-- Log4j audit provider configuration -->
<property>
<name>xasecure.audit.destination.log4j</name>
<value>true</value>
<value>false</value>
</property>

<property>
<name>xasecure.audit.destination.log4j.logger</name>
<value>AUDIT</value>
<value>AUTH_AUDIT</value>
</property>

<!-- ElasticSearch audit provider configuration -->
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -123,8 +123,6 @@ public RangerAccessResult evaluatePolicies(RangerAccessRequest request, String p
String requestHashCode = Integer.toHexString(System.identityHashCode(request)) + "_" + policyType;

perf = RangerPerfTracer.getPerfTracer(PERF_POLICYENGINE_REQUEST_LOG, "RangerPolicyEngine.evaluatePolicies(requestHashCode=" + requestHashCode + ")");

LOG.info("RangerPolicyEngineImpl.evaluatePolicies(" + requestHashCode + ", " + request + ")");
}

RangerAccessResult ret;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ public class RangerPerfTracer {
private final static String tagEndMarker = "(";

public static Log getPerfLogger(String name) {
return LogFactory.getLog("org.apache.atlas.perf." + name);
return LogFactory.getLog("org.apache.atlas.auth.perf." + name);
}

public static Log getPerfLogger(Class<?> cls) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,6 @@ public AbstractKeycloakClient(KeycloakConfig keycloakConfig) {
okhttp3.Response response = chain.proceed(request);
this.metricUtils.recordHttpTimer(timerSample, request.method(), rawPath, response.code(),
INTEGRATION, KEYCLOAK);
LOG.info("Keycloak: Request for url {} Status:{}", request.url(), response.code());
return response;
};

Expand Down Expand Up @@ -135,10 +134,11 @@ protected <T> retrofit2.Response<T> processResponse(retrofit2.Call<T> req) throw
return response;
}
String errMsg = response.errorBody().string();
LOG.error("Keycloak: Client request processing failed code {} message:{}", response.code(), errMsg);
LOG.error("Keycloak: Client request processing failed code {} message:{}, request: {} {}",
response.code(), errMsg, req.request().method(), req.request().url());
throw new AtlasBaseException(ERROR_CODE_MAP.getOrDefault(response.code(), BAD_REQUEST), errMsg);
} catch (Exception e) {
LOG.error("Keycloak: request failed", e);
LOG.error("Keycloak: request failed, request: {} {}, Exception: {}", req.request().method(), req.request().url(), e);
throw new AtlasBaseException(BAD_REQUEST, "Keycloak request failed");
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -991,7 +991,6 @@ public AtlasSearchResult directIndexSearch(SearchParams searchParams) throws Atl
}

String indexName = getIndexName(params);
LOG.info("directIndexSearch.indexName {}", indexName);

indexQuery = graph.elasticsearchQuery(indexName);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -284,26 +284,23 @@ private AtlasLineageOnDemandInfo getLineageInfoOnDemand(String guid, AtlasLineag
LineageOnDemandConstraints lineageConstraintsByGuid = getAndValidateLineageConstraintsByGuid(guid, atlasLineageOnDemandContext);
AtlasLineageOnDemandInfo.LineageDirection direction = lineageConstraintsByGuid.getDirection();
int depth = lineageConstraintsByGuid.getDepth();

AtlasLineageOnDemandInfo ret = initializeLineageOnDemandInfo(guid);

if (depth == 0) {
if (depth == 0)
depth = -1;
}

if (!ret.getRelationsOnDemand().containsKey(guid)) {
if (!ret.getRelationsOnDemand().containsKey(guid))
ret.getRelationsOnDemand().put(guid, new LineageInfoOnDemand(lineageConstraintsByGuid));
}

AtomicInteger inputEntitiesTraversed = new AtomicInteger(0);
AtomicInteger outputEntitiesTraversed = new AtomicInteger(0);
if (isDataSet) {
AtlasVertex datasetVertex = AtlasGraphUtilsV2.findByGuid(this.graph, guid);
if (direction == AtlasLineageOnDemandInfo.LineageDirection.INPUT || direction == AtlasLineageOnDemandInfo.LineageDirection.BOTH) {
if (direction == AtlasLineageOnDemandInfo.LineageDirection.INPUT || direction == AtlasLineageOnDemandInfo.LineageDirection.BOTH)
traverseEdgesOnDemand(datasetVertex, true, depth, new HashSet<>(), atlasLineageOnDemandContext, ret, guid, inputEntitiesTraversed);
}
if (direction == AtlasLineageOnDemandInfo.LineageDirection.OUTPUT || direction == AtlasLineageOnDemandInfo.LineageDirection.BOTH) {
if (direction == AtlasLineageOnDemandInfo.LineageDirection.OUTPUT || direction == AtlasLineageOnDemandInfo.LineageDirection.BOTH)
traverseEdgesOnDemand(datasetVertex, false, depth, new HashSet<>(), atlasLineageOnDemandContext, ret, guid, outputEntitiesTraversed);
}
AtlasEntityHeader baseEntityHeader = entityRetriever.toAtlasEntityHeader(datasetVertex, atlasLineageOnDemandContext.getAttributes());
ret.getGuidEntityMap().put(guid, baseEntityHeader);
} else {
AtlasVertex processVertex = AtlasGraphUtilsV2.findByGuid(this.graph, guid);
// make one hop to the next dataset vertices from process vertex and traverse with 'depth = depth - 1'
Expand All @@ -317,7 +314,6 @@ private AtlasLineageOnDemandInfo getLineageInfoOnDemand(String guid, AtlasLineag
}
}
RequestContext.get().endMetricRecord(metricRecorder);

return ret;
}

Expand Down
Loading