Skip to content

Commit

Permalink
Merge remote-tracking branch 'datahub-project/feat/elasticsearch-opti…
Browse files Browse the repository at this point in the history
…mization-ext' into david-leifker/elasticsearch-optimization-ext
  • Loading branch information
david-leifker committed Jan 9, 2023
2 parents 68825c9 + 0548e77 commit d88d930
Show file tree
Hide file tree
Showing 120 changed files with 8,264 additions and 3,484 deletions.
10 changes: 9 additions & 1 deletion .github/workflows/build-and-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,14 @@ concurrency:

jobs:
build:
strategy:
matrix:
command:
[
"./gradlew build -x :metadata-ingestion:build -x :metadata-ingestion:check -x docs-website:build -x :metadata-integration:java:spark-lineage:test -x :metadata-io:test -x :metadata-ingestion-modules:airflow-plugin:build -x :datahub-frontend:build -x :datahub-web-react:build --parallel",
"./gradlew :datahub-frontend:build :datahub-web-react:build --parallel",
"./gradlew :metadata-ingestion-modules:airflow-plugin:build --parallel"
]
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
Expand All @@ -37,7 +45,7 @@ jobs:
python-version: "3.7"
- name: Gradle build (and test)
run: |
./gradlew build -x :metadata-ingestion:build -x :metadata-ingestion:check -x docs-website:build -x :metadata-integration:java:spark-lineage:test -x :metadata-io:test
${{ matrix.command }}
- uses: actions/upload-artifact@v3
if: always()
with:
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/metadata-ingestion.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ on:
branches:
- master
paths:
- ".github/**"
- ".github/workflows/metadata-ingestion.yml"
- "metadata-ingestion/**"
- "metadata-models/**"
pull_request:
Expand Down
1 change: 1 addition & 0 deletions datahub-frontend/app/client/KafkaTrackingProducer.java
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,7 @@ private static KafkaProducer createKafkaProducer(Config config) {
setConfig(config, props, SaslConfigs.SASL_JAAS_CONFIG, "analytics.kafka.sasl.jaas.config");
setConfig(config, props, SaslConfigs.SASL_KERBEROS_SERVICE_NAME, "analytics.kafka.sasl.kerberos.service.name");
setConfig(config, props, SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "analytics.kafka.sasl.login.callback.handler.class");
setConfig(config, props, SaslConfigs.SASL_CLIENT_CALLBACK_HANDLER_CLASS, "analytics.kafka.sasl.client.callback.handler.class");
}
}

Expand Down
1 change: 1 addition & 0 deletions datahub-frontend/conf/application.conf
Original file line number Diff line number Diff line change
Expand Up @@ -210,6 +210,7 @@ analytics.kafka.sasl.mechanism = ${?KAFKA_PROPERTIES_SASL_MECHANISM}
analytics.kafka.sasl.jaas.config = ${?KAFKA_PROPERTIES_SASL_JAAS_CONFIG}
analytics.kafka.sasl.kerberos.service.name = ${?KAFKA_PROPERTIES_SASL_KERBEROS_SERVICE_NAME}
analytics.kafka.sasl.login.callback.handler.class = ${?KAFKA_PROPERTIES_SASL_LOGIN_CALLBACK_HANDLER_CLASS}
analytics.kafka.sasl.client.callback.handler.class = ${?KAFKA_PROPERTIES_SASL_CLIENT_CALLBACK_HANDLER_CLASS}

# Required Elastic Client Configuration
analytics.elastic.host = ${?ELASTIC_CLIENT_HOST}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -111,6 +111,7 @@
import com.linkedin.datahub.graphql.resolvers.domain.ListDomainsResolver;
import com.linkedin.datahub.graphql.resolvers.domain.SetDomainResolver;
import com.linkedin.datahub.graphql.resolvers.domain.UnsetDomainResolver;
import com.linkedin.datahub.graphql.resolvers.embed.UpdateEmbedResolver;
import com.linkedin.datahub.graphql.resolvers.entity.EntityExistsResolver;
import com.linkedin.datahub.graphql.resolvers.entity.EntityPrivilegesResolver;
import com.linkedin.datahub.graphql.resolvers.glossary.AddRelatedTermsResolver;
Expand Down Expand Up @@ -852,6 +853,7 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) {
.dataFetcher("updateGlobalViewsSettings", new UpdateGlobalViewsSettingsResolver(this.settingsService))
.dataFetcher("updateCorpUserViewsSettings", new UpdateCorpUserViewsSettingsResolver(this.settingsService))
.dataFetcher("updateLineage", new UpdateLineageResolver(this.entityService, this.lineageService))
.dataFetcher("updateEmbed", new UpdateEmbedResolver(this.entityService))
);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import com.linkedin.datahub.graphql.generated.ManagedIngestionConfig;
import com.linkedin.datahub.graphql.generated.PoliciesConfig;
import com.linkedin.datahub.graphql.generated.Privilege;
import com.linkedin.datahub.graphql.generated.QueriesTabConfig;
import com.linkedin.datahub.graphql.generated.ResourcePrivileges;
import com.linkedin.datahub.graphql.generated.TelemetryConfig;
import com.linkedin.datahub.graphql.generated.TestsConfig;
Expand Down Expand Up @@ -122,6 +123,11 @@ public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environmen
visualConfig.setLogoUrl(_visualConfiguration.getAssets().getLogoUrl());
visualConfig.setFaviconUrl(_visualConfiguration.getAssets().getFaviconUrl());
}
if (_visualConfiguration != null && _visualConfiguration.getQueriesTab() != null) {
QueriesTabConfig queriesTabConfig = new QueriesTabConfig();
queriesTabConfig.setQueriesTabResultSize(_visualConfiguration.getQueriesTab().getQueriesTabResultSize());
visualConfig.setQueriesTab(queriesTabConfig);
}
appConfig.setVisualConfig(visualConfig);

final TelemetryConfig telemetryConfig = new TelemetryConfig();
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
package com.linkedin.datahub.graphql.resolvers.embed;

import com.linkedin.common.AuditStamp;
import com.linkedin.common.Embed;
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.data.template.SetMode;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.exception.AuthorizationException;
import com.linkedin.datahub.graphql.generated.UpdateEmbedInput;
import com.linkedin.datahub.graphql.resolvers.mutate.util.EmbedUtils;
import com.linkedin.events.metadata.ChangeType;
import com.linkedin.metadata.Constants;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.utils.GenericRecordUtils;
import com.linkedin.mxe.MetadataChangeProposal;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import java.util.concurrent.CompletableFuture;
import javax.annotation.Nonnull;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;


/**
 * GraphQL mutation resolver that updates the embed render URL for an asset.
 *
 * Authorization is checked per entity via {@link EmbedUtils}; on success the
 * merged {@link Embed} aspect is upserted through the {@link EntityService}.
 */
@Slf4j
@RequiredArgsConstructor
public class UpdateEmbedResolver implements DataFetcher<CompletableFuture<Boolean>> {

  private final EntityService _entityService;

  @Override
  public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {

    final QueryContext context = environment.getContext();
    final UpdateEmbedInput input = bindArgument(environment.getArgument("input"), UpdateEmbedInput.class);
    final Urn entityUrn = UrnUtils.getUrn(input.getUrn());

    return CompletableFuture.supplyAsync(() -> {

      // Reuse the context captured above rather than re-reading it from the environment.
      if (!EmbedUtils.isAuthorizedToUpdateEmbedForEntity(entityUrn, context)) {
        throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
      }
      validateUpdateEmbedInput(
          input,
          _entityService
      );
      try {
        // Fetch the existing aspect (or a fresh default) so fields not present in
        // the input are preserved on upsert.
        final Embed embed = (Embed) getAspectFromEntity(
            entityUrn.toString(),
            Constants.EMBED_ASPECT_NAME,
            _entityService,
            new Embed());

        updateEmbed(embed, input);

        final MetadataChangeProposal proposal = new MetadataChangeProposal();
        proposal.setEntityUrn(entityUrn);
        proposal.setEntityType(entityUrn.getEntityType());
        proposal.setAspectName(Constants.EMBED_ASPECT_NAME);
        proposal.setAspect(GenericRecordUtils.serializeAspect(embed));
        proposal.setChangeType(ChangeType.UPSERT);
        _entityService.ingestProposal(
            proposal,
            new AuditStamp().setActor(UrnUtils.getUrn(context.getActorUrn())).setTime(System.currentTimeMillis()),
            false
        );
        return true;
      } catch (Exception e) {
        // Message grammar fixed: original read "for to resource".
        throw new RuntimeException(String.format("Failed to update Embed for resource with entity urn %s", entityUrn), e);
      }
    });
  }

  /**
   * Validates an instance of {@link UpdateEmbedInput}, and throws an {@link IllegalArgumentException} if the input
   * is not valid.
   *
   * For an input to be valid, the target URN must exist.
   *
   * @param input the input to validate
   * @param entityService an instance of {@link EntityService} used to validate the input.
   */
  private static void validateUpdateEmbedInput(@Nonnull final UpdateEmbedInput input, @Nonnull final EntityService entityService) {
    if (!entityService.exists(UrnUtils.getUrn(input.getUrn()))) {
      throw new IllegalArgumentException(
          String.format("Failed to update embed for entity with urn %s. Entity does not exist!", input.getUrn()));
    }
  }

  /**
   * Applies an instance of {@link UpdateEmbedInput} to a base instance of {@link Embed}.
   *
   * @param embed an embed to update
   * @param input the updates to apply
   */
  private static void updateEmbed(@Nonnull final Embed embed, @Nonnull final UpdateEmbedInput input) {
    // IGNORE_NULL: a null renderUrl in the input leaves the existing value untouched.
    embed.setRenderUrl(input.getRenderUrl(), SetMode.IGNORE_NULL);
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import com.linkedin.datahub.graphql.generated.Entity;
import com.linkedin.datahub.graphql.generated.EntityPrivileges;
import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils;
import com.linkedin.datahub.graphql.resolvers.mutate.util.EmbedUtils;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.Constants;
import com.linkedin.metadata.authorization.PoliciesConfig;
Expand Down Expand Up @@ -110,18 +111,21 @@ private boolean canEditEntityLineage(Urn urn, QueryContext context) {
// Builds the per-entity privilege flags for a Dataset: both lineage-edit and
// embed-edit are resolved against the caller's authorization for this URN.
private EntityPrivileges getDatasetPrivileges(Urn urn, QueryContext context) {
final EntityPrivileges result = new EntityPrivileges();
result.setCanEditLineage(canEditEntityLineage(urn, context));
result.setCanEditEmbed(EmbedUtils.isAuthorizedToUpdateEmbedForEntity(urn, context));
return result;
}

// Builds the per-entity privilege flags for a Chart: both lineage-edit and
// embed-edit are resolved against the caller's authorization for this URN.
private EntityPrivileges getChartPrivileges(Urn urn, QueryContext context) {
final EntityPrivileges result = new EntityPrivileges();
result.setCanEditLineage(canEditEntityLineage(urn, context));
result.setCanEditEmbed(EmbedUtils.isAuthorizedToUpdateEmbedForEntity(urn, context));
return result;
}

// Builds the per-entity privilege flags for a Dashboard: both lineage-edit and
// embed-edit are resolved against the caller's authorization for this URN.
private EntityPrivileges getDashboardPrivileges(Urn urn, QueryContext context) {
final EntityPrivileges result = new EntityPrivileges();
result.setCanEditLineage(canEditEntityLineage(urn, context));
result.setCanEditEmbed(EmbedUtils.isAuthorizedToUpdateEmbedForEntity(urn, context));
return result;
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
package com.linkedin.datahub.graphql.resolvers.mutate.util;

import com.google.common.collect.ImmutableList;

import com.linkedin.common.urn.Urn;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
import com.linkedin.datahub.graphql.authorization.ConjunctivePrivilegeGroup;
import com.linkedin.datahub.graphql.authorization.DisjunctivePrivilegeGroup;
import com.linkedin.metadata.authorization.PoliciesConfig;
import javax.annotation.Nonnull;
import lombok.extern.slf4j.Slf4j;


/**
 * Static helpers for deciding whether an actor may edit the embed aspect
 * (render URL) of a given entity.
 */
@Slf4j
public class EmbedUtils {

  // Holders of the blanket edit-entity privilege may always update embeds.
  private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of(
      PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()
  ));

  private EmbedUtils() { }

  /**
   * Returns true when the actor in {@code context} is permitted to update the
   * embed render URL of the entity identified by {@code entityUrn}.
   */
  public static boolean isAuthorizedToUpdateEmbedForEntity(@Nonnull final Urn entityUrn, @Nonnull final QueryContext context) {
    final ConjunctivePrivilegeGroup editEmbedGroup =
        new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_EMBED_PRIVILEGE.getType()));
    // Either the blanket edit privilege or the embed-specific privilege suffices.
    final DisjunctivePrivilegeGroup requiredPrivileges =
        new DisjunctivePrivilegeGroup(ImmutableList.of(ALL_PRIVILEGES_GROUP, editEmbedGroup));

    return AuthorizationUtils.isAuthorized(
        context.getAuthorizer(),
        context.getActorUrn(),
        entityUrn.getEntityType(),
        entityUrn.toString(),
        requiredPrivileges);
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,8 @@ public class ChartType implements SearchableEntityType<Chart, String>, Browsable
DOMAINS_ASPECT_NAME,
DEPRECATION_ASPECT_NAME,
DATA_PLATFORM_INSTANCE_ASPECT_NAME,
INPUT_FIELDS_ASPECT_NAME
INPUT_FIELDS_ASPECT_NAME,
EMBED_ASPECT_NAME
);
private static final Set<String> FACET_FIELDS = ImmutableSet.of("access", "queryType", "tool", "type");

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import com.linkedin.chart.EditableChartProperties;
import com.linkedin.common.DataPlatformInstance;
import com.linkedin.common.Deprecation;
import com.linkedin.common.Embed;
import com.linkedin.common.GlobalTags;
import com.linkedin.common.GlossaryTerms;
import com.linkedin.common.InputFields;
Expand All @@ -26,6 +27,7 @@
import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper;
import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper;
import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper;
import com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper;
import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper;
import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper;
import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper;
Expand Down Expand Up @@ -89,7 +91,8 @@ public Chart apply(@Nonnull final EntityResponse entityResponse) {
dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))));
mappingHelper.mapToResult(INPUT_FIELDS_ASPECT_NAME, (chart, dataMap) ->
chart.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn)));

mappingHelper.mapToResult(EMBED_ASPECT_NAME, (chart, dataMap) ->
chart.setEmbed(EmbedMapper.map(new Embed(dataMap))));
return mappingHelper.getResult();
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
package com.linkedin.datahub.graphql.types.common.mappers;

import com.linkedin.datahub.graphql.generated.Embed;
import com.linkedin.datahub.graphql.types.mappers.ModelMapper;

import javax.annotation.Nonnull;

/**
 * Maps the metadata-model {@link com.linkedin.common.Embed} aspect to its
 * GraphQL-generated counterpart.
 */
public class EmbedMapper implements ModelMapper<com.linkedin.common.Embed, Embed> {

  public static final EmbedMapper INSTANCE = new EmbedMapper();

  /** Convenience entry point that delegates to the shared singleton. */
  public static Embed map(@Nonnull final com.linkedin.common.Embed metadata) {
    return INSTANCE.apply(metadata);
  }

  @Override
  public Embed apply(@Nonnull final com.linkedin.common.Embed input) {
    // Only the render URL is carried over; the aspect has no other mapped fields here.
    final Embed mapped = new Embed();
    mapped.setRenderUrl(input.getRenderUrl());
    return mapped;
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,8 @@ public class DashboardType implements SearchableEntityType<Dashboard, String>, B
DEPRECATION_ASPECT_NAME,
DATA_PLATFORM_INSTANCE_ASPECT_NAME,
INPUT_FIELDS_ASPECT_NAME,
SUB_TYPES_ASPECT_NAME
SUB_TYPES_ASPECT_NAME,
EMBED_ASPECT_NAME
);
private static final Set<String> FACET_FIELDS = ImmutableSet.of("access", "tool");

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import com.linkedin.common.DataPlatformInstance;
import com.linkedin.common.Deprecation;
import com.linkedin.common.Embed;
import com.linkedin.common.GlobalTags;
import com.linkedin.common.GlossaryTerms;
import com.linkedin.common.InputFields;
Expand All @@ -25,6 +26,7 @@
import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper;
import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper;
import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper;
import com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper;
import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper;
import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper;
import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper;
Expand Down Expand Up @@ -88,7 +90,8 @@ public Dashboard apply(@Nonnull final EntityResponse entityResponse) {
mappingHelper.mapToResult(INPUT_FIELDS_ASPECT_NAME, (dashboard, dataMap) ->
dashboard.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn)));
mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, this::mapSubTypes);

mappingHelper.mapToResult(EMBED_ASPECT_NAME, (dashboard, dataMap) ->
dashboard.setEmbed(EmbedMapper.map(new Embed(dataMap))));
return mappingHelper.getResult();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,8 @@ public class DatasetType implements SearchableEntityType<Dataset, String>, Brows
DOMAINS_ASPECT_NAME,
SCHEMA_METADATA_ASPECT_NAME,
DATA_PLATFORM_INSTANCE_ASPECT_NAME,
SIBLINGS_ASPECT_NAME
SIBLINGS_ASPECT_NAME,
EMBED_ASPECT_NAME
);

private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import com.linkedin.common.DataPlatformInstance;
import com.linkedin.common.Deprecation;
import com.linkedin.common.Embed;
import com.linkedin.common.GlobalTags;
import com.linkedin.common.GlossaryTerms;
import com.linkedin.common.InstitutionalMemory;
Expand All @@ -18,6 +19,7 @@
import com.linkedin.datahub.graphql.generated.FabricType;
import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper;
import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper;
import com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper;
import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper;
import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper;
import com.linkedin.datahub.graphql.types.common.mappers.SiblingsMapper;
Expand Down Expand Up @@ -101,7 +103,8 @@ public Dataset apply(@Nonnull final EntityResponse entityResponse) {
dataset.setSiblings(SiblingsMapper.map(new Siblings(dataMap))));
mappingHelper.mapToResult(UPSTREAM_LINEAGE_ASPECT_NAME, (dataset, dataMap) ->
dataset.setFineGrainedLineages(UpstreamLineagesMapper.map(new UpstreamLineage(dataMap))));

mappingHelper.mapToResult(EMBED_ASPECT_NAME, (dataset, dataMap) ->
dataset.setEmbed(EmbedMapper.map(new Embed(dataMap))));
return mappingHelper.getResult();
}

Expand Down
Loading

0 comments on commit d88d930

Please sign in to comment.