Merge branch 'datahub-project:master' into master
anshbansal authored Feb 8, 2024
2 parents f2c4134 + 4551164 commit 8f0c8e6
Showing 54 changed files with 818 additions and 2,854 deletions.
15 changes: 8 additions & 7 deletions build.gradle
@@ -30,7 +30,7 @@ buildscript {

ext.junitJupiterVersion = '5.6.1'
// Releases: https://github.com/linkedin/rest.li/blob/master/CHANGELOG.md
-ext.pegasusVersion = '29.48.4'
+ext.pegasusVersion = '29.51.0'
ext.mavenVersion = '3.6.3'
ext.springVersion = '6.1.2'
ext.springBootVersion = '3.2.1'
@@ -269,13 +269,14 @@ allprojects {
apply plugin: 'eclipse'
// apply plugin: 'org.gradlex.java-ecosystem-capabilities'

-tasks.withType(Test).configureEach {
-// https://docs.gradle.org/current/userguide/performance.html
-maxParallelForks = Runtime.runtime.availableProcessors().intdiv(2) ?: 1
+tasks.withType(Test).configureEach { task -> if (task.project.name != "metadata-io") {
+// https://docs.gradle.org/current/userguide/performance.html
+maxParallelForks = Runtime.runtime.availableProcessors().intdiv(2) ?: 1

-if (project.configurations.getByName("testImplementation").getDependencies()
-.any{ it.getName().contains("testng") }) {
-useTestNG()
+if (project.configurations.getByName("testImplementation").getDependencies()
+.any { it.getName().contains("testng") }) {
+useTestNG()
+}
}
}

3 changes: 0 additions & 3 deletions buildSrc/build.gradle
@@ -25,7 +25,4 @@ dependencies {

compileOnly 'org.projectlombok:lombok:1.18.30'
annotationProcessor 'org.projectlombok:lombok:1.18.30'

-// pegasus dependency, overrides for tasks
-implementation 'com.linkedin.pegasus:gradle-plugins:29.48.4'
}
2,444 changes: 0 additions & 2,444 deletions buildSrc/src/main/java/com/linkedin/pegasus/gradle/PegasusPlugin.java

This file was deleted.

This file was deleted.

@@ -2436,7 +2436,9 @@ private void configureAssertionResolvers(final RuntimeWiring.Builder builder) {
? assertion.getDataPlatformInstance().getUrn()
: null;
}))
.dataFetcher("runEvents", new AssertionRunEventResolver(entityClient)));
.dataFetcher("runEvents", new AssertionRunEventResolver(entityClient))
.dataFetcher(
"aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)));
}

private void configurePolicyResolvers(final RuntimeWiring.Builder builder) {
@@ -37,7 +37,12 @@ public class WeaklyTypedAspectsResolver implements DataFetcher<CompletableFuture
private static final JacksonDataCodec CODEC = new JacksonDataCodec();

private boolean shouldReturnAspect(AspectSpec aspectSpec, AspectParams params) {
-return !params.getAutoRenderOnly() || aspectSpec.isAutoRender();
+return (params.getAutoRenderOnly() == null
+|| !params.getAutoRenderOnly()
+|| aspectSpec.isAutoRender())
+&& (params.getAspectNames() == null
+|| params.getAspectNames().isEmpty()
+|| params.getAspectNames().contains(aspectSpec.getName()));
}

@Override
@@ -18,6 +18,7 @@
import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.browse.BrowseResultV2;
+import com.linkedin.metadata.query.SearchFlags;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.service.FormService;
import com.linkedin.metadata.service.ViewService;
@@ -52,6 +53,7 @@ public CompletableFuture<BrowseResultsV2> get(DataFetchingEnvironment environmen
final int start = input.getStart() != null ? input.getStart() : DEFAULT_START;
final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT;
final String query = input.getQuery() != null ? input.getQuery() : "*";
+final SearchFlags searchFlags = mapInputFlags(input.getSearchFlags());
// escape forward slash since it is a reserved character in Elasticsearch
final String sanitizedQuery = ResolverUtils.escapeForwardSlash(query);

@@ -83,7 +85,8 @@ public CompletableFuture<BrowseResultsV2> get(DataFetchingEnvironment environmen
sanitizedQuery,
start,
count,
-context.getAuthentication());
+context.getAuthentication(),
+searchFlags);
return mapBrowseResults(browseResults);
} catch (Exception e) {
throw new RuntimeException("Failed to execute browse V2", e);
12 changes: 12 additions & 0 deletions datahub-graphql-core/src/main/resources/entity.graphql
@@ -1763,6 +1763,12 @@ input AspectParams {
Only fetch auto render aspects
"""
autoRenderOnly: Boolean

"""
Fetch using aspect names
If absent, returns all aspects matching other inputs
"""
aspectNames: [String!]
}


@@ -6788,6 +6794,12 @@ type Assertion implements EntityWithRelationships & Entity {
Edges extending from this entity grouped by direction in the lineage graph
"""
lineage(input: LineageInput!): EntityLineageResult

"""
Experimental API.
For fetching extra aspects that do not have custom UI code yet
"""
aspects(input: AspectParams): [RawAspect!]
}

"""
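A minimal client-side sketch of how the new experimental aspects field on Assertion could be queried once this schema change ships. The assertion root field, the example urn, the aspect name, and the RawAspect selections (aspectName, payload) are assumptions for illustration and are not taken from this diff.

// Sketch only: builds a GraphQL query string exercising the new
// Assertion.aspects(input: AspectParams) field. Field names outside
// AspectParams are assumed, not confirmed by this commit.
public class AssertionAspectsQueryExample {
  public static void main(String[] args) {
    String query =
        "query {\n"
            + "  assertion(urn: \"urn:li:assertion:example\") {\n"
            + "    aspects(input: { aspectNames: [\"assertionInfo\"], autoRenderOnly: false }) {\n"
            + "      aspectName\n"
            + "      payload\n"
            + "    }\n"
            + "  }\n"
            + "}";
    System.out.println(query);
  }
}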
5 changes: 5 additions & 0 deletions datahub-graphql-core/src/main/resources/search.graphql
@@ -1230,6 +1230,11 @@ input BrowseV2Input {
The search query string
"""
query: String

"""
Flags controlling search options
"""
searchFlags: SearchFlags
}

"""
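Likewise, a hedged sketch of a browseV2 call that supplies the new searchFlags input. The entity type, path, result selections, and the skipCache flag are assumptions about the surrounding schema, not part of this change.

// Sketch only: composes a browseV2 query carrying the new searchFlags input.
public class BrowseV2SearchFlagsQueryExample {
  public static void main(String[] args) {
    String query =
        "query {\n"
            + "  browseV2(input: {\n"
            + "    type: DATASET,\n"
            + "    path: [],\n"
            + "    query: \"*\",\n"
            + "    start: 0,\n"
            + "    count: 10,\n"
            + "    searchFlags: { skipCache: true }\n"
            + "  }) {\n"
            + "    start\n"
            + "    count\n"
            + "    total\n"
            + "  }\n"
            + "}";
    System.out.println(query);
  }
}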
@@ -21,6 +21,7 @@
import com.linkedin.metadata.browse.BrowseResultGroupV2Array;
import com.linkedin.metadata.browse.BrowseResultMetadata;
import com.linkedin.metadata.browse.BrowseResultV2;
+import com.linkedin.metadata.query.SearchFlags;
import com.linkedin.metadata.query.filter.ConjunctiveCriterion;
import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray;
import com.linkedin.metadata.query.filter.Criterion;
@@ -262,7 +263,8 @@ private static EntityClient initMockEntityClient(
Mockito.eq(query),
Mockito.eq(start),
Mockito.eq(limit),
-Mockito.any(Authentication.class)))
+Mockito.any(Authentication.class),
+Mockito.nullable(SearchFlags.class)))
.thenReturn(result);
return client;
}
@@ -2,6 +2,10 @@

import com.linkedin.gms.factory.auth.AuthorizerChainFactory;
import com.linkedin.gms.factory.auth.DataHubAuthorizerFactory;
+import com.linkedin.gms.factory.graphql.GraphQLEngineFactory;
+import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory;
+import com.linkedin.gms.factory.kafka.SimpleKafkaConsumerFactory;
+import com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory;
import com.linkedin.gms.factory.telemetry.ScheduledAnalyticsFactory;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@@ -24,7 +28,11 @@
classes = {
ScheduledAnalyticsFactory.class,
AuthorizerChainFactory.class,
-DataHubAuthorizerFactory.class
+DataHubAuthorizerFactory.class,
+SimpleKafkaConsumerFactory.class,
+KafkaEventConsumerFactory.class,
+InternalSchemaRegistryFactory.class,
+GraphQLEngineFactory.class
})
})
public class UpgradeCliApplication {
@@ -3,6 +3,7 @@
import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.search.SearchService;
+import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@@ -11,7 +12,12 @@ public class BackfillBrowsePathsV2Config {

@Bean
public BackfillBrowsePathsV2 backfillBrowsePathsV2(
-EntityService<?> entityService, SearchService searchService) {
-return new BackfillBrowsePathsV2(entityService, searchService);
+EntityService<?> entityService,
+SearchService searchService,
+@Value("${systemUpdate.browsePathsV2.enabled}") final boolean enabled,
+@Value("${systemUpdate.browsePathsV2.reprocess.enabled}") final boolean reprocessEnabled,
+@Value("${systemUpdate.browsePathsV2.batchSize}") final Integer batchSize) {
+return new BackfillBrowsePathsV2(
+entityService, searchService, enabled, reprocessEnabled, batchSize);
}
}
@@ -2,14 +2,18 @@

import com.linkedin.datahub.upgrade.system.via.ReindexDataJobViaNodesCLL;
import com.linkedin.metadata.entity.EntityService;
+import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class ReindexDataJobViaNodesCLLConfig {

@Bean
-public ReindexDataJobViaNodesCLL _reindexDataJobViaNodesCLL(EntityService<?> entityService) {
-return new ReindexDataJobViaNodesCLL(entityService);
+public ReindexDataJobViaNodesCLL _reindexDataJobViaNodesCLL(
+EntityService<?> entityService,
+@Value("${systemUpdate.dataJobNodeCLL.enabled}") final boolean enabled,
+@Value("${systemUpdate.dataJobNodeCLL.batchSize}") final Integer batchSize) {
+return new ReindexDataJobViaNodesCLL(entityService, enabled, batchSize);
}
}
@@ -0,0 +1,14 @@
package com.linkedin.datahub.upgrade.config;

import org.springframework.boot.ApplicationArguments;
import org.springframework.context.annotation.Condition;
import org.springframework.context.annotation.ConditionContext;
import org.springframework.core.type.AnnotatedTypeMetadata;

public class SystemUpdateCondition implements Condition {
@Override
public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) {
return context.getBeanFactory().getBean(ApplicationArguments.class).getNonOptionArgs().stream()
.anyMatch("SystemUpdate"::equals);
}
}
@@ -8,6 +8,7 @@
import com.linkedin.gms.factory.common.TopicConventionFactory;
import com.linkedin.gms.factory.config.ConfigurationProvider;
import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory;
+import com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory;
import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig;
import com.linkedin.metadata.config.kafka.KafkaConfiguration;
import com.linkedin.metadata.dao.producer.KafkaEventProducer;
@@ -21,9 +22,12 @@
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Primary;

@Slf4j
@Configuration
@@ -74,4 +78,23 @@ protected KafkaEventProducer duheKafkaEventProducer(
duheSchemaRegistryConfig, kafkaConfiguration, properties));
return new KafkaEventProducer(producer, topicConvention, kafkaHealthChecker);
}

+/**
+* The ReindexDataJobViaNodesCLLConfig step requires publishing to MCL. Overriding the default
+* producer with this special producer which doesn't require an active registry.
+*
+* <p>Use when INTERNAL registry and is SYSTEM_UPDATE
+*
+* <p>This forces this producer into the EntityService
+*/
+@Primary
+@Bean(name = "kafkaEventProducer")
+@Conditional(SystemUpdateCondition.class)
+@ConditionalOnProperty(
+name = "kafka.schemaRegistry.type",
+havingValue = InternalSchemaRegistryFactory.TYPE)
+protected KafkaEventProducer kafkaEventProducer(
+@Qualifier("duheKafkaEventProducer") KafkaEventProducer kafkaEventProducer) {
+return kafkaEventProducer;
+}
}
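For context, a hedged sketch of how the systemUpdate.* toggles and the SystemUpdate argument introduced above might be supplied when launching the upgrade CLI. The property values, the package of UpgradeCliApplication, and the use of setDefaultProperties are illustrative assumptions rather than DataHub defaults.

import java.util.Map;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.WebApplicationType;
import com.linkedin.datahub.upgrade.UpgradeCliApplication; // assumed package for the CLI entry point

public class SystemUpdateLaunchSketch {
  public static void main(String[] args) {
    SpringApplication app = new SpringApplication(UpgradeCliApplication.class);
    app.setWebApplicationType(WebApplicationType.NONE);
    // Property names mirror the @Value expressions added in this commit;
    // the values below are placeholders, not DataHub defaults.
    app.setDefaultProperties(Map.of(
        "systemUpdate.browsePathsV2.enabled", "true",
        "systemUpdate.browsePathsV2.reprocess.enabled", "false",
        "systemUpdate.browsePathsV2.batchSize", "1000",
        "systemUpdate.dataJobNodeCLL.enabled", "true",
        "systemUpdate.dataJobNodeCLL.batchSize", "1000"));
    // "SystemUpdate" is the non-option argument that SystemUpdateCondition matches on,
    // which in turn enables the conditional kafkaEventProducer bean shown above.
    app.run("SystemUpdate");
  }
}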