Skip to content

Commit

Permalink
Merge branch 'datahub-project:master' into master
Browse files Browse the repository at this point in the history
  • Loading branch information
anshbansal authored Sep 6, 2024
2 parents 833ce7b + c3a7850 commit d43053a
Show file tree
Hide file tree
Showing 215 changed files with 159,796 additions and 2,090 deletions.
7 changes: 7 additions & 0 deletions .github/workflows/docker-unified.yml
Original file line number Diff line number Diff line change
Expand Up @@ -967,6 +967,13 @@ jobs:
docker pull '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:head'
docker tag '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:head' '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
fi
if [ '${{ needs.setup.outputs.integrations_service_change }}' == 'false' ]; then
echo 'datahub-integration-service head images'
docker pull '${{ env.DATAHUB_INTEGRATIONS_IMAGE }}:head'
docker tag '${{ env.DATAHUB_INTEGRATIONS_IMAGE }}:head' '${{ env.DATAHUB_INTEGRATIONS_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
fi
- name: CI Slim Head Images
run: |
if [ '${{ needs.setup.outputs.ingestion_change }}' == 'false' ]; then
echo 'datahub-ingestion head-slim images'
docker pull '${{ env.DATAHUB_INGESTION_IMAGE }}:head-slim'
Expand Down
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,7 @@ Here are the companies that have officially adopted DataHub. Please feel free to
- [Haibo Technology](https://www.botech.com.cn)
- [hipages](https://hipages.com.au/)
- [inovex](https://www.inovex.de/)
- [Inter&Co](https://inter.co/)
- [IOMED](https://iomed.health)
- [Klarna](https://www.klarna.com)
- [LinkedIn](http://linkedin.com)
Expand Down
15 changes: 7 additions & 8 deletions datahub-frontend/app/client/KafkaTrackingProducer.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package client;

import com.linkedin.metadata.config.kafka.KafkaConfiguration;
import com.linkedin.metadata.config.kafka.ProducerConfiguration;
import com.typesafe.config.Config;
import config.ConfigurationProvider;
Expand Down Expand Up @@ -46,7 +47,7 @@ public KafkaTrackingProducer(

if (_isEnabled) {
_logger.debug("Analytics tracking is enabled");
_producer = createKafkaProducer(config, configurationProvider.getKafka().getProducer());
_producer = createKafkaProducer(config, configurationProvider.getKafka());

lifecycle.addStopHook(
() -> {
Expand All @@ -69,7 +70,8 @@ public void send(ProducerRecord<String, String> record) {
}

private static KafkaProducer createKafkaProducer(
Config config, ProducerConfiguration producerConfiguration) {
Config config, KafkaConfiguration kafkaConfiguration) {
final ProducerConfiguration producerConfiguration = kafkaConfiguration.getProducer();
final Properties props = new Properties();
props.put(ProducerConfig.CLIENT_ID_CONFIG, "datahub-frontend");
props.put(
Expand All @@ -78,12 +80,9 @@ private static KafkaProducer createKafkaProducer(
props.put(
ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
config.getString("analytics.kafka.bootstrap.server"));
props.put(
ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
"org.apache.kafka.common.serialization.StringSerializer"); // Actor urn.
props.put(
ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
"org.apache.kafka.common.serialization.StringSerializer"); // JSON object.
// key: Actor urn.
// value: JSON object.
props.putAll(kafkaConfiguration.getSerde().getUsageEvent().getProducerProperties(null));
props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, producerConfiguration.getMaxRequestSize());
props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, producerConfiguration.getCompressionType());

Expand Down
2 changes: 1 addition & 1 deletion datahub-frontend/app/utils/SearchUtil.java
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ private SearchUtil() {
/**
 * Escapes forward slashes in a search input so "/" is not interpreted as a reserved
 * character by the downstream search query parser.
 *
 * <p>Note: merge left both the old double-escape ("\\\\/") and new single-escape ("\\/")
 * lines in place; this keeps the post-commit single-escape behavior.
 *
 * @param input raw search input, never null
 * @return the input with every "/" replaced by "\/"
 */
@Nonnull
public static String escapeForwardSlash(@Nonnull String input) {
  if (input.contains("/")) {
    input = input.replace("/", "\\/");
  }
  return input;
}
Expand Down
4 changes: 2 additions & 2 deletions datahub-frontend/test/utils/SearchUtilTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,10 @@ public class SearchUtilTest {
/**
 * Verifies that escapeForwardSlash escapes "/" (single backslash escape, matching the
 * post-commit implementation) while leaving other characters, including regex "*", intact.
 */
@Test
public void testEscapeForwardSlash() {
  // escape "/"
  assertEquals("\\/foo\\/bar", SearchUtil.escapeForwardSlash("/foo/bar"));
  // "/" is escaped but "*" is not escaped and is treated as regex. Since currently we want to
  // retain the regex behaviour with "*"
  assertEquals("\\/foo\\/bar\\/*", SearchUtil.escapeForwardSlash("/foo/bar/*"));
  // Inputs without "/" must pass through unchanged. The previous assertions compared literals
  // to themselves (assertEquals("", "")) and never exercised the method under test.
  assertEquals("", SearchUtil.escapeForwardSlash(""));
  assertEquals("foo", SearchUtil.escapeForwardSlash("foo"));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -724,7 +724,7 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) {
* Returns a {@link Supplier} responsible for creating a new {@link DataLoader} from a {@link
* LoadableType}.
*/
public Map<String, Function<QueryContext, DataLoader<?, ?>>> loaderSuppliers(
public static Map<String, Function<QueryContext, DataLoader<?, ?>>> loaderSuppliers(
final Collection<? extends LoadableType<?, ?>> loadableTypes) {
return loadableTypes.stream()
.collect(
Expand Down Expand Up @@ -1135,16 +1135,16 @@ private DataFetcher getEntityResolver() {
});
}

private DataFetcher getResolver(LoadableType<?, String> loadableType) {
return getResolver(loadableType, this::getUrnField);
private static DataFetcher getResolver(LoadableType<?, String> loadableType) {
return getResolver(loadableType, GmsGraphQLEngine::getUrnField);
}

private <T, K> DataFetcher getResolver(
private static <T, K> DataFetcher getResolver(
LoadableType<T, K> loadableType, Function<DataFetchingEnvironment, K> keyProvider) {
return new LoadableTypeResolver<>(loadableType, keyProvider);
}

private String getUrnField(DataFetchingEnvironment env) {
private static String getUrnField(DataFetchingEnvironment env) {
return env.getArgument(URN_FIELD_NAME);
}

Expand Down Expand Up @@ -3025,7 +3025,7 @@ private void configureTestResultResolvers(final RuntimeWiring.Builder builder) {
})));
}

private <T, K> DataLoader<K, DataFetcherResult<T>> createDataLoader(
private static <T, K> DataLoader<K, DataFetcherResult<T>> createDataLoader(
final LoadableType<T, K> graphType, final QueryContext queryContext) {
BatchLoaderContextProvider contextProvider = () -> queryContext;
DataLoaderOptions loaderOptions =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.dataloader.DataLoader;
import org.dataloader.DataLoaderRegistry;

/**
* Simple wrapper around a {@link GraphQL} instance providing APIs for building an engine and
Expand Down Expand Up @@ -100,7 +99,7 @@ public ExecutionResult execute(
/*
* Init DataLoaderRegistry - should be created for each request.
*/
DataLoaderRegistry register = createDataLoaderRegistry(_dataLoaderSuppliers, context);
LazyDataLoaderRegistry register = new LazyDataLoaderRegistry(context, _dataLoaderSuppliers);

/*
* Construct execution input
Expand Down Expand Up @@ -218,14 +217,4 @@ public GraphQLEngine build() {
graphQLQueryIntrospectionEnabled);
}
}

/**
 * Builds an eagerly-populated {@link DataLoaderRegistry}: each supplier is invoked immediately
 * against the given request context and the resulting loader registered under its key. Per the
 * calling code's contract, a fresh registry is created for each request.
 *
 * @param dataLoaderSuppliers factories for per-request loaders, keyed by registry name
 * @param context the query context the loaders are bound to
 * @return a registry containing one loader per supplier entry
 */
private DataLoaderRegistry createDataLoaderRegistry(
    final Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers,
    final QueryContext context) {
  final DataLoaderRegistry registry = new DataLoaderRegistry();
  dataLoaderSuppliers.forEach(
      (loaderKey, supplier) -> registry.register(loaderKey, supplier.apply(context)));
  return registry;
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
package com.linkedin.datahub.graphql;

import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import lombok.extern.slf4j.Slf4j;
import org.dataloader.DataLoader;
import org.dataloader.DataLoaderRegistry;

/**
 * A {@link DataLoaderRegistry} that materializes each {@link DataLoader} on first use rather
 * than up front. The purpose of this class is to avoid loading 42+ dataLoaders when many of
 * the graphql queries do not use all of them.
 */
@Slf4j
public class LazyDataLoaderRegistry extends DataLoaderRegistry {
  // Request context handed to each supplier when its loader is first created.
  private final QueryContext queryContext;
  // Loader factories keyed by registry name; copied into a ConcurrentHashMap for safe
  // concurrent lookups.
  private final Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers;

  public LazyDataLoaderRegistry(
      QueryContext queryContext,
      Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers) {
    this.queryContext = queryContext;
    this.dataLoaderSuppliers = new ConcurrentHashMap<>(dataLoaderSuppliers);
  }

  /**
   * Returns the loader for {@code key}, creating it from its registered supplier on first
   * access.
   *
   * @throws IllegalArgumentException if no supplier is registered for the key
   */
  @Override
  public <K, V> DataLoader<K, V> getDataLoader(String key) {
    return super.computeIfAbsent(
        key,
        loaderKey -> {
          final Function<QueryContext, DataLoader<?, ?>> factory =
              dataLoaderSuppliers.get(loaderKey);
          if (factory == null) {
            throw new IllegalArgumentException("No DataLoader registered for key: " + loaderKey);
          }
          return factory.apply(queryContext);
        });
  }

  /** Union of already-materialized loader keys and keys that can still be created lazily. */
  @Override
  public Set<String> getKeys() {
    return Stream.concat(dataLoaderSuppliers.keySet().stream(), dataLoaders.keySet().stream())
        .collect(Collectors.toSet());
  }

  /** Combining registries is not supported for a lazy registry. */
  @Override
  public DataLoaderRegistry combine(DataLoaderRegistry registry) {
    throw new UnsupportedOperationException();
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import com.datahub.authentication.Actor;
import com.datahub.authentication.Authentication;
import com.datahub.plugins.auth.authorization.Authorizer;
import com.linkedin.metadata.config.DataHubAppConfiguration;
import io.datahubproject.metadata.context.OperationContext;

/** Provided as input to GraphQL resolvers; used to carry information about GQL request context. */
Expand Down Expand Up @@ -31,4 +32,6 @@ default String getActorUrn() {
* @return Returns the operational context
*/
OperationContext getOperationContext();

DataHubAppConfiguration getDataHubAppConfig();
}
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableSet;
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.data.template.StringArray;
import com.linkedin.datahub.graphql.QueryContext;
Expand All @@ -23,7 +22,6 @@
import com.linkedin.metadata.query.filter.CriterionArray;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.search.utils.ESUtils;
import com.linkedin.metadata.search.utils.QueryUtils;
import com.linkedin.metadata.service.ViewService;
import com.linkedin.view.DataHubViewInfo;
import graphql.schema.DataFetchingEnvironment;
Expand Down Expand Up @@ -72,7 +70,7 @@ public static <T> T bindArgument(Object argument, Class<T> clazz) {
/**
 * Escapes forward slashes in a search input so "/" is not interpreted as a reserved
 * character by the downstream search query parser.
 *
 * <p>Note: merge left both the old double-escape ("\\\\/") and new single-escape ("\\/")
 * replace lines in place; this keeps the post-commit single-escape behavior, consistent
 * with SearchUtil.escapeForwardSlash.
 *
 * @param input raw search input, never null
 * @return the input with every "/" replaced by "\/"
 */
@Nonnull
public static String escapeForwardSlash(@Nonnull String input) {
  if (input.contains("/")) {
    input = input.replace("/", "\\/");
  }
  return input;
}
Expand Down Expand Up @@ -222,27 +220,6 @@ private static String getFilterField(
return ESUtils.toKeywordField(originalField, skipKeywordSuffix, aspectRetriever);
}

/**
 * Builds a {@link Filter} that restricts results to the given urns. When {@code inputFilters}
 * already contains OR clauses, the urn criterion is appended to every clause (each disjunct
 * becomes urn-scoped) and the mutated input filter is returned; otherwise a new filter with
 * only the urn criterion is returned.
 *
 * @param urns the urns results must match
 * @param inputFilters an existing filter to scope, or null
 * @return a filter matching the urns, possibly the mutated {@code inputFilters}
 */
public static Filter buildFilterWithUrns(@Nonnull Set<Urn> urns, @Nullable Filter inputFilters) {
  final StringArray urnValues =
      new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()));
  final Criterion urnMatchCriterion =
      new Criterion().setField("urn").setValue("").setValues(urnValues);

  // No input filter, or no OR clauses to merge into: match on the urns alone.
  if (inputFilters == null || inputFilters.getOr() == null || inputFilters.getOr().isEmpty()) {
    return QueryUtils.newFilter(urnMatchCriterion);
  }

  // Add urn match criterion to each or clause
  for (ConjunctiveCriterion orClause : inputFilters.getOr()) {
    orClause.getAnd().add(urnMatchCriterion);
  }
  return inputFilters;
}

public static Filter viewFilter(
OperationContext opContext, ViewService viewService, String viewUrn) {
if (viewUrn == null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ private CustomAssertionInfo createCustomAssertionInfo(

if (input.getFieldPath() != null) {
customAssertionInfo.setField(
SchemaFieldUtils.generateSchemaFieldUrn(entityUrn.toString(), input.getFieldPath()));
SchemaFieldUtils.generateSchemaFieldUrn(entityUrn, input.getFieldPath()));
}
return customAssertionInfo;
}
Expand Down
Loading

0 comments on commit d43053a

Please sign in to comment.