diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml
index 9f89e95e43a23..41f82c194345c 100644
--- a/.github/workflows/docker-unified.yml
+++ b/.github/workflows/docker-unified.yml
@@ -4,12 +4,14 @@ on:
     branches:
       - master
     paths-ignore:
+      - "docs-website/**"
       - "docs/**"
       - "**.md"
   pull_request:
     branches:
       - "**"
     paths-ignore:
+      - "docs-website/**"
       - "docs/**"
       - "**.md"
   release:
diff --git a/.github/workflows/metadata-io.yml b/.github/workflows/metadata-io.yml
index 96229642244b6..c964352c3e129 100644
--- a/.github/workflows/metadata-io.yml
+++ b/.github/workflows/metadata-io.yml
@@ -40,9 +40,6 @@ jobs:
           python-version: "3.10"
           cache: "pip"
       - name: Gradle build (and test)
-        # there is some race condition in gradle build, which makes gradle never terminate in ~30% of the runs
-        # running build first without datahub-web-react:yarnBuild and then with it is 100% stable
-        # datahub-frontend:unzipAssets depends on datahub-web-react:yarnBuild but gradle does not know about it
        run: |
          ./gradlew :metadata-io:test
      - uses: actions/upload-artifact@v3
diff --git a/.github/workflows/pr-labeler.yml b/.github/workflows/pr-labeler.yml
index 82bb90f68f4c3..c0b2bf807b24b 100644
--- a/.github/workflows/pr-labeler.yml
+++ b/.github/workflows/pr-labeler.yml
@@ -14,3 +14,31 @@ jobs:
         with:
           repo-token: "${{ secrets.GITHUB_TOKEN }}"
           configuration-path: ".github/pr-labeler-config.yml"
+      - uses: actions-ecosystem/action-add-labels@v1.1.0
+        # only add names of Acryl Data team members here
+        if:
+          ${{
+            !contains(
+              fromJson('[
+                "skrydal",
+                "siladitya2",
+                "sgomezvillamor",
+                "ngamanda",
+                "HarveyLeo",
+                "frsann",
+                "bossenti",
+                "nikolakasev",
+                "PatrickfBraz",
+                "cuong-pham",
+                "sudhakarast",
+                "tkdrahn",
+                "rtekal",
+                "sgm44"
+              ]'),
+              github.actor
+            )
+          }}
+        with:
+          github_token: ${{ github.token }}
+          labels: |
+            datahub-community-champion
diff --git a/.github/workflows/spark-smoke-test.yml b/.github/workflows/spark-smoke-test.yml
index 94692bd3c2336..ee7ead27f2965 100644
--- a/.github/workflows/spark-smoke-test.yml
+++ b/.github/workflows/spark-smoke-test.yml
@@ -51,10 +51,15 @@ jobs:
             -x :datahub-web-react:yarnLint \
             -x :datahub-web-react:yarnGenerate \
             -x :datahub-web-react:yarnInstall \
-            -x :datahub-web-react:yarnQuickBuild \
-            -x :datahub-web-react:copyAssets \
+            -x :datahub-web-react:yarnBuild \
             -x :datahub-web-react:distZip \
             -x :datahub-web-react:jar
+      - name: Upload logs
+        uses: actions/upload-artifact@v3
+        if: failure()
+        with:
+          name: docker logs
+          path: "docker/build/container-logs/*.log"
       - uses: actions/upload-artifact@v3
         if: always()
         with:
diff --git a/build.gradle b/build.gradle
index bb01a15a7db8d..4680598165d28 100644
--- a/build.gradle
+++ b/build.gradle
@@ -325,6 +325,10 @@ subprojects {
   }

   plugins.withType(JavaPlugin).configureEach {
+    if (project.name == 'datahub-web-react') {
+      return
+    }
+
     dependencies {
       implementation externalDependency.annotationApi
       constraints {
diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle
index 437c72e6394ea..1174c5c5cfd5d 100644
--- a/datahub-frontend/build.gradle
+++ b/datahub-frontend/build.gradle
@@ -1,5 +1,4 @@
 plugins {
-    id "io.github.kobylynskyi.graphql.codegen" version "4.1.1"
     id 'scala'
     id 'com.palantir.docker'
     id 'org.gradle.playframework'
@@ -39,23 +38,6 @@ artifacts {
   archives myTar
 }

-graphqlCodegen {
-    // For options: https://github.com/kobylynskyi/graphql-java-codegen/blob/master/docs/codegen-options.md
-    graphqlSchemaPaths = ["$projectDir/conf/datahub-frontend.graphql".toString()]
-    outputDir = new File("$projectDir/app/graphql")
new File("$projectDir/app/graphql") - packageName = "generated" - generateApis = true - modelValidationAnnotation = "" - customTypesMapping = [ - Long: "Long", - ] -} - -tasks.withType(Checkstyle) { - exclude "**/generated/**" -} - - /* PLAY UPGRADE NOTE Generates the distribution jars under the expected names. The playFramework plugin only accepts certain name values diff --git a/datahub-frontend/conf/routes b/datahub-frontend/conf/routes index 3102c26497fed..6b53a2789e7cc 100644 --- a/datahub-frontend/conf/routes +++ b/datahub-frontend/conf/routes @@ -36,11 +36,14 @@ PUT /openapi/*path c HEAD /openapi/*path controllers.Application.proxy(path: String, request: Request) PATCH /openapi/*path controllers.Application.proxy(path: String, request: Request) -# Map static resources from the /public folder to the /assets URL path -GET /assets/*file controllers.Assets.at(path="/public", file) - # Analytics route POST /track controllers.TrackingController.track(request: Request) -# Wildcard route accepts any routes and delegates to serveAsset which in turn serves the React Bundle +# Known React asset routes +GET /assets/*file controllers.Assets.at(path="/public/assets", file) +GET /node_modules/*file controllers.Assets.at(path="/public/node_modules", file) +GET /manifest.json controllers.Assets.at(path="/public", file="manifest.json") +GET /robots.txt controllers.Assets.at(path="/public", file="robots.txt") + +# Wildcard route accepts any routes and delegates to serveAsset which in turn serves the React Bundle's index.html GET /*path controllers.Application.index(path) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java index ecf36769dfa9f..2519d91aa3a84 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java @@ -3,16 +3,20 @@ import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.resolvers.BatchLoadUtils; +import graphql.execution.DataFetcherResult; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.function.Function; import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class BatchGetEntitiesResolver implements DataFetcher>> { private final List> _entityTypes; @@ -30,13 +34,21 @@ public CompletableFuture> get(DataFetchingEnvironment environment) final List entities = _entitiesProvider.apply(environment); Map> entityTypeToEntities = new HashMap<>(); - entities.forEach( - (entity) -> { - EntityType type = entity.getType(); - List entitiesList = entityTypeToEntities.getOrDefault(type, new ArrayList<>()); - entitiesList.add(entity); - entityTypeToEntities.put(type, entitiesList); - }); + Map> entityIndexMap = new HashMap<>(); + int index = 0; + for (Entity entity : entities) { + List indexList = new ArrayList<>(); + if (entityIndexMap.containsKey(entity.getUrn())) { + indexList = entityIndexMap.get(entity.getUrn()); + } + indexList.add(index); + entityIndexMap.put(entity.getUrn(), indexList); + index++; + 
+    Map<String, List<Integer>> entityIndexMap = new HashMap<>();
+    int index = 0;
+    for (Entity entity : entities) {
+      List<Integer> indexList = new ArrayList<>();
+      if (entityIndexMap.containsKey(entity.getUrn())) {
+        indexList = entityIndexMap.get(entity.getUrn());
+      }
+      indexList.add(index);
+      entityIndexMap.put(entity.getUrn(), indexList);
+      index++;
+      EntityType type = entity.getType();
+      List<Entity> entitiesList = entityTypeToEntities.getOrDefault(type, new ArrayList<>());
+      entitiesList.add(entity);
+      entityTypeToEntities.put(type, entitiesList);
+    }

     List<CompletableFuture<List<Entity>>> entitiesFutures = new ArrayList<>();

@@ -49,9 +61,32 @@ public CompletableFuture<List<Entity>> get(DataFetchingEnvironment environment)

     return CompletableFuture.allOf(entitiesFutures.toArray(new CompletableFuture[0]))
         .thenApply(
-            v ->
-                entitiesFutures.stream()
-                    .flatMap(future -> future.join().stream())
-                    .collect(Collectors.toList()));
+            v -> {
+              Entity[] finalEntityList = new Entity[entities.size()];
+              // Returned objects can be either of type Entity or wrapped as
+              // DataFetcherResult<Entity>
+              // Therefore we need to be working with raw Objects in this area of the code
+              List<Object> returnedList =
+                  entitiesFutures.stream()
+                      .flatMap(future -> future.join().stream())
+                      .collect(Collectors.toList());
+              for (Object element : returnedList) {
+                Entity entity = null;
+                if (element instanceof DataFetcherResult) {
+                  entity = ((DataFetcherResult<Entity>) element).getData();
+                } else if (element instanceof Entity) {
+                  entity = (Entity) element;
+                } else {
+                  throw new RuntimeException(
+                      String.format(
+                          "Cannot process entity because it is neither an Entity nor a DataFetcherResult. %s",
+                          element));
+                }
+                for (int idx : entityIndexMap.get(entity.getUrn())) {
+                  finalEntityList[idx] = entity;
+                }
+              }
+              return Arrays.asList(finalEntityList);
+            });
   }
 }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolverTest.java
new file mode 100644
index 0000000000000..6bd5b4f8c3f38
--- /dev/null
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolverTest.java
@@ -0,0 +1,117 @@
+package com.linkedin.datahub.graphql.resolvers.load;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.*;
+import static org.testng.Assert.*;
+
+import com.google.common.collect.ImmutableList;
+import com.linkedin.datahub.graphql.generated.Dashboard;
+import com.linkedin.datahub.graphql.generated.Dataset;
+import com.linkedin.datahub.graphql.generated.Entity;
+import com.linkedin.datahub.graphql.types.dataset.DatasetType;
+import com.linkedin.entity.client.EntityClient;
+import com.linkedin.metadata.entity.EntityService;
+import graphql.schema.DataFetchingEnvironment;
+import java.util.List;
+import java.util.concurrent.CompletableFuture;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import org.dataloader.DataLoader;
+import org.dataloader.DataLoaderRegistry;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+public class BatchGetEntitiesResolverTest {
+  private EntityClient _entityClient;
+  private EntityService _entityService;
+  private DataFetchingEnvironment _dataFetchingEnvironment;
+
+  @BeforeMethod
+  public void setupTest() {
+    _entityService = mock(EntityService.class);
+    _dataFetchingEnvironment = mock(DataFetchingEnvironment.class);
+    _entityClient = mock(EntityClient.class);
+  }
+
+  List<Entity> getRequestEntities(List<String> urnList) {
+
+    return urnList.stream()
+        .map(
+            urn -> {
+              if (urn.startsWith("urn:li:dataset")) {
+                Dataset entity = new Dataset();
+                entity.setUrn(urn);
+                return entity;
+              } else if (urn.startsWith("urn:li:dashboard")) {
+                Dashboard entity = new Dashboard();
+                entity.setUrn(urn);
+                return entity;
+              } else {
+                throw new RuntimeException("Can't handle urn " + urn);
+              }
+            })
+        .collect(Collectors.toList());
+  }
+
+  @Test
+  /** Tests that if responses come back out of order, we stitch them back correctly */
+  public void testReordering() throws Exception {
+    Function<DataFetchingEnvironment, List<Entity>> entityProvider = mock(Function.class);
+    List<Entity> inputEntities =
+        getRequestEntities(ImmutableList.of("urn:li:dataset:1", "urn:li:dataset:2"));
+    when(entityProvider.apply(any())).thenReturn(inputEntities);
+    BatchGetEntitiesResolver resolver =
+        new BatchGetEntitiesResolver(
+            ImmutableList.of(new DatasetType(_entityClient)), entityProvider);
+
+    DataLoaderRegistry mockDataLoaderRegistry = mock(DataLoaderRegistry.class);
+    when(_dataFetchingEnvironment.getDataLoaderRegistry()).thenReturn(mockDataLoaderRegistry);
+    DataLoader mockDataLoader = mock(DataLoader.class);
+    when(mockDataLoaderRegistry.getDataLoader(any())).thenReturn(mockDataLoader);
+
+    Dataset mockResponseEntity1 = new Dataset();
+    mockResponseEntity1.setUrn("urn:li:dataset:1");
+
+    Dataset mockResponseEntity2 = new Dataset();
+    mockResponseEntity2.setUrn("urn:li:dataset:2");
+
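+    // Complete the batch in reverse order so the test exercises the
+    // resolver's re-stitching of responses back to request order.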
RuntimeException("Can't handle urn " + urn); + } + }) + .collect(Collectors.toList()); + } + + @Test + /** Tests that if responses come back out of order, we stitch them back correctly */ + public void testReordering() throws Exception { + Function entityProvider = mock(Function.class); + List inputEntities = + getRequestEntities(ImmutableList.of("urn:li:dataset:1", "urn:li:dataset:2")); + when(entityProvider.apply(any())).thenReturn(inputEntities); + BatchGetEntitiesResolver resolver = + new BatchGetEntitiesResolver( + ImmutableList.of(new DatasetType(_entityClient)), entityProvider); + + DataLoaderRegistry mockDataLoaderRegistry = mock(DataLoaderRegistry.class); + when(_dataFetchingEnvironment.getDataLoaderRegistry()).thenReturn(mockDataLoaderRegistry); + DataLoader mockDataLoader = mock(DataLoader.class); + when(mockDataLoaderRegistry.getDataLoader(any())).thenReturn(mockDataLoader); + + Dataset mockResponseEntity1 = new Dataset(); + mockResponseEntity1.setUrn("urn:li:dataset:1"); + + Dataset mockResponseEntity2 = new Dataset(); + mockResponseEntity2.setUrn("urn:li:dataset:2"); + + CompletableFuture mockFuture = + CompletableFuture.completedFuture( + ImmutableList.of(mockResponseEntity2, mockResponseEntity1)); + when(mockDataLoader.loadMany(any())).thenReturn(mockFuture); + when(_entityService.exists(any())).thenReturn(true); + List batchGetResponse = resolver.get(_dataFetchingEnvironment).join(); + assertEquals(batchGetResponse.size(), 2); + assertEquals(batchGetResponse.get(0), mockResponseEntity1); + assertEquals(batchGetResponse.get(1), mockResponseEntity2); + } + + @Test + /** Tests that if input list contains duplicates, we stitch them back correctly */ + public void testDuplicateUrns() throws Exception { + Function entityProvider = mock(Function.class); + List inputEntities = + getRequestEntities(ImmutableList.of("urn:li:dataset:foo", "urn:li:dataset:foo")); + when(entityProvider.apply(any())).thenReturn(inputEntities); + BatchGetEntitiesResolver resolver = + new BatchGetEntitiesResolver( + ImmutableList.of(new DatasetType(_entityClient)), entityProvider); + + DataLoaderRegistry mockDataLoaderRegistry = mock(DataLoaderRegistry.class); + when(_dataFetchingEnvironment.getDataLoaderRegistry()).thenReturn(mockDataLoaderRegistry); + DataLoader mockDataLoader = mock(DataLoader.class); + when(mockDataLoaderRegistry.getDataLoader(any())).thenReturn(mockDataLoader); + + Dataset mockResponseEntity = new Dataset(); + mockResponseEntity.setUrn("urn:li:dataset:foo"); + + CompletableFuture mockFuture = + CompletableFuture.completedFuture(ImmutableList.of(mockResponseEntity)); + when(mockDataLoader.loadMany(any())).thenReturn(mockFuture); + when(_entityService.exists(any())).thenReturn(true); + List batchGetResponse = resolver.get(_dataFetchingEnvironment).join(); + assertEquals(batchGetResponse.size(), 2); + assertEquals(batchGetResponse.get(0), mockResponseEntity); + assertEquals(batchGetResponse.get(1), mockResponseEntity); + } +} diff --git a/datahub-web-react/.env b/datahub-web-react/.env index e5529bbdaa56d..7c02340752104 100644 --- a/datahub-web-react/.env +++ b/datahub-web-react/.env @@ -1,5 +1,3 @@ -PUBLIC_URL=/assets REACT_APP_THEME_CONFIG=theme_light.config.json SKIP_PREFLIGHT_CHECK=true -BUILD_PATH=build/yarn -REACT_APP_PROXY_TARGET=http://localhost:9002 \ No newline at end of file +REACT_APP_PROXY_TARGET=http://localhost:9002 diff --git a/datahub-web-react/.eslintrc.js b/datahub-web-react/.eslintrc.js index 2806942dd1053..e48dfdb23a4e7 100644 --- a/datahub-web-react/.eslintrc.js 
diff --git a/datahub-web-react/build.gradle b/datahub-web-react/build.gradle
index 72821d8b97dc0..c0355b935137a 100644
--- a/datahub-web-react/build.gradle
+++ b/datahub-web-react/build.gradle
@@ -19,7 +19,7 @@ node {
   version = '21.2.0'

   // Version of Yarn to use.
-  yarnVersion = '1.22.1'
+  yarnVersion = '1.22.21'

   // Base URL for fetching node distributions (set nodeDistBaseUrl if you have a mirror).
   if (project.hasProperty('nodeDistBaseUrl')) {
@@ -44,10 +44,33 @@ node {
  */
 task yarnInstall(type: YarnTask) {
   args = ['install']
+
+  // The node_modules directory can contain built artifacts, so
+  // it's not really safe to cache it.
+  outputs.cacheIf { false }
+
+  inputs.files(
+    file('yarn.lock'),
+    file('package.json'),
+  )
+  outputs.dir('node_modules')
 }

 task yarnGenerate(type: YarnTask, dependsOn: yarnInstall) {
   args = ['run', 'generate']
+
+  outputs.cacheIf { true }
+
+  inputs.files(
+    yarnInstall.inputs.files,
+    file('codegen.yml'),
+    project.fileTree(dir: "../datahub-graphql-core/src/main/resources/", include: "*.graphql"),
+    project.fileTree(dir: "src", include: "**/*.graphql"),
+  )
+
+  outputs.files(
+    project.fileTree(dir: "src", include: "**/*.generated.ts"),
+  )
 }

 task yarnServe(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) {
@@ -55,7 +78,8 @@ task yarnServe(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) {
 }

 task yarnTest(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) {
-  args = ['run', 'test', '--watchAll', 'false']
+  // Explicitly runs in non-watch mode.
+  args = ['run', 'test', 'run']
 }

 task yarnLint(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) {
@@ -68,13 +92,24 @@ task yarnLintFix(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) {
   args = ['run', 'lint-fix']
 }

-task yarnBuild(type: YarnTask, dependsOn: [yarnInstall, yarnTest, yarnLint]) {
-  args = ['run', 'build']
-}
-
-task yarnQuickBuild(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) {
+task yarnBuild(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) {
   environment = [NODE_OPTIONS: "--max-old-space-size=3072 --openssl-legacy-provider"]
   args = ['run', 'build']
+
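+  // Declaring the build's inputs and outputs below lets Gradle skip the Vite
+  // build when nothing has changed, and restore dist from the build cache.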
+  outputs.cacheIf { true }
+  inputs.files(
+    file('index.html'),
+    project.fileTree(dir: "src"),
+    project.fileTree(dir: "public"),
+
+    yarnInstall.inputs.files,
+    yarnGenerate.outputs.files,
+
+    file('.env'),
+    file('vite.config.ts'),
+    file('tsconfig.json'),
+  )
+  outputs.dir('dist')
 }

 task cleanExtraDirs {
@@ -82,9 +117,8 @@ task cleanExtraDirs {
   delete 'dist'
   delete 'tmp'
   delete 'just'
-  delete 'src/types.generated.ts'
   delete fileTree('../datahub-frontend/public')
-  delete fileTree(dir: 'src/graphql', include: '*.generated.ts')
+  delete fileTree(dir: 'src', include: '*.generated.ts')
 }
 clean.finalizedBy(cleanExtraDirs)

@@ -93,24 +127,16 @@ configurations {
 }

 distZip {
-  dependsOn yarnQuickBuild
+  dependsOn yarnBuild
   archiveFileName = "datahub-web-react-${archiveVersion}.${archiveExtension}"
   from 'dist'
 }

-task copyAssets(dependsOn: distZip) {
-  doLast {
-    copy {
-      from zipTree(distZip.outputs.files.first())
-      into "../datahub-frontend/public"
-    }
-  }
-}
-
 jar {
-  dependsOn distZip, copyAssets
+  dependsOn distZip

   into('public') {
     from zipTree(distZip.outputs.files.first())
   }

   archiveClassifier = 'assets'
 }
+build.dependsOn jar
diff --git a/datahub-web-react/craco.config.js b/datahub-web-react/craco.config.js
deleted file mode 100644
index 6ede45902128f..0000000000000
--- a/datahub-web-react/craco.config.js
+++ /dev/null
@@ -1,75 +0,0 @@
-/* eslint-disable @typescript-eslint/no-var-requires */
-require('dotenv').config();
-const { whenProd } = require('@craco/craco');
-const CracoAntDesignPlugin = require('craco-antd');
-const path = require('path');
-const CopyWebpackPlugin = require('copy-webpack-plugin');
-
-// eslint-disable-next-line import/no-dynamic-require
-const themeConfig = require(`./src/conf/theme/${process.env.REACT_APP_THEME_CONFIG}`);
-
-function addLessPrefixToKeys(styles) {
-    const output = {};
-    Object.keys(styles).forEach((key) => {
-        output[`@${key}`] = styles[key];
-    });
-    return output;
-}
-
-module.exports = {
-    webpack: {
-        configure: {
-            optimization: whenProd(() => ({
-                splitChunks: {
-                    cacheGroups: {
-                        vendor: {
-                            test: /[\\/]node_modules[\\/]/,
-                            name: 'vendors',
-                            chunks: 'all',
-                        },
-                    },
-                },
-            })),
-            // Webpack 5 no longer automatically pollyfill core Node.js modules
-            resolve: { fallback: { fs: false } },
-            // Ignore Webpack 5's missing source map warnings from node_modules
-            ignoreWarnings: [{ module: /node_modules/, message: /source-map-loader/ }],
-        },
-        plugins: {
-            add: [
-                // Self host images by copying them to the build directory
-                new CopyWebpackPlugin({
-                    patterns: [{ from: 'src/images', to: 'platforms' }],
-                }),
-                // Copy monaco-editor files to the build directory
-                new CopyWebpackPlugin({
-                    patterns: [
-                        { from: 'node_modules/monaco-editor/min/vs/', to: 'monaco-editor/vs' },
-                        { from: 'node_modules/monaco-editor/min-maps/vs/', to: 'monaco-editor/min-maps/vs' },
-                    ],
-                }),
-            ],
-        },
-    },
-    plugins: [
-        {
-            plugin: CracoAntDesignPlugin,
-            options: {
-                customizeThemeLessPath: path.join(__dirname, 'src/conf/theme/global-variables.less'),
-                customizeTheme: addLessPrefixToKeys(themeConfig.styles),
-            },
-        },
-    ],
-    jest: {
-        configure: {
-            // Use dist files instead of source files
-            moduleNameMapper: {
-                '^d3-interpolate-path': `d3-interpolate-path/build/d3-interpolate-path`,
-                '^d3-(.*)$': `d3-$1/dist/d3-$1`,
-                '^lib0/((?!dist).*)$': 'lib0/dist/$1.cjs',
-                '^y-protocols/(.*)$': 'y-protocols/dist/$1.cjs',
-                '\\.(css|less)$': '<rootDir>/src/__mocks__/styleMock.js',
-            },
-        },
-    },
-};
diff --git a/datahub-web-react/datahub-frontend.graphql b/datahub-web-react/datahub-frontend.graphql
deleted file mode 100644
index 6df3c387e14fe..0000000000000
--- a/datahub-web-react/datahub-frontend.graphql
+++ /dev/null
@@ -1,389 +0,0 @@
-scalar Long
-
-schema {
-    query: Query
-    mutation: Mutation
-}
-
-type Query {
-    dataset(urn: String!): Dataset
-    user(urn: String!): CorpUser
-    search(input: SearchInput!): SearchResults
-    autoComplete(input: AutoCompleteInput!): AutoCompleteResults
-    browse(input: BrowseInput!): BrowseResults
-    browsePaths(input: BrowsePathsInput!): [[String!]!]
-}
-
-type Mutation {
-    logIn(username: String!, password: String!): CorpUser
-    updateDataset(input: DatasetUpdateInput!): Dataset
-}
-
-input DatasetUpdateInput {
-    urn: String!
-    ownership: OwnershipUpdate
-}
-
-input OwnershipUpdate {
-    owners: [OwnerUpdate!]
-}
-
-input OwnerUpdate {
-    # The owner URN, eg urn:li:corpuser:1
-    owner: String!
-
-    # The owner role type
-    type: OwnershipType!
-}
-
-enum OwnershipSourceType {
-    AUDIT
-    DATABASE
-    FILE_SYSTEM
-    ISSUE_TRACKING_SYSTEM
-    MANUAL
-    SERVICE
-    SOURCE_CONTROL
-    OTHER
-}
-
-type OwnershipSource {
-    """
-    The type of the source
-    """
-    type: OwnershipSourceType!
-
-    """
-    A reference URL for the source
-    """
-    url: String
-}
-
-enum OwnershipType {
-    """
-    A person or group that is in charge of developing the code
-    """
-    DEVELOPER
-
-    """
-    A person or group that is owning the data
-    """
-    DATAOWNER
-
-    """
-    A person or a group that overseas the operation, e.g. a DBA or SRE.
-    """
-    DELEGATE
-
-    """
-    A person, group, or service that produces/generates the data
-    """
-    PRODUCER
-
-    """
-    A person, group, or service that consumes the data
-    """
-    CONSUMER
-
-    """
-    A person or a group that has direct business interest
-    """
-    STAKEHOLDER
-}
-
-type Owner {
-    """
-    Owner object
-    """
-    owner: CorpUser!
-
-    """
-    The type of the ownership
-    """
-    type: OwnershipType
-
-    """
-    Source information for the ownership
-    """
-    source: OwnershipSource
-}
-
-type Ownership {
-    owners: [Owner!]
-
-    lastModified: Long!
-}
-
-enum FabricType {
-    """
-    Designates development fabrics
-    """
-    DEV
-
-    """
-    Designates early-integration (staging) fabrics
-    """
-    EI
-
-    """
-    Designates production fabrics
-    """
-    PROD
-
-    """
-    Designates corporation fabrics
-    """
-    CORP
-}
-
-enum PlatformNativeType {
-    """
-    Table
-    """
-    TABLE
-
-    """
-    View
-    """
-    VIEW
-
-    """
-    Directory in file system
-    """
-    DIRECTORY
-
-    """
-    Stream
-    """
-    STREAM
-
-    """
-    Bucket in key value store
-    """
-    BUCKET
-}
-
-type PropertyTuple {
-    key: String!
-    value: String
-}
-
-type SubTypes {
-    typeNames: [String!]
-}
-
-type Dataset {
-    urn: String!
-
-    platform: String!
-
-    name: String!
-
-    origin: FabricType!
-
-    description: String
-
-    uri: String
-
-    platformNativeType: PlatformNativeType
-
-    tags: [String!]!
-
-    properties: [PropertyTuple!]
-
-    createdTime: Long!
-
-    modifiedTime: Long!
-
-    ownership: Ownership
-
-    subTypes: SubTypes
-}
-
-type CorpUserInfo {
-    active: Boolean!
-
-    displayName: String
-
-    email: String!
-
-    title: String
-
-    manager: CorpUser
-
-    departmentId: Long
-
-    departmentName: String
-
-    firstName: String
-
-    lastName: String
-
-    fullName: String
-
-    countryCode: String
-}
-
-type CorpUserEditableInfo {
-    aboutMe: String
-
-    teams: [String!]
-
-    skills: [String!]
-
-    pictureLink: String
-}
-
-type CorpUser {
-    urn: String!
-
-    username: String!
-
-    info: CorpUserInfo
-
-    editableInfo: CorpUserEditableInfo
-}
-
-type CorpGroup implements Entity {
-    """
-    The unique user URN
-    """
-    urn: String!
-
-    """
-    GMS Entity Type
-    """
-    type: EntityType!
-
-    """
-    group name e.g. wherehows-dev, ask_metadata
-    """
-    name: String
-
-    """
-    Information of the corp group
-    """
-    info: CorpGroupInfo
-}
-
-
-type CorpGroupInfo {
-    """
-    email of this group
-    """
-    email: String!
-
-    """
-    owners of this group
-    """
-    admins: [String!]!
-
-    """
-    List of ldap urn in this group.
-    """
-    members: [String!]!
-
-    """
-    List of groups in this group.
-    """
-    groups: [String!]!
-}
-
-enum EntityType {
-    DATASET
-    USER
-    DATA_FLOW
-    DATA_JOB
-    CORP_USER
-    CORP_GROUP
-}
-
-# Search Input
-input SearchInput {
-    type: EntityType!
-    query: String!
-    start: Int
-    count: Int
-    filters: [FacetFilterInput!]
-}
-
-input FacetFilterInput {
-    field: String! # Facet Field Name
-    value: String! # Facet Value
-}
-
-# Search Output
-type SearchResults {
-    start: Int!
-    count: Int!
-    total: Int!
-    elements: [SearchResult!]!
-    facets: [FacetMetadata!]
-}
-
-union SearchResult = Dataset | CorpUser
-
-type FacetMetadata {
-    field: String!
-    aggregations: [AggregationMetadata!]!
-}
-
-type AggregationMetadata {
-    value: String!
-    count: Long!
-}
-
-# Autocomplete Input
-input AutoCompleteInput {
-    type: EntityType!
-    query: String!
-    field: String # Field name
-    limit: Int
-    filters: [FacetFilterInput!]
-}
-
-# Autocomplete Output
-type AutoCompleteResults {
-    query: String!
-    suggestions: [String!]!
-}
-
-# Browse Inputs
-input BrowseInput {
-    type: EntityType!
-    path: [String!]
-    start: Int
-    count: Int
-    filters: [FacetFilterInput!]
-}
-
-# Browse Output
-type BrowseResults {
-    entities: [BrowseResultEntity!]!
-    start: Int!
-    count: Int!
-    total: Int!
-    metadata: BrowseResultMetadata!
-}
-
-type BrowseResultEntity {
-    name: String!
-    urn: String!
-}
-
-type BrowseResultMetadata {
-    path: [String!]
-    groups: [BrowseResultGroup!]!
-    totalNumEntities: Long!
-}
-
-type BrowseResultGroup {
-    name: String!
-    count: Long!
-}
-
-# Browse Paths Input
-input BrowsePathsInput {
-    type: EntityType!
-    urn: String!
-}
diff --git a/datahub-web-react/public/index.html b/datahub-web-react/index.html
similarity index 66%
rename from datahub-web-react/public/index.html
rename to datahub-web-react/index.html
index ead3a0aba82cb..9490881246e12 100644
--- a/datahub-web-react/public/index.html
+++ b/datahub-web-react/index.html
@@ -2,7 +2,7 @@
 [hunk body not recoverable: the HTML tags of this chunk were stripped in extraction]
@@ -10,21 +10,13 @@
 [hunk body not recoverable: the HTML tags were stripped in extraction; the surviving fragments are the comment "manifest.json provides metadata used when your web app is installed on a user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/", the "DataHub" title, and one added line at the end of the file]