Merge branch 'datahub-project:master' into master
treff7es authored Nov 27, 2023
2 parents de2e1c4 + ae5969f commit bd57de8
Showing 16 changed files with 1,493 additions and 44 deletions.
@@ -27,7 +27,7 @@ export const UpdateDeprecationModal = ({ urns, onClose, refetch }: Props) => {
                 resources: [...urns.map((urn) => ({ resourceUrn: urn }))],
                 deprecated: true,
                 note: formData.note,
-                decommissionTime: formData.decommissionTime && formData.decommissionTime.unix(),
+                decommissionTime: formData.decommissionTime && formData.decommissionTime.unix() * 1000,
             },
         },
     });
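
Note: moment's unix() returns epoch seconds, while the new * 1000 means the value sent to the backend is now epoch milliseconds. A minimal sketch of that unit conversion (Python, values illustrative):

    import time

    def seconds_to_millis(epoch_seconds: float) -> int:
        # Multiply by 1000 to convert epoch seconds to epoch milliseconds.
        return int(epoch_seconds * 1000)

    now_s = time.time()                  # e.g. 1701100000.0 (seconds)
    now_ms = seconds_to_millis(now_s)    # e.g. 1701100000000 (milliseconds)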
@@ -83,15 +83,24 @@ export const DeprecationPill = ({ deprecation, urn, refetch, showUndeprecate }:
      * Deprecation Decommission Timestamp
      */
     const localeTimezone = getLocaleTimezone();
+
+    let decommissionTimeSeconds;
+    if (deprecation.decommissionTime) {
+        if (deprecation.decommissionTime < 943920000000) {
+            // A millisecond value this small would be in the distant past, so treat it as seconds
+            decommissionTimeSeconds = deprecation.decommissionTime;
+        } else {
+            decommissionTimeSeconds = deprecation.decommissionTime / 1000;
+        }
+    }
     const decommissionTimeLocal =
-        (deprecation.decommissionTime &&
+        (decommissionTimeSeconds &&
             `Scheduled to be decommissioned on ${moment
-                .unix(deprecation.decommissionTime)
+                .unix(decommissionTimeSeconds)
                 .format('DD/MMM/YYYY')} (${localeTimezone})`) ||
         undefined;
     const decommissionTimeGMT =
-        deprecation.decommissionTime &&
-        moment.unix(deprecation.decommissionTime).utc().format('dddd, DD/MMM/YYYY HH:mm:ss z');
+        decommissionTimeSeconds && moment.unix(decommissionTimeSeconds).utc().format('dddd, DD/MMM/YYYY HH:mm:ss z');
 
     const hasDetails = deprecation.note !== '' || deprecation.decommissionTime !== null;
     const isDividerNeeded = deprecation.note !== '' && deprecation.decommissionTime !== null;
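
The magic number 943920000000 is the pivot of the heuristic: read as milliseconds it lands around 30 November 1999, so any recent millisecond timestamp exceeds it, while legacy values stored as seconds (written before the modal started multiplying by 1000) stay well below it. A sketch of the same normalization, with the threshold copied from the diff:

    MS_THRESHOLD = 943_920_000_000  # ~1999-11-30 when read as milliseconds

    def normalize_to_seconds(ts: int) -> int:
        # Values below the threshold are too small to be recent milliseconds,
        # so treat them as seconds; otherwise convert milliseconds to seconds.
        return ts if ts < MS_THRESHOLD else ts // 1000

    assert normalize_to_seconds(1_700_000_000) == 1_700_000_000      # already seconds
    assert normalize_to_seconds(1_700_000_000_000) == 1_700_000_000  # milliseconds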
@@ -6,7 +6,7 @@ export const TRINO_HOST_PORT: RecipeField = {
     name: 'host_port',
     label: 'Host and Port',
     tooltip:
-        "The host and port where Trino is running. For example, 'trino-server:5432'. Note: this host must be accessible on the network where DataHub is running (or allowed via an IP Allow List, AWS PrivateLink, etc).",
+        "The host (without protocol and ://) and port where Trino is running. For example, 'trino-server:5432'. Note: this host must be accessible on the network where DataHub is running (or allowed via an IP Allow List, AWS PrivateLink, etc).",
     type: FieldType.TEXT,
     fieldPath: 'source.config.host_port',
     placeholder: 'trino-server:5432',
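
The reworded tooltip makes the expected format explicit: a bare host:port pair with no scheme. A hypothetical sanity check for that format (the regex is illustrative, not part of the source):

    import re

    # Bare "host:port" - no scheme, no "://", no trailing path.
    HOST_PORT_RE = re.compile(r"^[^/:]+:\d+$")

    assert HOST_PORT_RE.match("trino-server:5432")
    assert not HOST_PORT_RE.match("http://trino-server:5432")  # scheme not allowed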
2 changes: 1 addition & 1 deletion datahub-web-react/src/app/settings/SettingsPage.tsx
@@ -25,11 +25,11 @@ import ManagePosts from './posts/ManagePosts';
const PageContainer = styled.div`
    display: flex;
    overflow: auto;
    flex: 1;
`;

const SettingsBarContainer = styled.div`
    padding-top: 20px;
    max-height: 100vh;
    border-right: 1px solid ${ANTD_GRAY[5]};
    display: flex;
    flex-direction: column;
3 changes: 3 additions & 0 deletions metadata-ingestion/src/datahub/emitter/sql_parsing_builder.py
@@ -195,6 +195,9 @@ def _gen_lineage_mcps(self) -> Iterable[MetadataChangeProposalWrapper]:
             upstreams.append(edge.gen_upstream_aspect())
             fine_upstreams.extend(edge.gen_fine_grained_lineage_aspects())
 
+            if not upstreams:
+                continue
+
             upstream_lineage = UpstreamLineageClass(
                 upstreams=sorted(upstreams, key=lambda x: x.dataset),
                 fineGrainedLineages=sorted(
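
The new guard skips tables that ended up with no upstream edges, presumably so that an empty upstreamLineage aspect is never emitted (it would carry no information and could overwrite lineage written elsewhere). The pattern in isolation, as a sketch:

    def gen_lineage(upstreams_by_table: dict) -> list:
        emitted = []
        for table, upstreams in upstreams_by_table.items():
            if not upstreams:
                continue  # nothing to report; skip rather than emit an empty aspect
            emitted.append((table, sorted(upstreams)))
        return emitted

    assert gen_lineage({"a": ["b"], "c": []}) == [("a", ["b"])]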
@@ -895,6 +895,7 @@ def _infer_schemas_and_update_cll(self, all_nodes_map: Dict[str, DBTNode]) -> None:
                 (upstream, node.dbt_name)
                 for node in all_nodes_map.values()
                 for upstream in node.upstream_nodes
+                if upstream in all_nodes_map
             ),
         ):
             node = all_nodes_map[dbt_name]
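
The added condition filters out upstream references that point at nodes missing from all_nodes_map, presumably avoiding a KeyError in the subsequent all_nodes_map[...] lookup. A toy version of the filter (names illustrative):

    all_nodes_map = {"model.a": "A", "model.b": "B"}
    upstreams = {"model.b": ["model.a", "source.external"]}  # 'source.external' is unknown

    edges = [
        (up, down)
        for down, ups in upstreams.items()
        for up in ups
        if up in all_nodes_map  # drop edges to nodes we have no metadata for
    ]
    assert edges == [("model.a", "model.b")]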
@@ -1982,9 +1982,16 @@ def get_internal_workunits(self) -> Iterable[MetadataWorkUnit]:  # noqa: C901
             self.reporter,
         )
 
-        # some views can be mentioned by multiple 'include' statements and can be included via different connections.
-        # So this set is used to prevent creating duplicate events
+        # Some views can be mentioned by multiple 'include' statements and can be included via different connections.
+
+        # This map is used to keep track of which view files have already been processed
+        # for a connection in order to prevent creating duplicate events.
+        # Key: connection name, Value: view file paths
         processed_view_map: Dict[str, Set[str]] = {}
+
+        # This map is used to keep track of the connection that a view is processed with.
+        # Key: view unique identifier - determined by variables present in config `view_naming_pattern`
+        # Value: Tuple(model file name, connection name)
         view_connection_map: Dict[str, Tuple[str, str]] = {}
 
         # The ** means "this directory and all subdirectories", and hence should
@@ -2148,13 +2155,17 @@
                         if self.source_config.view_pattern.allowed(
                             maybe_looker_view.id.view_name
                         ):
+                            view_urn = maybe_looker_view.id.get_urn(
+                                self.source_config
+                            )
                             view_connection_mapping = view_connection_map.get(
-                                maybe_looker_view.id.view_name
+                                view_urn
                             )
                             if not view_connection_mapping:
-                                view_connection_map[
-                                    maybe_looker_view.id.view_name
-                                ] = (model_name, model.connection)
+                                view_connection_map[view_urn] = (
+                                    model_name,
+                                    model.connection,
+                                )
                                 # first time we are discovering this view
                                 logger.debug(
                                     f"Generating MCP for view {raw_view['name']}"
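
The substantive change in this hunk is the map key: views are now deduplicated by their full URN (which get_urn derives from the view_naming_pattern config) rather than by bare view name, so identically named views reached through different models or connections no longer collide. A minimal sketch of the dedup pattern (names and URNs illustrative):

    from typing import Dict, Tuple

    # Key: view URN, Value: (model file name, connection name) it was first seen with.
    view_connection_map: Dict[str, Tuple[str, str]] = {}

    def first_sighting(view_urn: str, model_name: str, connection: str) -> bool:
        # Only the first (model, connection) pair that reaches a view emits events for it.
        if view_urn in view_connection_map:
            return False
        view_connection_map[view_urn] = (model_name, connection)
        return True

    assert first_sighting("urn:li:view1", "model_a", "conn1") is True
    assert first_sighting("urn:li:view1", "model_b", "conn2") is False  # duplicate, skipped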