Merge pull request #114 from MPEGGroup/pr_check_fixes
PR Check Fixes
DenizUgur authored Oct 19, 2023
2 parents f152df7 + 49e8000 commit 65ae0d7
Showing 5 changed files with 33 additions and 10 deletions.
4 changes: 3 additions & 1 deletion conformance-search/src/types/json.ts
@@ -4,7 +4,9 @@ export type Coverage = {
         percentage: number;
         boxes: string[];
         missing_extensions: string[];
-        paths: string[];
+        paths: {
+            [path: string]: string[];
+        };
     };
     lists: {
         boxes: {
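For illustration, the paths field changes from a flat list of box paths to a mapping from each unresolved box path to the files that reference it. A minimal sketch of data matching the updated type, using hypothetical box paths and file keys, written as the Python dict whose keys mirror the structure built in src/construct/coverage.py below:

# Hypothetical excerpt of the coverage data described by the updated Coverage type;
# the box paths and file keys are made up for illustration.
not_found_paths = {
    "moov.trak.mdia.minf.stbl.abcd": ["published/isobmff/example_file"],
    "moov.udta.wxyz": [
        "published/isobmff/example_file",
        "under_consideration/example_draft_file",
    ],
}

# The previous type was the flat list of paths, equivalent to:
legacy_paths = list(not_found_paths.keys())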
7 changes: 6 additions & 1 deletion data/schemas/gpac-extension.schema.json
@@ -31,13 +31,18 @@
                 },
                 "@Specification": { "type": "string" },
                 "@Container": { "type": "string" },
+                "@data": {
+                    "type": "string",
+                    "description": "dump of the box in hexadecimal format",
+                    "pattern": "^0x[0-9a-fA-F]+$"
+                },
                 "children": {
                     "type": "array",
                     "items": { "$ref": "#/$defs/box" }
                 }
             },
             "additionalProperties": true,
-            "required": ["@Type"]
+            "required": ["@Type", "@data"]
         }
     },
     "type": "object",
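The practical effect is that every box in a GPAC extension file must now carry a hexadecimal dump of its payload. A minimal sketch, assuming the jsonschema package and a reduced stand-in schema (not the repository's actual file) with hypothetical box values:

import jsonschema

# Reduced stand-in for the box definition, capturing only the two new constraints:
# "@data" is required and must match the "^0x[0-9a-fA-F]+$" pattern.
box_schema = {
    "type": "object",
    "properties": {
        "@Type": {"type": "string"},
        "@data": {"type": "string", "pattern": "^0x[0-9a-fA-F]+$"},
    },
    "required": ["@Type", "@data"],
    "additionalProperties": True,
}

jsonschema.validate({"@Type": "wxyz", "@data": "0x0000001077787a99"}, box_schema)  # passes

try:
    jsonschema.validate({"@Type": "wxyz"}, box_schema)  # "@data" missing
except jsonschema.ValidationError as err:
    print(err.message)  # '@data' is a required property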
16 changes: 14 additions & 2 deletions src/construct/coverage.py
@@ -94,17 +94,29 @@ def main():
         "percentage": len(files["not_found"]) / len(files["path_file_map"]),
         "boxes": list(set(p.split(".")[-1] for p in files["not_found"])),
         "missing_extensions": list(missing_extensions),
-        "paths": files["not_found"],
+        "paths": list(files["not_found"].keys()),
     }
 
     # FIXME: All the logs here should be errors, except for info
-    for upath in NOT_FOUND["paths"]:
+    for upath, in_files in files["not_found"].items():
         # Easy to access variables
         container_path = upath.split(".")[:-1]
         box_fourcc = upath.split(".")[-1]
         known_box = box_fourcc in dictionary["fourccs"]
 
         if not known_box:
+            # Check if this was in under consideration files
+            if any(["under_consideration" in f for f in in_files]):
+                extra = ""
+                if box_fourcc in get_mp4ra_boxes():
+                    extra = " It exists in MP4RA though."
+
+                logger.error(
+                    f"Box {box_fourcc} was found in under consideration files but it is not in our database."
+                    + extra
+                )
+                continue
+
             if box_fourcc not in get_mp4ra_boxes():
                 logger.info(f"Box {box_fourcc} is not in standard features or MP4RA")
             else:
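In effect, an unknown box that is referenced by an under-consideration file is now reported as an error rather than logged at a lower level. A standalone sketch of the classification rule, with hypothetical stand-ins for dictionary["fourccs"] and get_mp4ra_boxes(); the final else branch is truncated in the hunk above, so it is left out here:

KNOWN_FOURCCS = {"moov", "trak", "mdia"}  # stand-in for dictionary["fourccs"]
MP4RA_FOURCCS = {"moov", "trak", "wxyz"}  # stand-in for get_mp4ra_boxes()

def classify(upath: str, in_files: list[str]) -> str:
    box_fourcc = upath.split(".")[-1]
    if box_fourcc in KNOWN_FOURCCS:
        return "known"
    if any("under_consideration" in f for f in in_files):
        # New case: the box came from under-consideration files but is missing
        # from the database, so it is reported as an error.
        note = " It exists in MP4RA though." if box_fourcc in MP4RA_FOURCCS else ""
        return "error: not in our database." + note
    if box_fourcc not in MP4RA_FOURCCS:
        return "info: not in standard features or MP4RA"
    return "other"  # remaining branches are not shown in this hunk

print(classify("moov.udta.wxyz", ["under_consideration/example_draft_file"]))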
8 changes: 5 additions & 3 deletions src/construct/files.py
@@ -145,7 +145,7 @@ def main():
     logger.warning(f"Found {len(ignored)} ignored files.")
 
     file_metadata = {}
-    not_found = set()
+    not_found = {}
 
     for file in files:
         # metadata is the one without the _gpac.json
@@ -187,7 +187,9 @@
 
         for path, variants in paths_contained.items():
             if path not in path_file_map:
-                not_found.add(path)
+                if path not in not_found:
+                    not_found[path] = set()
+                not_found[path].add(key_name)
                 continue
 
             for variant in variants:
@@ -247,7 +249,7 @@ def main():
     with open("output/files.json", "w", encoding="utf-8") as f:
         json.dump(
             {
-                "not_found": list(not_found),
+                "not_found": {path: sorted(files) for path, files in not_found.items()},
                 "path_file_map": path_file_map,
                 "feature_file_map": feature_file_map,
                 "file_metadata": file_metadata,
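The shape change from a set to a dict of sets is what forces the comprehension in the json.dump call: set values are not JSON-serializable, so each one is converted to a sorted list. A minimal sketch with hypothetical paths and file keys:

import json

not_found: dict[str, set[str]] = {}

def record_missing(path: str, key_name: str) -> None:
    # Same pattern as above: create the set on first sight, then add the file key.
    if path not in not_found:
        not_found[path] = set()
    not_found[path].add(key_name)

record_missing("moov.udta.wxyz", "published/isobmff/example_file")
record_missing("moov.udta.wxyz", "under_consideration/example_draft_file")

print(json.dumps({path: sorted(files) for path, files in not_found.items()}, indent=2))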
8 changes: 5 additions & 3 deletions src/tests/test_files.py
@@ -313,9 +313,11 @@ def test_gpac_ext_consistency(check):
     with open(gpac_ext, "r", encoding="utf-8") as f:
         gpac_ext_dict = json.load(f)
 
-    # Test if locations are the same
-    gt_locations = [ub["location"] for ub in unknown_boxes]
-    ref_locations = [ub["location"] for ub in gpac_ext_dict["extensions"]]
+    # Test if boxes are the same
+    gt_locations = [(ub["location"], ub["box"]["@data"]) for ub in unknown_boxes]
+    ref_locations = [
+        (ub["location"], ub["box"]["@data"]) for ub in gpac_ext_dict["extensions"]
+    ]
 
     # Reference must match exactly
     with check:
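A hypothetical illustration of what the stricter comparison catches: two extension boxes at the same location but with different payloads no longer count as a match, because the compared tuples now include the "@data" dump.

gt_locations = [("moov.udta", "0x00000010777878aa")]   # hypothetical values
ref_locations = [("moov.udta", "0x00000010777878bb")]  # same location, different dump

print(gt_locations == ref_locations)  # False: the payload mismatch is now detected
print([loc for loc, _ in gt_locations] == [loc for loc, _ in ref_locations])  # True under the old comparison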
