-
-
Notifications
You must be signed in to change notification settings - Fork 5
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Added tools for faster extraction of the artifacts.
- Loading branch information
1 parent
035e7a9
commit 8cb0726
Showing
3 changed files
with
173 additions
and
1 deletion.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -360,4 +360,6 @@ MigrationBackup/ | |
.ionide/ | ||
|
||
# Fody - auto-generated XML schema | ||
FodyWeavers.xsd | ||
FodyWeavers.xsd | ||
|
||
token.txt |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,71 @@ | ||
import os | ||
import zipfile | ||
import shutil | ||
import re | ||
|
||
# Map the OS token found in artifact filenames to the runtime-identifier
# folder layout (e.g. "osx-x64", "win-arm64", "linux-x64").
base_folders = {
    'osx': 'osx-{arch}',          # macOS (runner name "macos" is normalized to "osx")
    'windows': 'win-{arch}',      # Windows
    'ubuntu': 'linux-{arch}',     # Ubuntu / Linux
    'android': 'android-{arch}',  # Android (zips contain ABI subfolders)
}

# Artifact filenames look like "<lib>-<os>-latest-<arch>-artifacts.zip",
# e.g. "cimgui-ubuntu-latest-x86_64-artifacts.zip". Compiled once, not per file.
_ARTIFACT_PATTERN = re.compile(r'(\w*)-(\w*)-latest-(\w*)-artifacts\.zip')


def extract_zip_files():
    """Extract every artifact zip in the current directory into its
    runtime-identifier folder (osx-x64, win-x64, linux-x64, android-arm64, ...).

    Android zips carry ABI subfolders (arm64-v8a, x86_64); their contents are
    moved into android-arm64 / android-x64 and the intermediate folders are
    removed. Files that do not match the naming scheme, or that name an OS
    not present in ``base_folders``, are skipped with a message; any other
    per-file failure is reported and the remaining archives are still
    processed.
    """
    zip_files = [f for f in os.listdir() if f.endswith('.zip')]

    for zip_file in zip_files:
        try:
            match = _ARTIFACT_PATTERN.match(zip_file)
            if not match:
                print(f"Unknown file format for {zip_file}. Skipping...")
                continue

            lib_name, os_name, arch = match.groups()

            # Normalize GitHub runner naming to the folder-layout naming.
            if os_name == 'macos':
                os_name = 'osx'
            if arch == 'x86_64':
                arch = 'x64'

            # Robustness fix: an unknown OS token used to raise KeyError,
            # which the broad except reported as a generic extraction failure.
            folder_template = base_folders.get(os_name)
            if folder_template is None:
                print(f"Unknown OS '{os_name}' in {zip_file}. Skipping...")
                continue

            # Create the target folder using the actual architecture.
            target_folder = folder_template.format(arch=arch)
            os.makedirs(target_folder, exist_ok=True)

            # Extract the zip file.
            with zipfile.ZipFile(zip_file, 'r') as zip_ref:
                zip_ref.extractall(target_folder)
            print(f"Extracted {zip_file} into {target_folder}/")

            # Android zips need their ABI subfolders relocated.
            if os_name == 'android':
                _relocate_android_abis(target_folder)

        except Exception as e:
            # Best-effort: report the failure and continue with the next archive.
            print(f"Failed to extract {zip_file}: {e}")


def _relocate_android_abis(target_folder):
    """Move the ABI subfolders of an extracted Android zip into their own
    android-<arch> folders, then delete the now-orphaned container folder."""
    for arch_folder in ['arm64-v8a', 'x86_64']:
        arch_path = os.path.join(target_folder, arch_folder)
        if os.path.exists(arch_path):
            new_arch_folder = base_folders['android'].format(
                arch=arch_folder.replace('arm64-v8a', 'arm64').replace('x86_64', 'x64'))
            os.makedirs(new_arch_folder, exist_ok=True)
            # Move all files from the ABI folder into the new folder.
            for item in os.listdir(arch_path):
                source_file = os.path.join(arch_path, item)
                destination_file = os.path.join(new_arch_folder, item)
                shutil.move(source_file, destination_file)
                print(f"Moved {item} from {arch_path}/ to {new_arch_folder}/")
            shutil.rmtree(arch_path)
            print(f"Deleted the directory at {arch_path}/")

    # Clean up the orphan container folder (e.g. android-all) if it exists.
    if os.path.exists(target_folder):
        shutil.rmtree(target_folder)
        print(f"Deleted the orphan directory at {target_folder}/")


if __name__ == "__main__":
    extract_zip_files()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,99 @@ | ||
import os | ||
import requests | ||
|
||
# Configuration
TOKEN_FILE = 'token.txt'  # File containing the personal access token
REPO_OWNER = 'HexaEngine'
REPO_NAME = 'Hexa.NET.ImGui'
BASE_URL = f'https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}'

# Set up headers for API requests.
# NOTE: the Authorization header is added later, in main(), after the
# token has been read from TOKEN_FILE.
headers = {
    'Accept': 'application/vnd.github.v3+json',
}

# Define the workflows you want to target.
# Each entry must match a workflow's display name exactly, since
# get_workflow_id() compares against the API's 'name' field.
TARGET_WORKFLOWS = [
    'Build all Libraries (Android)',
    'Build cimgui Libraries',
    'Build cimguizmo Libraries',
    'Build cimnodes Libraries',
    'Build cimplot Libraries',
]
|
||
def read_token_from_file(token_file):
    """Return the personal access token stored in *token_file*.

    Surrounding whitespace (including the trailing newline most editors
    append) is stripped before the token is returned.
    """
    with open(token_file, 'r') as token_handle:
        contents = token_handle.read()
    return contents.strip()
|
||
def get_workflow_id(workflow_name):
    """Return the numeric ID of the workflow named *workflow_name*, or None.

    Fetches the repository's workflow list from the GitHub Actions API and
    matches on the workflow's display name.
    """
    response = requests.get(f'{BASE_URL}/actions/workflows', headers=headers)
    response.raise_for_status()  # Surface HTTP errors to the caller

    workflows = response.json().get('workflows', [])
    # First workflow whose display name matches, or None if there is none.
    matches = (entry['id'] for entry in workflows
               if entry['name'] == workflow_name)
    return next(matches, None)
|
||
def get_latest_workflow_run(workflow_id):
    """Return the most recent run of *workflow_id*, or None if it never ran."""
    # per_page=1 — the API returns runs newest-first, so one entry suffices.
    url = f'{BASE_URL}/actions/workflows/{workflow_id}/runs?per_page=1'
    response = requests.get(url, headers=headers)
    response.raise_for_status()  # Surface HTTP errors to the caller
    runs = response.json().get('workflow_runs', [])
    if not runs:
        return None
    return runs[0]
|
||
def get_artifacts_for_run(run_id):
    """Return the list of artifacts produced by run *run_id* (may be empty)."""
    response = requests.get(f'{BASE_URL}/actions/runs/{run_id}/artifacts',
                            headers=headers)
    response.raise_for_status()  # Surface HTTP errors to the caller
    payload = response.json()
    return payload.get('artifacts', [])
|
||
def download_artifact(artifact_url, artifact_name):
    """Download the artifact archive at *artifact_url* to "<artifact_name>.zip".

    Fix: the original buffered the entire response in memory
    (``response.content``) before writing; native-library artifacts can be
    large, so the download is now streamed to disk in 64 KiB chunks.
    """
    with requests.get(artifact_url, headers=headers, stream=True) as response:
        response.raise_for_status()  # Surface HTTP errors to the caller
        with open(artifact_name + ".zip", 'wb') as f:
            for chunk in response.iter_content(chunk_size=1 << 16):
                f.write(chunk)
|
||
def main():
    """Download the artifacts of the latest run of every target workflow."""
    # Read the GitHub token from the file and authenticate all later calls.
    token = read_token_from_file(TOKEN_FILE)
    headers['Authorization'] = f'token {token}'

    for workflow_name in TARGET_WORKFLOWS:
        print(f'Checking workflow: {workflow_name}')

        # Resolve the workflow's display name to its numeric ID.
        workflow_id = get_workflow_id(workflow_name)
        if not workflow_id:
            print(f' Workflow not found: {workflow_name}')
            continue

        # Fetch the newest run of that workflow, if any.
        latest_run = get_latest_workflow_run(workflow_id)
        if not latest_run:
            print(f' No runs found for workflow: {workflow_name}')
            continue

        run_id = latest_run['id']
        print(f' Latest run ID: {run_id}')

        # Download every artifact of that run under its original name.
        for artifact in get_artifacts_for_run(run_id):
            artifact_name = artifact['name']
            print(f' Downloading artifact: {artifact_name}')
            download_artifact(artifact['archive_download_url'], artifact_name)


if __name__ == '__main__':
    main()