Formatting all Python files

Nikhil Sonti
2025-09-18 11:02:08 -07:00
parent 213539efc5
commit c55aff20f5
26 changed files with 1109 additions and 779 deletions

View File

@@ -24,12 +24,13 @@ from utils import log_info, log_error, log_success, log_warning, join_paths
@dataclass
class DevCliConfig:
"""Configuration for Dev CLI from various sources"""
chromium_src: Optional[Path] = None
auto_commit: bool = False
interactive: bool = True
@classmethod
def load(cls, cli_chromium_src: Optional[Path] = None) -> 'DevCliConfig':
def load(cls, cli_chromium_src: Optional[Path] = None) -> "DevCliConfig":
"""Load configuration from various sources with precedence:
1. CLI arguments (highest priority)
2. Environment variables
@@ -39,27 +40,35 @@ class DevCliConfig:
config = cls()
# Load from config file if exists
config_file = Path.cwd() / '.dev-cli.yaml'
config_file = Path.cwd() / ".dev-cli.yaml"
if config_file.exists():
try:
with open(config_file, 'r') as f:
with open(config_file, "r") as f:
file_config = yaml.safe_load(f)
if file_config and 'defaults' in file_config:
defaults = file_config['defaults']
if 'chromium_src' in defaults:
config.chromium_src = Path(defaults['chromium_src'])
config.auto_commit = defaults.get('auto_commit', False)
config.interactive = defaults.get('interactive', True)
if file_config and "defaults" in file_config:
defaults = file_config["defaults"]
if "chromium_src" in defaults:
config.chromium_src = Path(defaults["chromium_src"])
config.auto_commit = defaults.get("auto_commit", False)
config.interactive = defaults.get("interactive", True)
except Exception as e:
log_warning(f"Failed to load config file: {e}")
# Override with environment variables
if 'DEV_CLI_CHROMIUM_SRC' in os.environ:
config.chromium_src = Path(os.environ['DEV_CLI_CHROMIUM_SRC'])
if 'DEV_CLI_AUTO_COMMIT' in os.environ:
config.auto_commit = os.environ['DEV_CLI_AUTO_COMMIT'].lower() in ('true', '1', 'yes')
if 'DEV_CLI_INTERACTIVE' in os.environ:
config.interactive = os.environ['DEV_CLI_INTERACTIVE'].lower() in ('true', '1', 'yes')
if "DEV_CLI_CHROMIUM_SRC" in os.environ:
config.chromium_src = Path(os.environ["DEV_CLI_CHROMIUM_SRC"])
if "DEV_CLI_AUTO_COMMIT" in os.environ:
config.auto_commit = os.environ["DEV_CLI_AUTO_COMMIT"].lower() in (
"true",
"1",
"yes",
)
if "DEV_CLI_INTERACTIVE" in os.environ:
config.interactive = os.environ["DEV_CLI_INTERACTIVE"].lower() in (
"true",
"1",
"yes",
)
# Override with CLI arguments (highest priority)
if cli_chromium_src:
@@ -83,7 +92,7 @@ def create_build_context(chromium_src: Optional[Path] = None) -> Optional[BuildC
# For dev CLI, we just need it to be a git repository
# Don't enforce strict Chromium structure
if not (config.chromium_src / '.git').exists():
if not (config.chromium_src / ".git").exists():
log_warning(f"Warning: Not a git repository: {config.chromium_src}")
# Continue anyway - patches might still work
@@ -92,7 +101,7 @@ def create_build_context(chromium_src: Optional[Path] = None) -> Optional[BuildC
root_dir=Path.cwd(),
chromium_src=config.chromium_src,
architecture="", # Not needed for patch operations
build_type="debug" # Not needed for patch operations
build_type="debug", # Not needed for patch operations
)
# Store config in context for access by commands
@@ -105,10 +114,14 @@ def create_build_context(chromium_src: Optional[Path] = None) -> Optional[BuildC
@click.group()
@click.option('--chromium-src', '-S', type=click.Path(exists=True, path_type=Path),
help='Path to Chromium source directory')
@click.option('--verbose', '-v', is_flag=True, help='Enable verbose output')
@click.option('--quiet', '-q', is_flag=True, help='Suppress non-essential output')
@click.option(
"--chromium-src",
"-S",
type=click.Path(exists=True, path_type=Path),
help="Path to Chromium source directory",
)
@click.option("--verbose", "-v", is_flag=True, help="Enable verbose output")
@click.option("--quiet", "-q", is_flag=True, help="Suppress non-essential output")
@click.pass_context
def cli(ctx, chromium_src, verbose, quiet):
"""Dev CLI - Chromium patch management tool
@@ -133,9 +146,9 @@ def cli(ctx, chromium_src, verbose, quiet):
"""
# Store options in context for subcommands
ctx.ensure_object(dict)
ctx.obj['chromium_src'] = chromium_src
ctx.obj['verbose'] = verbose
ctx.obj['quiet'] = quiet
ctx.obj["chromium_src"] = chromium_src
ctx.obj["verbose"] = verbose
ctx.obj["quiet"] = quiet
# Import and register subcommand groups
@@ -158,24 +171,24 @@ except ImportError as e:
log_info("Dev CLI Status")
log_info("-" * 40)
build_ctx = create_build_context(ctx.obj.get('chromium_src'))
build_ctx = create_build_context(ctx.obj.get("chromium_src"))
if build_ctx:
log_success(f"Chromium source: {build_ctx.chromium_src}")
# Check for patches directory
patches_dir = build_ctx.root_dir / 'chromium_src'
patches_dir = build_ctx.root_dir / "chromium_src"
if patches_dir.exists():
patch_count = len(list(patches_dir.rglob('*.patch')))
patch_count = len(list(patches_dir.rglob("*.patch")))
log_info(f"Individual patches: {patch_count}")
else:
log_warning("No patches directory found")
# Check for features.yaml
features_file = build_ctx.root_dir / 'features.yaml'
features_file = build_ctx.root_dir / "features.yaml"
if features_file.exists():
with open(features_file) as f:
features = yaml.safe_load(f)
feature_count = len(features.get('features', {}))
feature_count = len(features.get("features", {}))
log_info(f"Features defined: {feature_count}")
else:
log_warning("No features.yaml found")
@@ -191,13 +204,14 @@ def main():
log_warning("\nInterrupted by user")
sys.exit(1)
except Exception as e:
if '--verbose' in sys.argv or '-v' in sys.argv:
if "--verbose" in sys.argv or "-v" in sys.argv:
import traceback
traceback.print_exc()
else:
log_error(f"Error: {e}")
sys.exit(1)
if __name__ == '__main__':
main()
if __name__ == "__main__":
main()
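
For reference, the reformatted DevCliConfig.load keeps the same precedence: CLI argument over environment variables over the .dev-cli.yaml defaults. Below is a minimal standalone sketch of the environment-variable step, assuming the same variable names; the helper name env_flag is illustrative, not part of the CLI.

import os
from pathlib import Path

def env_flag(name: str, default: bool) -> bool:
    # Mirrors the truthy check used above: "true", "1", or "yes" (case-insensitive)
    if name not in os.environ:
        return default
    return os.environ[name].lower() in ("true", "1", "yes")

chromium_src = (
    Path(os.environ["DEV_CLI_CHROMIUM_SRC"])
    if "DEV_CLI_CHROMIUM_SRC" in os.environ
    else None
)
auto_commit = env_flag("DEV_CLI_AUTO_COMMIT", False)
interactive = env_flag("DEV_CLI_INTERACTIVE", True)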

View File

@@ -1,2 +1,2 @@
# Build system modules
from .string_replaces import apply_string_replacements
from .string_replaces import apply_string_replacements

View File

@@ -13,18 +13,18 @@ from utils import run_command, log_info, log_success, safe_rmtree
def clean(ctx: BuildContext) -> bool:
"""Clean build artifacts"""
log_info("🧹 Cleaning build artifacts...")
out_path = ctx.chromium_src / ctx.out_dir
if out_path.exists():
safe_rmtree(out_path)
log_success("Cleaned build directory")
log_info("\n🔀 Resetting git branch and removing all tracked files...")
git_reset(ctx)
log_info("\n🧹 Cleaning Sparkle build artifacts...")
clean_sparkle(ctx)
return True
@@ -43,18 +43,24 @@ def git_reset(ctx: BuildContext) -> bool:
os.chdir(ctx.chromium_src)
run_command(["git", "reset", "--hard", "HEAD"])
os.chdir(ctx.root_dir)
log_info("\n🧹 Running git clean with exclusions for important directories...")
os.chdir(ctx.chromium_src)
run_command([
"git", "clean", "-fdx", "chrome/", "components/",
"--exclude=third_party/",
"--exclude=build_tools/",
"--exclude=uc_staging/",
"--exclude=buildtools/",
"--exclude=tools/",
"--exclude=build/"
])
run_command(
[
"git",
"clean",
"-fdx",
"chrome/",
"components/",
"--exclude=third_party/",
"--exclude=build_tools/",
"--exclude=uc_staging/",
"--exclude=buildtools/",
"--exclude=tools/",
"--exclude=build/",
]
)
os.chdir(ctx.root_dir)
log_success("Git reset and clean complete")
return True

View File

@@ -9,49 +9,59 @@ import shutil
import multiprocessing
from pathlib import Path
from context import BuildContext
from utils import run_command, log_info, log_success, log_warning, join_paths, IS_WINDOWS, IS_MACOS
from utils import (
run_command,
log_info,
log_success,
log_warning,
join_paths,
IS_WINDOWS,
IS_MACOS,
)
def build(ctx: BuildContext) -> bool:
"""Run the actual build"""
log_info("\n🔨 Building Nxtscape (this will take a while)...")
# Create VERSION file with nxtscape_chromium_version
if ctx.nxtscape_chromium_version:
# Parse the nxtscape_chromium_version back into components
parts = ctx.nxtscape_chromium_version.split('.')
parts = ctx.nxtscape_chromium_version.split(".")
if len(parts) == 4:
version_content = f"MAJOR={parts[0]}\nMINOR={parts[1]}\nBUILD={parts[2]}\nPATCH={parts[3]}"
# Create temporary VERSION file
with tempfile.NamedTemporaryFile(mode='w', delete=False) as temp_file:
with tempfile.NamedTemporaryFile(mode="w", delete=False) as temp_file:
temp_file.write(version_content)
temp_path = temp_file.name
# Copy VERSION file to chrome/VERSION
chrome_version_path = join_paths(ctx.chromium_src, "chrome", "VERSION")
shutil.copy2(temp_path, chrome_version_path)
# Clean up temp file
os.unlink(temp_path)
log_info(f"Created VERSION file with nxtscape_chromium_version: {ctx.nxtscape_chromium_version}")
log_info(
f"Created VERSION file with nxtscape_chromium_version: {ctx.nxtscape_chromium_version}"
)
else:
log_warning("No nxtscape_chromium_version set. Not building")
os.chdir(ctx.chromium_src)
# Use default autoninja parallelism (it handles this automatically)
autoninja_cmd = "autoninja.bat" if IS_WINDOWS else "autoninja"
log_info("Using default autoninja parallelism")
run_command([autoninja_cmd, "-C", ctx.out_dir, "chrome", "chromedriver"])
# Rename Chromium.app to Nxtscape.app
app_path = ctx.get_chromium_app_path()
new_path = ctx.get_app_path()
if app_path.exists() and not new_path.exists():
shutil.move(str(app_path), str(new_path))
log_success("Build complete!")
return True
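
As a worked example of the VERSION handling above: the four-part nxtscape_chromium_version is split on dots and written out as one key per line (the version value below is a placeholder, not the project's actual pin).

parts = "140.0.7339.5".split(".")  # placeholder version string
version_content = f"MAJOR={parts[0]}\nMINOR={parts[1]}\nBUILD={parts[2]}\nPATCH={parts[3]}"
# chrome/VERSION then contains:
#   MAJOR=140
#   MINOR=0
#   BUILD=7339
#   PATCH=5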

View File

@@ -14,32 +14,32 @@ from utils import run_command, log_info, log_error, log_success, join_paths, IS_
def configure(ctx: BuildContext, gn_flags_file: Optional[Path] = None) -> bool:
"""Configure the build with GN"""
log_info(f"\n⚙️ Configuring {ctx.build_type} build for {ctx.architecture}...")
# Create output directory
out_path = join_paths(ctx.chromium_src, ctx.out_dir)
out_path.mkdir(parents=True, exist_ok=True)
# Copy build flags
if gn_flags_file is None:
flags_file = ctx.get_gn_flags_file()
else:
flags_file = join_paths(ctx.root_dir, gn_flags_file)
if not flags_file.exists():
log_error(f"GN flags file not found: {flags_file}")
raise FileNotFoundError(f"GN flags file not found: {flags_file}")
args_file = ctx.get_gn_args_file()
args_content = flags_file.read_text()
args_content += f'\ntarget_cpu = "{ctx.architecture}"\n'
args_file.write_text(args_content)
# Run gn gen
os.chdir(ctx.chromium_src)
gn_cmd = "gn.bat" if IS_WINDOWS else "gn"
run_command([gn_cmd, "gen", ctx.out_dir, "--fail-on-unused-args"])
log_success("Build configured")
return True
return True
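
The configure step appends the target architecture to the copied GN flags before running gn gen. A small sketch of the resulting args.gn content, assuming a flags file that only sets is_debug and an arm64 build (both values are illustrative):

flags_content = "is_debug = true\n"  # example contents of the copied GN flags file
args_content = flags_content + '\ntarget_cpu = "arm64"\n'
# args.gn then reads:
#   is_debug = true
#
#   target_cpu = "arm64"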

View File

@@ -3,4 +3,4 @@ Dev CLI modules for Chromium patch management
"""
# This will be populated as modules are created
__all__ = ['extract', 'apply', 'feature', 'utils']
__all__ = ["extract", "apply", "feature", "utils"]

View File

@@ -11,27 +11,35 @@ from pathlib import Path
from typing import Optional, List, Dict
from context import BuildContext
from modules.dev_cli.utils import (
FilePatch, FileOperation, GitError,
run_git_command, validate_git_repository, validate_commit_exists,
parse_diff_output, write_patch_file, create_deletion_marker,
create_binary_marker, log_extraction_summary, get_commit_info,
get_commit_changed_files
FilePatch,
FileOperation,
GitError,
run_git_command,
validate_git_repository,
validate_commit_exists,
parse_diff_output,
write_patch_file,
create_deletion_marker,
create_binary_marker,
log_extraction_summary,
get_commit_info,
get_commit_changed_files,
)
from utils import log_info, log_error, log_success, log_warning
@click.group(name='extract')
@click.group(name="extract")
def extract_group():
"""Extract patches from git commits"""
pass
@extract_group.command(name='commit')
@click.argument('commit')
@click.option('--verbose', '-v', is_flag=True, help='Show detailed output')
@click.option('--force', '-f', is_flag=True, help='Overwrite existing patches')
@click.option('--include-binary', is_flag=True, help='Include binary files')
@click.option('--base', help='Extract full diff from base commit for files in COMMIT')
@extract_group.command(name="commit")
@click.argument("commit")
@click.option("--verbose", "-v", is_flag=True, help="Show detailed output")
@click.option("--force", "-f", is_flag=True, help="Overwrite existing patches")
@click.option("--include-binary", is_flag=True, help="Include binary files")
@click.option("--base", help="Extract full diff from base commit for files in COMMIT")
@click.pass_context
def extract_commit(ctx, commit, verbose, force, include_binary, base):
"""Extract patches from a single commit
@@ -47,10 +55,11 @@ def extract_commit(ctx, commit, verbose, force, include_binary, base):
the full diff from base..COMMIT for those files.
"""
# Get chromium source from parent context
chromium_src = ctx.parent.obj.get('chromium_src')
chromium_src = ctx.parent.obj.get("chromium_src")
# Create build context
from dev import create_build_context
build_ctx = create_build_context(chromium_src)
if not build_ctx:
@@ -87,20 +96,26 @@ def extract_commit(ctx, commit, verbose, force, include_binary, base):
log_error(f"Unexpected error: {e}")
if verbose:
import traceback
traceback.print_exc()
ctx.exit(1)
@extract_group.command(name='range')
@click.argument('base_commit')
@click.argument('head_commit')
@click.option('--verbose', '-v', is_flag=True, help='Show detailed output')
@click.option('--force', '-f', is_flag=True, help='Overwrite existing patches')
@click.option('--include-binary', is_flag=True, help='Include binary files')
@click.option('--squash', is_flag=True, help='Squash all commits into single patches')
@click.option('--base', help='Use different base for diff (gets full diff from base for files in range)')
@extract_group.command(name="range")
@click.argument("base_commit")
@click.argument("head_commit")
@click.option("--verbose", "-v", is_flag=True, help="Show detailed output")
@click.option("--force", "-f", is_flag=True, help="Overwrite existing patches")
@click.option("--include-binary", is_flag=True, help="Include binary files")
@click.option("--squash", is_flag=True, help="Squash all commits into single patches")
@click.option(
"--base",
help="Use different base for diff (gets full diff from base for files in range)",
)
@click.pass_context
def extract_range(ctx, base_commit, head_commit, verbose, force, include_binary, squash, base):
def extract_range(
ctx, base_commit, head_commit, verbose, force, include_binary, squash, base
):
"""Extract patches from a range of commits
\b
@@ -111,10 +126,11 @@ def extract_range(ctx, base_commit, head_commit, verbose, force, include_binary,
dev extract range HEAD~5 HEAD --base upstream/main
"""
# Get chromium source from parent context
chromium_src = ctx.parent.obj.get('chromium_src')
chromium_src = ctx.parent.obj.get("chromium_src")
# Create build context
from dev import create_build_context
build_ctx = create_build_context(chromium_src)
if not build_ctx:
@@ -126,7 +142,9 @@ def extract_range(ctx, base_commit, head_commit, verbose, force, include_binary,
ctx.exit(1)
if base:
log_info(f"Extracting patches from range: {base_commit}..{head_commit} (with base: {base})")
log_info(
f"Extracting patches from range: {base_commit}..{head_commit} (with base: {base})"
)
else:
log_info(f"Extracting patches from range: {base_commit}..{head_commit}")
@@ -134,12 +152,24 @@ def extract_range(ctx, base_commit, head_commit, verbose, force, include_binary,
if squash:
# Extract as single cumulative diff
extracted = extract_commit_range(
build_ctx, base_commit, head_commit, verbose, force, include_binary, base
build_ctx,
base_commit,
head_commit,
verbose,
force,
include_binary,
base,
)
else:
# Extract each commit separately
extracted = extract_commits_individually(
build_ctx, base_commit, head_commit, verbose, force, include_binary, base
build_ctx,
base_commit,
head_commit,
verbose,
force,
include_binary,
base,
)
if extracted > 0:
@@ -154,14 +184,19 @@ def extract_range(ctx, base_commit, head_commit, verbose, force, include_binary,
log_error(f"Unexpected error: {e}")
if verbose:
import traceback
traceback.print_exc()
ctx.exit(1)
def extract_single_commit(ctx: BuildContext, commit_hash: str,
verbose: bool = False, force: bool = False,
include_binary: bool = False,
base: Optional[str] = None) -> int:
def extract_single_commit(
ctx: BuildContext,
commit_hash: str,
verbose: bool = False,
force: bool = False,
include_binary: bool = False,
base: Optional[str] = None,
) -> int:
"""Extract patches from a single commit
Args:
@@ -182,7 +217,9 @@ def extract_single_commit(ctx: BuildContext, commit_hash: str,
# Get commit info for logging
commit_info = get_commit_info(commit_hash, ctx.chromium_src)
if commit_info and verbose:
log_info(f" Author: {commit_info['author_name']} <{commit_info['author_email']}>")
log_info(
f" Author: {commit_info['author_name']} <{commit_info['author_email']}>"
)
log_info(f" Subject: {commit_info['subject']}")
if base:
@@ -193,14 +230,19 @@ def extract_single_commit(ctx: BuildContext, commit_hash: str,
return extract_normal(ctx, commit_hash, verbose, force, include_binary)
def extract_normal(ctx: BuildContext, commit_hash: str,
verbose: bool, force: bool, include_binary: bool) -> int:
def extract_normal(
ctx: BuildContext,
commit_hash: str,
verbose: bool,
force: bool,
include_binary: bool,
) -> int:
"""Extract patches normally (diff against parent)"""
# Get diff against parent
diff_cmd = ['git', 'diff', f'{commit_hash}^..{commit_hash}']
diff_cmd = ["git", "diff", f"{commit_hash}^..{commit_hash}"]
if include_binary:
diff_cmd.append('--binary')
diff_cmd.append("--binary")
result = run_git_command(diff_cmd, cwd=ctx.chromium_src)
@@ -222,8 +264,14 @@ def extract_normal(ctx: BuildContext, commit_hash: str,
return write_patches(ctx, file_patches, verbose, include_binary)
def extract_with_base(ctx: BuildContext, commit_hash: str, base: str,
verbose: bool, force: bool, include_binary: bool) -> int:
def extract_with_base(
ctx: BuildContext,
commit_hash: str,
base: str,
verbose: bool,
force: bool,
include_binary: bool,
) -> int:
"""Extract patches with custom base (full diff from base for files in commit)"""
# Step 1: Get list of files changed in the commit
@@ -244,9 +292,9 @@ def extract_with_base(ctx: BuildContext, commit_hash: str, base: str,
log_info(f" Getting diff for: {file_path}")
# Get diff for this specific file from base to commit
diff_cmd = ['git', 'diff', f'{base}..{commit_hash}', '--', file_path]
diff_cmd = ["git", "diff", f"{base}..{commit_hash}", "--", file_path]
if include_binary:
diff_cmd.append('--binary')
diff_cmd.append("--binary")
result = run_git_command(diff_cmd, cwd=ctx.chromium_src)
@@ -263,21 +311,27 @@ def extract_with_base(ctx: BuildContext, commit_hash: str, base: str,
else:
# File might have been added/deleted
# Check if file exists in base and commit
base_exists = run_git_command(
['git', 'cat-file', '-e', f'{base}:{file_path}'],
cwd=ctx.chromium_src
).returncode == 0
base_exists = (
run_git_command(
["git", "cat-file", "-e", f"{base}:{file_path}"],
cwd=ctx.chromium_src,
).returncode
== 0
)
commit_exists = run_git_command(
['git', 'cat-file', '-e', f'{commit_hash}:{file_path}'],
cwd=ctx.chromium_src
).returncode == 0
commit_exists = (
run_git_command(
["git", "cat-file", "-e", f"{commit_hash}:{file_path}"],
cwd=ctx.chromium_src,
).returncode
== 0
)
if not base_exists and commit_exists:
# File was added - get full content
diff_cmd = ['git', 'diff', f'{base}..{commit_hash}', '--', file_path]
diff_cmd = ["git", "diff", f"{base}..{commit_hash}", "--", file_path]
if include_binary:
diff_cmd.append('--binary')
diff_cmd.append("--binary")
result = run_git_command(diff_cmd, cwd=ctx.chromium_src)
if result.stdout.strip():
patches = parse_diff_output(result.stdout)
@@ -289,7 +343,7 @@ def extract_with_base(ctx: BuildContext, commit_hash: str, base: str,
file_path=file_path,
operation=FileOperation.DELETE,
patch_content=None,
is_binary=False
is_binary=False,
)
if not file_patches:
@@ -328,8 +382,12 @@ def check_overwrite(ctx: BuildContext, file_patches: Dict, verbose: bool) -> boo
return True
def write_patches(ctx: BuildContext, file_patches: Dict[str, FilePatch],
verbose: bool, include_binary: bool) -> int:
def write_patches(
ctx: BuildContext,
file_patches: Dict[str, FilePatch],
verbose: bool,
include_binary: bool,
) -> int:
"""Write patches to disk"""
success_count = 0
fail_count = 0
@@ -370,7 +428,7 @@ def write_patches(ctx: BuildContext, file_patches: Dict[str, FilePatch],
else:
# Pure rename - create marker
marker_path = ctx.get_dev_patches_dir() / file_path
marker_path = marker_path.with_suffix(marker_path.suffix + '.rename')
marker_path = marker_path.with_suffix(marker_path.suffix + ".rename")
marker_path.parent.mkdir(parents=True, exist_ok=True)
try:
marker_content = f"Renamed from: {patch.old_path}\nSimilarity: {patch.similarity}%\n"
@@ -403,10 +461,15 @@ def write_patches(ctx: BuildContext, file_patches: Dict[str, FilePatch],
return success_count
def extract_commit_range(ctx: BuildContext, base_commit: str,
head_commit: str, verbose: bool = False,
force: bool = False, include_binary: bool = False,
custom_base: Optional[str] = None) -> int:
def extract_commit_range(
ctx: BuildContext,
base_commit: str,
head_commit: str,
verbose: bool = False,
force: bool = False,
include_binary: bool = False,
custom_base: Optional[str] = None,
) -> int:
"""Extract patches from a commit range as a single cumulative diff
Returns:
@@ -422,8 +485,8 @@ def extract_commit_range(ctx: BuildContext, base_commit: str,
# Count commits in range for progress
result = run_git_command(
['git', 'rev-list', '--count', f'{base_commit}..{head_commit}'],
cwd=ctx.chromium_src
["git", "rev-list", "--count", f"{base_commit}..{head_commit}"],
cwd=ctx.chromium_src,
)
commit_count = int(result.stdout.strip()) if result.returncode == 0 else 0
@@ -436,13 +499,20 @@ def extract_commit_range(ctx: BuildContext, base_commit: str,
# Step 2: Get diff based on whether we have a custom base
if custom_base:
# First get list of files changed in the range
range_files_cmd = ['git', 'diff', '--name-only', f'{base_commit}..{head_commit}']
range_files_cmd = [
"git",
"diff",
"--name-only",
f"{base_commit}..{head_commit}",
]
result = run_git_command(range_files_cmd, cwd=ctx.chromium_src)
if result.returncode != 0:
raise GitError(f"Failed to get changed files: {result.stderr}")
changed_files = result.stdout.strip().split('\n') if result.stdout.strip() else []
changed_files = (
result.stdout.strip().split("\n") if result.stdout.strip() else []
)
if not changed_files:
log_warning("No files changed in range")
@@ -451,17 +521,17 @@ def extract_commit_range(ctx: BuildContext, base_commit: str,
log_info(f"Found {len(changed_files)} files changed in range")
# Now get diff from custom base to head for these files
diff_cmd = ['git', 'diff', f'{custom_base}..{head_commit}']
diff_cmd = ["git", "diff", f"{custom_base}..{head_commit}"]
if include_binary:
diff_cmd.append('--binary')
diff_cmd.append("--binary")
# Add the specific files to diff command
diff_cmd.append('--')
diff_cmd.append("--")
diff_cmd.extend(changed_files)
else:
# Regular diff from base_commit to head_commit
diff_cmd = ['git', 'diff', f'{base_commit}..{head_commit}']
diff_cmd = ["git", "diff", f"{base_commit}..{head_commit}"]
if include_binary:
diff_cmd.append('--binary')
diff_cmd.append("--binary")
result = run_git_command(diff_cmd, cwd=ctx.chromium_src, timeout=120)
@@ -486,9 +556,9 @@ def extract_commit_range(ctx: BuildContext, base_commit: str,
# Process with progress indicator
with click.progressbar(
file_patches.items(),
label='Extracting patches',
label="Extracting patches",
show_pos=True,
show_percent=True
show_percent=True,
) as patches_bar:
for file_path, patch in patches_bar:
# Handle different operations
@@ -526,11 +596,15 @@ def extract_commit_range(ctx: BuildContext, base_commit: str,
return success_count
def extract_commits_individually(ctx: BuildContext, base_commit: str,
head_commit: str, verbose: bool = False,
force: bool = False,
include_binary: bool = False,
custom_base: Optional[str] = None) -> int:
def extract_commits_individually(
ctx: BuildContext,
base_commit: str,
head_commit: str,
verbose: bool = False,
force: bool = False,
include_binary: bool = False,
custom_base: Optional[str] = None,
) -> int:
"""Extract patches from each commit in a range individually
This preserves commit boundaries and can help with conflict resolution.
@@ -544,14 +618,14 @@ def extract_commits_individually(ctx: BuildContext, base_commit: str,
# Get list of commits in range
result = run_git_command(
['git', 'rev-list', '--reverse', f'{base_commit}..{head_commit}'],
cwd=ctx.chromium_src
["git", "rev-list", "--reverse", f"{base_commit}..{head_commit}"],
cwd=ctx.chromium_src,
)
if result.returncode != 0:
raise GitError(f"Failed to list commits: {result.stderr}")
commits = [c.strip() for c in result.stdout.strip().split('\n') if c.strip()]
commits = [c.strip() for c in result.stdout.strip().split("\n") if c.strip()]
if not commits:
log_warning(f"No commits between {base_commit} and {head_commit}")
@@ -565,24 +639,28 @@ def extract_commits_individually(ctx: BuildContext, base_commit: str,
failed_commits = []
with click.progressbar(
commits,
label='Processing commits',
show_pos=True,
show_percent=True
commits, label="Processing commits", show_pos=True, show_percent=True
) as commits_bar:
for commit in commits_bar:
try:
if custom_base:
# Use extract_with_base for full diff from custom base
extracted = extract_with_base(
ctx, commit, custom_base, verbose=False,
force=force, include_binary=include_binary
ctx,
commit,
custom_base,
verbose=False,
force=force,
include_binary=include_binary,
)
else:
# Normal extraction from parent
extracted = extract_single_commit(
ctx, commit, verbose=False, force=force,
include_binary=include_binary
ctx,
commit,
verbose=False,
force=force,
include_binary=include_binary,
)
total_extracted += extracted
except GitError as e:
@@ -597,4 +675,4 @@ def extract_commits_individually(ctx: BuildContext, base_commit: str,
if len(failed_commits) > 5:
log_warning(f" ... and {len(failed_commits) - 5} more")
return total_extracted
return total_extracted
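
To summarize the two extraction modes reflowed above: normal extraction diffs a commit against its parent, while the custom --base mode takes the full diff from the base to the commit, limited to the files that commit touched. A compressed sketch of the command lists being built (hash, base, and path are placeholders):

commit_hash, base, file_path = "abc1234", "upstream/main", "chrome/browser/foo.cc"  # placeholders
# Normal extraction: diff the commit against its parent
normal_cmd = ["git", "diff", f"{commit_hash}^..{commit_hash}"]
# Custom-base extraction: full diff from base to commit for one changed file
base_cmd = ["git", "diff", f"{base}..{commit_hash}", "--", file_path]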

View File

@@ -13,16 +13,16 @@ from modules.dev_cli.utils import get_commit_changed_files, run_git_command
from utils import log_info, log_error, log_success, log_warning
@click.group(name='feature')
@click.group(name="feature")
def feature_group():
"""Manage feature-to-file mappings"""
pass
@feature_group.command(name='add')
@click.argument('feature_name')
@click.argument('commit')
@click.option('--description', '-d', help='Description of the feature')
@feature_group.command(name="add")
@click.argument("feature_name")
@click.argument("commit")
@click.option("--description", "-d", help="Description of the feature")
@click.pass_context
def add_feature(ctx, feature_name, commit, description):
"""Add files from a commit to a feature
@@ -32,9 +32,10 @@ def add_feature(ctx, feature_name, commit, description):
dev feature add llm-chat HEAD
dev feature add my-feature abc123 -d "My new feature"
"""
chromium_src = ctx.parent.obj.get('chromium_src')
chromium_src = ctx.parent.obj.get("chromium_src")
from dev import create_build_context
build_ctx = create_build_context(chromium_src)
if not build_ctx:
return
@@ -53,37 +54,37 @@ def add_feature(ctx, feature_name, commit, description):
with open(features_path) as f:
data = yaml.safe_load(f) or {}
else:
data = {'version': '1.0', 'features': {}}
data = {"version": "1.0", "features": {}}
features = data.get('features', {})
features = data.get("features", {})
# Add or update feature
if feature_name in features:
existing_files = set(features[feature_name].get('files', []))
existing_files = set(features[feature_name].get("files", []))
all_files = list(existing_files | set(changed_files))
features[feature_name]['files'] = sorted(all_files)
features[feature_name]["files"] = sorted(all_files)
log_info(f"Updated feature '{feature_name}' ({len(all_files)} files total)")
else:
features[feature_name] = {
'description': description or f"Feature from commit {commit[:8]}",
'files': sorted(changed_files)
"description": description or f"Feature from commit {commit[:8]}",
"files": sorted(changed_files),
}
log_info(f"Created feature '{feature_name}' with {len(changed_files)} files")
# Save back
data['features'] = features
with open(features_path, 'w') as f:
data["features"] = features
with open(features_path, "w") as f:
yaml.dump(data, f, default_flow_style=False, sort_keys=False)
log_success(f"Feature '{feature_name}' saved")
@feature_group.command(name='list')
@feature_group.command(name="list")
@click.pass_context
def list_features(ctx):
"""List all features"""
# Use current directory's features.yaml
features_path = Path.cwd() / 'features.yaml'
features_path = Path.cwd() / "features.yaml"
if not features_path.exists():
log_warning("No features defined (features.yaml not found)")
@@ -92,7 +93,7 @@ def list_features(ctx):
with open(features_path) as f:
data = yaml.safe_load(f) or {}
features = data.get('features', {})
features = data.get("features", {})
if not features:
log_warning("No features defined")
@@ -100,17 +101,17 @@ def list_features(ctx):
log_info("Features:")
for name, info in features.items():
file_count = len(info.get('files', []))
description = info.get('description', 'No description')
file_count = len(info.get("files", []))
description = info.get("description", "No description")
log_info(f" {name} ({file_count} files) - {description}")
@feature_group.command(name='show')
@click.argument('feature_name')
@feature_group.command(name="show")
@click.argument("feature_name")
@click.pass_context
def show_feature(ctx, feature_name):
"""Show details of a specific feature"""
features_path = Path.cwd() / 'features.yaml'
features_path = Path.cwd() / "features.yaml"
if not features_path.exists():
log_error("No features.yaml found")
@@ -119,14 +120,14 @@ def show_feature(ctx, feature_name):
with open(features_path) as f:
data = yaml.safe_load(f)
features = data.get('features', {})
features = data.get("features", {})
if feature_name not in features:
log_error(f"Feature '{feature_name}' not found")
ctx.exit(1)
info = features[feature_name]
files = info.get('files', [])
files = info.get("files", [])
log_info(f"Feature: {feature_name}")
log_info(f"Description: {info.get('description', 'No description')}")
@@ -136,9 +137,9 @@ def show_feature(ctx, feature_name):
log_info(f" - {file_path}")
@feature_group.command(name='generate-patch')
@click.argument('feature_name')
@click.option('--output', '-o', type=click.Path(), help='Output file path')
@feature_group.command(name="generate-patch")
@click.argument("feature_name")
@click.option("--output", "-o", type=click.Path(), help="Output file path")
@click.pass_context
def generate_patch(ctx, feature_name, output):
"""Generate combined patch for a feature
@@ -149,7 +150,7 @@ def generate_patch(ctx, feature_name, output):
dev feature generate-patch my-feature -o combined.patch
"""
# Load feature
features_path = Path.cwd() / 'features.yaml'
features_path = Path.cwd() / "features.yaml"
if not features_path.exists():
log_error("No features.yaml found")
@@ -158,20 +159,20 @@ def generate_patch(ctx, feature_name, output):
with open(features_path) as f:
data = yaml.safe_load(f)
features = data.get('features', {})
features = data.get("features", {})
if feature_name not in features:
log_error(f"Feature '{feature_name}' not found")
ctx.exit(1)
file_list = features[feature_name].get('files', [])
file_list = features[feature_name].get("files", [])
if not file_list:
log_error(f"Feature '{feature_name}' has no files")
ctx.exit(1)
# Find patches directory
patches_dir = Path.cwd() / 'chromium_src'
patches_dir = Path.cwd() / "chromium_src"
if not patches_dir.exists():
log_error(f"Patches directory not found: {patches_dir}")
ctx.exit(1)
@@ -217,12 +218,12 @@ def generate_patch(ctx, feature_name, output):
click.echo(combined)
@feature_group.command(name='remove')
@click.argument('feature_name')
@feature_group.command(name="remove")
@click.argument("feature_name")
@click.pass_context
def remove_feature(ctx, feature_name):
"""Remove a feature"""
features_path = Path.cwd() / 'features.yaml'
features_path = Path.cwd() / "features.yaml"
if not features_path.exists():
log_error("No features.yaml found")
@@ -231,7 +232,7 @@ def remove_feature(ctx, feature_name):
with open(features_path) as f:
data = yaml.safe_load(f)
features = data.get('features', {})
features = data.get("features", {})
if feature_name not in features:
log_error(f"Feature '{feature_name}' not found")
@@ -239,9 +240,9 @@ def remove_feature(ctx, feature_name):
# Remove and save
del features[feature_name]
data['features'] = features
data["features"] = features
with open(features_path, 'w') as f:
with open(features_path, "w") as f:
yaml.dump(data, f, default_flow_style=False, sort_keys=False)
log_success(f"Removed feature '{feature_name}'")
log_success(f"Removed feature '{feature_name}'")

View File

@@ -29,8 +29,8 @@ index abc123..def456 100644
result = parse_diff_output(diff)
assert len(result) == 1
assert 'file.txt' in result
patch = result['file.txt']
assert "file.txt" in result
patch = result["file.txt"]
assert patch.operation == FileOperation.MODIFY
assert not patch.is_binary
assert patch.patch_content is not None
@@ -51,8 +51,8 @@ index 0000000..abc123
result = parse_diff_output(diff)
assert len(result) == 1
assert 'newfile.txt' in result
patch = result['newfile.txt']
assert "newfile.txt" in result
patch = result["newfile.txt"]
assert patch.operation == FileOperation.ADD
assert patch.patch_content is not None
print("✓ New file test passed")
@@ -72,8 +72,8 @@ index abc123..0000000
result = parse_diff_output(diff)
assert len(result) == 1
assert 'deleted.txt' in result
patch = result['deleted.txt']
assert "deleted.txt" in result
patch = result["deleted.txt"]
assert patch.operation == FileOperation.DELETE
print("✓ Deleted file test passed")
@@ -87,10 +87,10 @@ rename to new_name.txt"""
result = parse_diff_output(diff)
assert len(result) == 1
assert 'new_name.txt' in result
patch = result['new_name.txt']
assert "new_name.txt" in result
patch = result["new_name.txt"]
assert patch.operation == FileOperation.RENAME
assert patch.old_path == 'old_name.txt'
assert patch.old_path == "old_name.txt"
assert patch.similarity == 100
print("✓ Renamed file test passed")
@@ -113,10 +113,10 @@ index abc123..def456 100644
result = parse_diff_output(diff)
assert len(result) == 1
assert 'new_name.txt' in result
patch = result['new_name.txt']
assert "new_name.txt" in result
patch = result["new_name.txt"]
assert patch.operation == FileOperation.RENAME
assert patch.old_path == 'old_name.txt'
assert patch.old_path == "old_name.txt"
assert patch.similarity == 85
assert patch.patch_content is not None
print("✓ Renamed with changes test passed")
@@ -130,8 +130,8 @@ Binary files a/image.png and b/image.png differ"""
result = parse_diff_output(diff)
assert len(result) == 1
assert 'image.png' in result
patch = result['image.png']
assert "image.png" in result
patch = result["image.png"]
assert patch.is_binary
assert patch.patch_content is None # Binary content not stored
print("✓ Binary file test passed")
@@ -163,13 +163,13 @@ index 111111..000000
result = parse_diff_output(diff)
assert len(result) == 3
assert 'file1.txt' in result
assert 'file2.txt' in result
assert 'file3.txt' in result
assert "file1.txt" in result
assert "file2.txt" in result
assert "file3.txt" in result
assert result['file1.txt'].operation == FileOperation.MODIFY
assert result['file2.txt'].operation == FileOperation.ADD
assert result['file3.txt'].operation == FileOperation.DELETE
assert result["file1.txt"].operation == FileOperation.MODIFY
assert result["file2.txt"].operation == FileOperation.ADD
assert result["file3.txt"].operation == FileOperation.DELETE
print("✓ Multiple files test passed")
@@ -187,10 +187,10 @@ index abc123..def456 100644
result = parse_diff_output(diff)
assert len(result) == 1
assert 'file.txt' in result
patch = result['file.txt']
assert "file.txt" in result
patch = result["file.txt"]
assert patch.operation == FileOperation.MODIFY
assert '\\ No newline at end of file' in patch.patch_content
assert "\\ No newline at end of file" in patch.patch_content
print("✓ No newline marker test passed")
@@ -209,8 +209,8 @@ index abc123..def456 100644
result = parse_diff_output(diff)
assert len(result) == 1
assert 'src/chrome/browser/ui/views/file.cc' in result
patch = result['src/chrome/browser/ui/views/file.cc']
assert "src/chrome/browser/ui/views/file.cc" in result
patch = result["src/chrome/browser/ui/views/file.cc"]
assert patch.operation == FileOperation.MODIFY
print("✓ Complex path test passed")
@@ -236,11 +236,11 @@ index abc123..abc123
result = parse_diff_output(diff)
assert len(result) == 1
assert 'script.sh' in result
patch = result['script.sh']
assert "script.sh" in result
patch = result["script.sh"]
# Mode changes are captured in the patch content
assert 'old mode 100644' in patch.patch_content
assert 'new mode 100755' in patch.patch_content
assert "old mode 100644" in patch.patch_content
assert "new mode 100755" in patch.patch_content
print("✓ Mode change test passed")
@@ -253,10 +253,10 @@ copy to copy.txt"""
result = parse_diff_output(diff)
assert len(result) == 1
assert 'copy.txt' in result
patch = result['copy.txt']
assert "copy.txt" in result
patch = result["copy.txt"]
assert patch.operation == FileOperation.COPY
assert patch.old_path == 'original.txt'
assert patch.old_path == "original.txt"
assert patch.similarity == 100
print("✓ Copied file test passed")
@@ -275,7 +275,7 @@ def run_all_tests():
test_complex_path,
test_empty_diff,
test_mode_change,
test_copied_file
test_copied_file,
]
print("Running diff parser tests...")
@@ -301,6 +301,6 @@ def run_all_tests():
return True
if __name__ == '__main__':
if __name__ == "__main__":
success = run_all_tests()
sys.exit(0 if success else 1)
sys.exit(0 if success else 1)

View File

@@ -20,6 +20,7 @@ from utils import log_info, log_error, log_success, log_warning
class FileOperation(Enum):
"""Types of file operations in a diff"""
ADD = "add"
MODIFY = "modify"
DELETE = "delete"
@@ -31,6 +32,7 @@ class FileOperation(Enum):
@dataclass
class FilePatch:
"""Represents a single file's patch information"""
file_path: str
operation: FileOperation
old_path: Optional[str] = None # For renames/copies
@@ -41,13 +43,18 @@ class FilePatch:
class GitError(Exception):
"""Custom exception for git operations"""
pass
def run_git_command(cmd: List[str], cwd: Path,
capture: bool = True, check: bool = False,
timeout: Optional[int] = None,
binary_output: bool = False) -> subprocess.CompletedProcess:
def run_git_command(
cmd: List[str],
cwd: Path,
capture: bool = True,
check: bool = False,
timeout: Optional[int] = None,
binary_output: bool = False,
) -> subprocess.CompletedProcess:
"""Run a git command and return the result
Args:
@@ -67,7 +74,7 @@ def run_git_command(cmd: List[str], cwd: Path,
try:
# For commands that might output binary data (like git diff with binary files),
# we need to handle them specially
if binary_output or ('diff' in cmd and '--binary' not in cmd):
if binary_output or ("diff" in cmd and "--binary" not in cmd):
# First try with text mode
try:
result = subprocess.run(
@@ -77,7 +84,7 @@ def run_git_command(cmd: List[str], cwd: Path,
text=True,
check=False,
timeout=timeout or 60,
errors='replace' # Replace invalid UTF-8 sequences
errors="replace", # Replace invalid UTF-8 sequences
)
except UnicodeDecodeError:
# Fall back to binary mode
@@ -87,13 +94,13 @@ def run_git_command(cmd: List[str], cwd: Path,
capture_output=capture,
text=False,
check=False,
timeout=timeout or 60
timeout=timeout or 60,
)
# Convert to text with error handling
if result.stdout:
result.stdout = result.stdout.decode('utf-8', errors='replace')
result.stdout = result.stdout.decode("utf-8", errors="replace")
if result.stderr:
result.stderr = result.stderr.decode('utf-8', errors='replace')
result.stderr = result.stderr.decode("utf-8", errors="replace")
else:
result = subprocess.run(
cmd,
@@ -101,7 +108,7 @@ def run_git_command(cmd: List[str], cwd: Path,
capture_output=capture,
text=True,
check=False,
timeout=timeout or 60
timeout=timeout or 60,
)
if check and result.returncode != 0:
@@ -121,9 +128,7 @@ def validate_git_repository(path: Path) -> bool:
"""Validate that a path is a git repository"""
try:
result = run_git_command(
['git', 'rev-parse', '--git-dir'],
cwd=path,
check=False
["git", "rev-parse", "--git-dir"], cwd=path, check=False
)
return result.returncode == 0
except GitError:
@@ -134,8 +139,8 @@ def validate_commit_exists(commit_hash: str, chromium_src: Path) -> bool:
"""Validate that a commit exists in the repository"""
try:
result = run_git_command(
['git', 'rev-parse', '--verify', f'{commit_hash}^{{commit}}'],
cwd=chromium_src
["git", "rev-parse", "--verify", f"{commit_hash}^{{commit}}"],
cwd=chromium_src,
)
if result.returncode != 0:
@@ -151,15 +156,15 @@ def get_commit_changed_files(commit_hash: str, chromium_src: Path) -> List[str]:
"""Get list of files changed in a commit"""
try:
result = run_git_command(
['git', 'diff-tree', '--no-commit-id', '--name-only', '-r', commit_hash],
cwd=chromium_src
["git", "diff-tree", "--no-commit-id", "--name-only", "-r", commit_hash],
cwd=chromium_src,
)
if result.returncode != 0:
log_error(f"Failed to get changed files for commit {commit_hash}")
return []
files = [f.strip() for f in result.stdout.strip().split('\n') if f.strip()]
files = [f.strip() for f in result.stdout.strip().split("\n") if f.strip()]
return files
except GitError as e:
log_error(f"Error getting changed files: {e}")
@@ -197,21 +202,23 @@ def parse_diff_output(diff_output: str) -> Dict[str, FilePatch]:
line = lines[i]
# Start of a new file diff
if line.startswith('diff --git'):
if line.startswith("diff --git"):
# Save previous patch if exists
if current_file and current_patch_lines:
patch_content = '\n'.join(current_patch_lines) if not is_binary else None
patch_content = (
"\n".join(current_patch_lines) if not is_binary else None
)
patches[current_file] = FilePatch(
file_path=current_file,
operation=current_operation,
old_path=old_path,
patch_content=patch_content,
is_binary=is_binary,
similarity=similarity
similarity=similarity,
)
# Parse file paths from diff line
match = re.match(r'diff --git a/(.*) b/(.*)', line)
match = re.match(r"diff --git a/(.*) b/(.*)", line)
if match:
old_file = match.group(1)
new_file = match.group(2)
@@ -231,45 +238,53 @@ def parse_diff_output(diff_output: str) -> Dict[str, FilePatch]:
# Check for file metadata
if current_file:
if line.startswith('deleted file'):
if line.startswith("deleted file"):
current_operation = FileOperation.DELETE
current_patch_lines.append(line)
elif line.startswith('new file'):
elif line.startswith("new file"):
current_operation = FileOperation.ADD
current_patch_lines.append(line)
elif line.startswith('similarity index'):
elif line.startswith("similarity index"):
# Extract similarity percentage for renames
match = re.match(r'similarity index (\d+)%', line)
match = re.match(r"similarity index (\d+)%", line)
if match:
similarity = int(match.group(1))
current_patch_lines.append(line)
elif line.startswith('rename from'):
elif line.startswith("rename from"):
current_operation = FileOperation.RENAME
old_path = line[12:].strip() # Remove 'rename from '
current_patch_lines.append(line)
elif line.startswith('rename to'):
elif line.startswith("rename to"):
# Confirm rename operation
current_patch_lines.append(line)
elif line.startswith('copy from'):
elif line.startswith("copy from"):
current_operation = FileOperation.COPY
old_path = line[10:].strip() # Remove 'copy from '
current_patch_lines.append(line)
elif line.startswith('copy to'):
elif line.startswith("copy to"):
# Confirm copy operation
current_patch_lines.append(line)
elif line == 'Binary files differ' or line.startswith('Binary files'):
elif line == "Binary files differ" or line.startswith("Binary files"):
is_binary = True
current_operation = FileOperation.BINARY if current_operation == FileOperation.MODIFY else current_operation
current_operation = (
FileOperation.BINARY
if current_operation == FileOperation.MODIFY
else current_operation
)
current_patch_lines.append(line)
elif line.startswith('index ') or line.startswith('---') or line.startswith('+++'):
elif (
line.startswith("index ")
or line.startswith("---")
or line.startswith("+++")
):
current_patch_lines.append(line)
elif line.startswith('@@'):
elif line.startswith("@@"):
# Hunk header
current_patch_lines.append(line)
elif line.startswith('+') or line.startswith('-') or line.startswith(' '):
elif line.startswith("+") or line.startswith("-") or line.startswith(" "):
# Actual diff content
current_patch_lines.append(line)
elif line.startswith('\\'):
elif line.startswith("\\"):
# Special markers like "\ No newline at end of file"
current_patch_lines.append(line)
else:
@@ -280,14 +295,14 @@ def parse_diff_output(diff_output: str) -> Dict[str, FilePatch]:
# Save last patch
if current_file and current_patch_lines:
patch_content = '\n'.join(current_patch_lines) if not is_binary else None
patch_content = "\n".join(current_patch_lines) if not is_binary else None
patches[current_file] = FilePatch(
file_path=current_file,
operation=current_operation,
old_path=old_path,
patch_content=patch_content,
is_binary=is_binary,
similarity=similarity
similarity=similarity,
)
return patches
@@ -313,10 +328,10 @@ def write_patch_file(ctx: BuildContext, file_path: str, patch_content: str) -> b
try:
# Ensure patch ends with newline
if patch_content and not patch_content.endswith('\n'):
patch_content += '\n'
if patch_content and not patch_content.endswith("\n"):
patch_content += "\n"
output_path.write_text(patch_content, encoding='utf-8')
output_path.write_text(patch_content, encoding="utf-8")
log_success(f" Written: {output_path.relative_to(ctx.root_dir)}")
return True
except Exception as e:
@@ -336,13 +351,13 @@ def create_deletion_marker(ctx: BuildContext, file_path: str) -> bool:
True if successful, False otherwise
"""
marker_path = ctx.get_dev_patches_dir() / file_path
marker_path = marker_path.with_suffix(marker_path.suffix + '.deleted')
marker_path = marker_path.with_suffix(marker_path.suffix + ".deleted")
marker_path.parent.mkdir(parents=True, exist_ok=True)
try:
marker_content = f"File deleted in patch\nOriginal path: {file_path}\n"
marker_path.write_text(marker_content, encoding='utf-8')
marker_path.write_text(marker_content, encoding="utf-8")
log_warning(f" Marked deleted: {marker_path.relative_to(ctx.root_dir)}")
return True
except Exception as e:
@@ -350,7 +365,9 @@ def create_deletion_marker(ctx: BuildContext, file_path: str) -> bool:
return False
def create_binary_marker(ctx: BuildContext, file_path: str, operation: FileOperation) -> bool:
def create_binary_marker(
ctx: BuildContext, file_path: str, operation: FileOperation
) -> bool:
"""
Create a marker file for binary files.
@@ -363,13 +380,15 @@ def create_binary_marker(ctx: BuildContext, file_path: str, operation: FileOpera
True if successful, False otherwise
"""
marker_path = ctx.get_dev_patches_dir() / file_path
marker_path = marker_path.with_suffix(marker_path.suffix + '.binary')
marker_path = marker_path.with_suffix(marker_path.suffix + ".binary")
marker_path.parent.mkdir(parents=True, exist_ok=True)
try:
marker_content = f"Binary file\nOperation: {operation.value}\nOriginal path: {file_path}\n"
marker_path.write_text(marker_content, encoding='utf-8')
marker_content = (
f"Binary file\nOperation: {operation.value}\nOriginal path: {file_path}\n"
)
marker_path.write_text(marker_content, encoding="utf-8")
log_warning(f" Binary file marked: {marker_path.relative_to(ctx.root_dir)}")
return True
except Exception as e:
@@ -377,8 +396,9 @@ def create_binary_marker(ctx: BuildContext, file_path: str, operation: FileOpera
return False
def apply_single_patch(patch_path: Path, chromium_src: Path,
interactive: bool = True) -> Tuple[bool, str]:
def apply_single_patch(
patch_path: Path, chromium_src: Path, interactive: bool = True
) -> Tuple[bool, str]:
"""
Apply a single patch file to chromium source with multiple strategies.
@@ -395,7 +415,7 @@ def apply_single_patch(patch_path: Path, chromium_src: Path,
return False, f"Patch file not found: {patch_path}"
# Check if it's a deletion marker
if patch_path.suffix == '.deleted':
if patch_path.suffix == ".deleted":
# Handle file deletion
file_path = patch_path.stem
target_file = chromium_src / file_path
@@ -409,22 +429,18 @@ def apply_single_patch(patch_path: Path, chromium_src: Path,
return True, f"Already deleted: {file_path}"
# Check if it's a binary marker
if patch_path.suffix == '.binary':
if patch_path.suffix == ".binary":
return False, f"Binary file patch not supported: {patch_path.name}"
# Try standard apply
result = run_git_command(
['git', 'apply', '-p1', str(patch_path)],
cwd=chromium_src
)
result = run_git_command(["git", "apply", "-p1", str(patch_path)], cwd=chromium_src)
if result.returncode == 0:
return True, f"Applied: {patch_path.name}"
# Try 3-way merge
result = run_git_command(
['git', 'apply', '-p1', '--3way', str(patch_path)],
cwd=chromium_src
["git", "apply", "-p1", "--3way", str(patch_path)], cwd=chromium_src
)
if result.returncode == 0:
@@ -432,8 +448,7 @@ def apply_single_patch(patch_path: Path, chromium_src: Path,
# Try with whitespace options
result = run_git_command(
['git', 'apply', '-p1', '--whitespace=fix', str(patch_path)],
cwd=chromium_src
["git", "apply", "-p1", "--whitespace=fix", str(patch_path)], cwd=chromium_src
)
if result.returncode == 0:
@@ -446,14 +461,15 @@ def apply_single_patch(patch_path: Path, chromium_src: Path,
return False, f"Failed: {patch_path.name} - {result.stderr}"
def handle_patch_conflict(patch_path: Path, chromium_src: Path,
error_msg: str = "") -> Tuple[bool, str]:
def handle_patch_conflict(
patch_path: Path, chromium_src: Path, error_msg: str = ""
) -> Tuple[bool, str]:
"""Handle patch conflict interactively with detailed options"""
click.echo(f"\n{click.style('CONFLICT:', fg='red', bold=True)} {patch_path}")
if error_msg:
# Parse error message for more context
lines = error_msg.strip().split('\n')
lines = error_msg.strip().split("\n")
for line in lines[:5]: # Show first 5 lines of error
click.echo(f" {line}")
@@ -475,8 +491,8 @@ def handle_patch_conflict(patch_path: Path, chromium_src: Path,
elif choice == "3":
# Try with reduced context
result = run_git_command(
['git', 'apply', '-p1', '--unidiff-zero', str(patch_path)],
cwd=chromium_src
["git", "apply", "-p1", "--unidiff-zero", str(patch_path)],
cwd=chromium_src,
)
if result.returncode == 0:
return True, f"Applied (reduced context): {patch_path.name}"
@@ -487,7 +503,7 @@ def handle_patch_conflict(patch_path: Path, chromium_src: Path,
# Show patch content
try:
content = patch_path.read_text()
lines = content.split('\n')
lines = content.split("\n")
# Show first 50 lines
click.echo("\n--- Patch Content (first 50 lines) ---")
for line in lines[:50]:
@@ -507,30 +523,21 @@ def handle_patch_conflict(patch_path: Path, chromium_src: Path,
def create_git_commit(chromium_src: Path, message: str) -> bool:
"""Create a git commit with the given message"""
# Check if there are changes to commit
result = run_git_command(
['git', 'status', '--porcelain'],
cwd=chromium_src
)
result = run_git_command(["git", "status", "--porcelain"], cwd=chromium_src)
if not result.stdout.strip():
log_warning("Nothing to commit, working tree clean")
return True
# Stage all changes
result = run_git_command(
['git', 'add', '-A'],
cwd=chromium_src
)
result = run_git_command(["git", "add", "-A"], cwd=chromium_src)
if result.returncode != 0:
log_error("Failed to stage changes")
return False
# Create commit
result = run_git_command(
['git', 'commit', '-m', message],
cwd=chromium_src
)
result = run_git_command(["git", "commit", "-m", message], cwd=chromium_src)
if result.returncode != 0:
if "nothing to commit" in result.stdout:
@@ -548,22 +555,28 @@ def get_commit_info(commit_hash: str, chromium_src: Path) -> Optional[Dict[str,
try:
# Get commit info in a structured format
result = run_git_command(
['git', 'show', '--format=%H%n%an%n%ae%n%at%n%s%n%b', '--no-patch', commit_hash],
cwd=chromium_src
[
"git",
"show",
"--format=%H%n%an%n%ae%n%at%n%s%n%b",
"--no-patch",
commit_hash,
],
cwd=chromium_src,
)
if result.returncode != 0:
return None
lines = result.stdout.strip().split('\n')
lines = result.stdout.strip().split("\n")
if len(lines) >= 5:
return {
'hash': lines[0],
'author_name': lines[1],
'author_email': lines[2],
'timestamp': lines[3],
'subject': lines[4],
'body': '\n'.join(lines[5:]) if len(lines) > 5 else ''
"hash": lines[0],
"author_name": lines[1],
"author_email": lines[2],
"timestamp": lines[3],
"subject": lines[4],
"body": "\n".join(lines[5:]) if len(lines) > 5 else "",
}
return None
except GitError:
@@ -573,9 +586,10 @@ def get_commit_info(commit_hash: str, chromium_src: Path) -> Optional[Dict[str,
def prompt_yes_no(question: str, default: bool = False) -> bool:
"""Prompt user for yes/no question"""
default_str = "Y/n" if default else "y/N"
result = click.prompt(f"{question} [{default_str}]",
type=str, default="y" if default else "n")
return result.lower() in ('y', 'yes')
result = click.prompt(
f"{question} [{default_str}]", type=str, default="y" if default else "n"
)
return result.lower() in ("y", "yes")
def log_extraction_summary(file_patches: Dict[str, FilePatch]):
@@ -591,7 +605,7 @@ def log_extraction_summary(file_patches: Dict[str, FilePatch]):
if patch.is_binary:
binary_count += 1
click.echo("\n" + click.style("Extraction Summary", fg='green', bold=True))
click.echo("\n" + click.style("Extraction Summary", fg="green", bold=True))
click.echo("=" * 60)
click.echo(f"Total files: {total}")
click.echo("-" * 40)
@@ -618,9 +632,12 @@ def log_apply_summary(results: List[Tuple[str, bool, str]]):
successful = sum(1 for _, success, _ in results if success)
failed = total - successful
click.echo("\n" + click.style("Apply Summary",
fg='green' if failed == 0 else 'yellow',
bold=True))
click.echo(
"\n"
+ click.style(
"Apply Summary", fg="green" if failed == 0 else "yellow", bold=True
)
)
click.echo("=" * 60)
click.echo(f"Total patches: {total}")
click.echo(f"Successful: {successful}")
@@ -628,7 +645,7 @@ def log_apply_summary(results: List[Tuple[str, bool, str]]):
click.echo("=" * 60)
if failed > 0:
click.echo("\n" + click.style("Failed patches:", fg='red', bold=True))
click.echo("\n" + click.style("Failed patches:", fg="red", bold=True))
for file_path, success, message in results:
if not success:
click.echo(f"{file_path}: {message}")
click.echo(f"{file_path}: {message}")

View File

@@ -7,12 +7,21 @@ import os
from pathlib import Path
from typing import List, Optional
from context import BuildContext
from utils import log_info, log_error, log_success, log_warning, IS_WINDOWS, IS_MACOS, join_paths
from utils import (
log_info,
log_error,
log_success,
log_warning,
IS_WINDOWS,
IS_MACOS,
join_paths,
)
# Try to import google-cloud-storage
try:
from google.cloud import storage
from google.oauth2 import service_account
GCS_AVAILABLE = True
except ImportError:
GCS_AVAILABLE = False
@@ -28,11 +37,11 @@ def upload_to_gcs(ctx: BuildContext, file_paths: List[Path]) -> tuple[bool, List
log_warning("google-cloud-storage not installed. Skipping GCS upload.")
log_info("Install with: pip install google-cloud-storage")
return True, [] # Not a fatal error
if not file_paths:
log_info("No files to upload to GCS")
return True, []
# Determine platform subdirectory
if IS_WINDOWS:
platform_dir = "win"
@@ -40,20 +49,22 @@ def upload_to_gcs(ctx: BuildContext, file_paths: List[Path]) -> tuple[bool, List
platform_dir = "macos"
else:
platform_dir = "linux"
# Build GCS path: gs://nxtscape/resources/<version>/<platform>/
bucket_name = "nxtscape"
gcs_prefix = f"resources/{ctx.nxtscape_version}/{platform_dir}"
log_info(f"\n☁️ Uploading artifacts to gs://{bucket_name}/{gcs_prefix}/")
# Check for service account file
service_account_path = join_paths(ctx.root_dir, SERVICE_ACCOUNT_FILE)
if not service_account_path.exists():
log_error(f"Service account file not found: {SERVICE_ACCOUNT_FILE}")
log_info(f"Please place the service account JSON file at: {service_account_path}")
log_info(
f"Please place the service account JSON file at: {service_account_path}"
)
return False, []
try:
# Initialize GCS client with service account
credentials = service_account.Credentials.from_service_account_file(
@@ -61,45 +72,47 @@ def upload_to_gcs(ctx: BuildContext, file_paths: List[Path]) -> tuple[bool, List
)
client = storage.Client(credentials=credentials)
bucket = client.bucket(bucket_name)
uploaded_files = []
gcs_uris = []
for file_path in file_paths:
if not file_path.exists():
log_warning(f"File not found, skipping: {file_path}")
continue
# Determine blob name (file name in GCS)
blob_name = f"{gcs_prefix}/{file_path.name}"
try:
blob = bucket.blob(blob_name)
log_info(f"📤 Uploading {file_path.name}...")
blob.upload_from_filename(str(file_path))
# Note: With uniform bucket-level access, objects inherit bucket's IAM policies
# No need to set individual object ACLs
public_url = f"https://storage.googleapis.com/{bucket_name}/{blob_name}"
gcs_uri = f"gs://{bucket_name}/{blob_name}"
uploaded_files.append(public_url)
gcs_uris.append(gcs_uri)
log_success(f"✓ Uploaded: {public_url}")
except Exception as e:
log_error(f"Failed to upload {file_path.name}: {e}")
return False, []
if uploaded_files:
log_success(f"\n☁️ Successfully uploaded {len(uploaded_files)} file(s) to GCS")
log_success(
f"\n☁️ Successfully uploaded {len(uploaded_files)} file(s) to GCS"
)
log_info("\nPublic URLs:")
for url in uploaded_files:
log_info(f" {url}")
return True, gcs_uris
except Exception as e:
log_error(f"GCS upload failed: {e}")
return False, []
@@ -109,9 +122,9 @@ def upload_package_artifacts(ctx: BuildContext) -> tuple[bool, List[str]]:
"""Upload package artifacts (DMG, ZIP, EXE) to GCS
Returns: (success, list of GCS URIs)"""
log_info("\n☁️ Preparing to upload package artifacts to GCS...")
artifacts = []
# Look for files in the dist/<version> directory
dist_dir = ctx.get_dist_dir()
if dist_dir.exists():
@@ -125,15 +138,15 @@ def upload_package_artifacts(ctx: BuildContext) -> tuple[bool, List[str]]:
else: # Linux
# Look for AppImage files
artifacts.extend(dist_dir.glob("*.AppImage"))
if not artifacts:
log_info("No package artifacts found to upload")
return True, []
log_info(f"Found {len(artifacts)} artifact(s) to upload:")
for artifact in artifacts:
log_info(f" - {artifact.name}")
return upload_to_gcs(ctx, artifacts)
@@ -144,12 +157,17 @@ def upload_signed_artifacts(ctx: BuildContext) -> bool:
return upload_package_artifacts(ctx)
def download_from_gcs(bucket_name: str, source_path: str, dest_path: Path, ctx: Optional[BuildContext] = None) -> bool:
def download_from_gcs(
bucket_name: str,
source_path: str,
dest_path: Path,
ctx: Optional[BuildContext] = None,
) -> bool:
"""Download a file from GCS (utility function)"""
if not GCS_AVAILABLE:
log_error("google-cloud-storage not installed")
return False
try:
# Try to use service account if available
client = None
@@ -160,19 +178,19 @@ def download_from_gcs(bucket_name: str, source_path: str, dest_path: Path, ctx:
str(service_account_path)
)
client = storage.Client(credentials=credentials)
# Fall back to anonymous client for public buckets
if not client:
client = storage.Client.create_anonymous_client()
bucket = client.bucket(bucket_name)
blob = bucket.blob(source_path)
log_info(f"📥 Downloading gs://{bucket_name}/{source_path}...")
blob.download_to_filename(str(dest_path))
log_success(f"Downloaded to: {dest_path}")
return True
except Exception as e:
log_error(f"Failed to download from GCS: {e}")
return False
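
For reference, the upload path this module implements reduces to a few google-cloud-storage calls. A minimal sketch, assuming the google-cloud-storage package and a service-account JSON key; the bucket and object names here are illustrative, not the build's actual values.

from pathlib import Path
from google.cloud import storage
from google.oauth2 import service_account

def upload_one(key_file: Path, bucket_name: str, blob_name: str, local_file: Path) -> str:
    # Authenticate with a service-account key and upload a single file.
    creds = service_account.Credentials.from_service_account_file(str(key_file))
    client = storage.Client(credentials=creds)
    blob = client.bucket(bucket_name).blob(blob_name)
    blob.upload_from_filename(str(local_file))
    # With uniform bucket-level access, objects inherit the bucket's IAM policy.
    return f"gs://{bucket_name}/{blob_name}"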

View File

@@ -17,29 +17,37 @@ from utils import run_command, log_info, log_error, log_success, IS_WINDOWS, saf
def setup_git(ctx: BuildContext) -> bool:
"""Setup git and checkout Chromium"""
log_info(f"\n🔀 Setting up Chromium {ctx.chromium_version}...")
os.chdir(ctx.chromium_src)
# Fetch all tags and checkout
log_info("📥 Fetching all tags from remote...")
run_command(["git", "fetch", "--tags", "--force"])
# Verify tag exists before checkout
result = subprocess.run(["git", "tag", "-l", ctx.chromium_version],
text=True, capture_output=True, cwd=ctx.chromium_src)
result = subprocess.run(
["git", "tag", "-l", ctx.chromium_version],
text=True,
capture_output=True,
cwd=ctx.chromium_src,
)
if not result.stdout or ctx.chromium_version not in result.stdout:
log_error(f"Tag {ctx.chromium_version} not found!")
log_info("Available tags (last 10):")
list_result = subprocess.run(["git", "tag", "-l", "--sort=-version:refname"],
text=True, capture_output=True, cwd=ctx.chromium_src)
list_result = subprocess.run(
["git", "tag", "-l", "--sort=-version:refname"],
text=True,
capture_output=True,
cwd=ctx.chromium_src,
)
if list_result.stdout:
for tag in list_result.stdout.strip().split('\n')[:10]:
for tag in list_result.stdout.strip().split("\n")[:10]:
log_info(f" {tag}")
raise ValueError(f"Git tag {ctx.chromium_version} not found")
log_info(f"🔀 Checking out tag: {ctx.chromium_version}")
run_command(["git", "checkout", f"tags/{ctx.chromium_version}"])
# Sync dependencies
log_info("📥 Syncing dependencies (this may take a while)...")
# Windows gclient doesn't support --shallow flag
@@ -47,7 +55,7 @@ def setup_git(ctx: BuildContext) -> bool:
run_command(["gclient.bat", "sync", "-D", "--no-history", "--shallow"])
else:
run_command(["gclient", "sync", "-D", "--no-history", "--shallow"])
log_success("Git setup complete")
return True
@@ -55,30 +63,30 @@ def setup_git(ctx: BuildContext) -> bool:
def setup_sparkle(ctx: BuildContext) -> bool:
"""Download and setup Sparkle framework"""
log_info("\n✨ Setting up Sparkle framework...")
sparkle_dir = ctx.get_sparkle_dir()
# Clean existing
if sparkle_dir.exists():
safe_rmtree(sparkle_dir)
sparkle_dir.mkdir(parents=True)
# Download Sparkle
sparkle_url = ctx.get_sparkle_url()
sparkle_archive = sparkle_dir / "sparkle.tar.xz"
# Download using urllib (cross-platform)
log_info(f"Downloading Sparkle from {sparkle_url}...")
urllib.request.urlretrieve(sparkle_url, sparkle_archive)
# Extract using tarfile module (cross-platform)
log_info("Extracting Sparkle...")
with tarfile.open(sparkle_archive, 'r:xz') as tar:
with tarfile.open(sparkle_archive, "r:xz") as tar:
tar.extractall(sparkle_dir)
# Clean up
sparkle_archive.unlink()
log_success("Sparkle setup complete")
return True
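
The Sparkle step above is a plain download-and-extract using only the standard library; a minimal sketch with placeholder URL and destination.

import tarfile
import urllib.request
from pathlib import Path

def fetch_and_extract(url: str, dest: Path) -> None:
    dest.mkdir(parents=True, exist_ok=True)
    archive = dest / "sparkle.tar.xz"
    urllib.request.urlretrieve(url, archive)    # download the archive
    with tarfile.open(archive, "r:xz") as tar:  # extract the .tar.xz contents
        tar.extractall(dest)
    archive.unlink()                            # remove the archive afterwards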

View File

@@ -14,53 +14,63 @@ from utils import log_info, log_error, log_success, join_paths
def inject_version(ctx: BuildContext) -> bool:
"""Inject browser version into manifest.json files"""
log_info("\n💉 Injecting browser version into extension manifests...")
# Hardcoded paths to manifest files
manifest_paths = [
join_paths(ctx.root_dir, "resources", "files", "ai_side_panel", "manifest.json"),
join_paths(
ctx.root_dir, "resources", "files", "ai_side_panel", "manifest.json"
),
join_paths(ctx.root_dir, "resources", "files", "bug_reporter", "manifest.json"),
]
success = True
for manifest_path in manifest_paths:
if not inject_version_to_manifest(manifest_path, ctx.get_nxtscape_chromium_version(), ctx.get_nxtscape_version()):
if not inject_version_to_manifest(
manifest_path,
ctx.get_nxtscape_chromium_version(),
ctx.get_nxtscape_version(),
):
success = False
if success:
log_success("Browser version injected into all manifests")
else:
log_error("Failed to inject version into some manifests")
return success
def inject_version_to_manifest(manifest_path: Path, browser_version: str, nxtscape_version: str) -> bool:
def inject_version_to_manifest(
manifest_path: Path, browser_version: str, nxtscape_version: str
) -> bool:
"""Inject browser version and increment version into a single manifest.json file"""
try:
if not manifest_path.exists():
log_error(f"Manifest not found: {manifest_path}")
return False
# Read existing manifest
with open(manifest_path, 'r', encoding='utf-8') as f:
with open(manifest_path, "r", encoding="utf-8") as f:
manifest_data = json.load(f)
# Set version to NXTSCAPE_VERSION formatted as X.0.0.0
if 'version' in manifest_data:
current_version = manifest_data['version']
if "version" in manifest_data:
current_version = manifest_data["version"]
# Format version as X.0.0.0
formatted_version = f"{nxtscape_version}.0.0.0"
manifest_data['version'] = formatted_version
log_info(f" Manifest version updated: {current_version}{formatted_version}")
manifest_data["version"] = formatted_version
log_info(
f" Manifest version updated: {current_version}{formatted_version}"
)
# Add browser_version field
manifest_data['browser_version'] = browser_version
manifest_data["browser_version"] = browser_version
# Write back with proper formatting
with open(manifest_path, 'w', encoding='utf-8') as f:
with open(manifest_path, "w", encoding="utf-8") as f:
json.dump(manifest_data, f, indent=2, ensure_ascii=False)
f.write('\n') # Add trailing newline
f.write("\n") # Add trailing newline
# Validate the written JSON
if validate_json_file(manifest_path):
log_success(f"✓ Updated: {manifest_path.name}")
@@ -68,7 +78,7 @@ def inject_version_to_manifest(manifest_path: Path, browser_version: str, nxtsca
else:
log_error(f"✗ Invalid JSON after injection: {manifest_path.name}")
return False
except json.JSONDecodeError as e:
log_error(f"Failed to parse JSON in {manifest_path}: {e}")
return False
@@ -79,21 +89,21 @@ def inject_version_to_manifest(manifest_path: Path, browser_version: str, nxtsca
def increment_version(version: str) -> str:
"""Increment version string by 1 in the last component"""
parts = version.split('.')
parts = version.split(".")
if not parts:
return "0.0.1"
# Try to increment the last numeric part
for i in range(len(parts) - 1, -1, -1):
try:
# Convert to int, increment, and convert back
incremented = int(parts[i]) + 1
parts[i] = str(incremented)
return '.'.join(parts)
return ".".join(parts)
except ValueError:
# If this part is not numeric, continue to the previous part
continue
# If no numeric part found, append .1
return version + ".1"
@@ -101,7 +111,7 @@ def increment_version(version: str) -> str:
def validate_json_file(file_path: Path) -> bool:
"""Validate that a file contains valid JSON"""
try:
with open(file_path, 'r', encoding='utf-8') as f:
with open(file_path, "r", encoding="utf-8") as f:
json.load(f)
return True
except json.JSONDecodeError:
@@ -113,18 +123,20 @@ def validate_json_file(file_path: Path) -> bool:
def remove_browser_version(ctx: BuildContext) -> bool:
"""Remove browser version from manifest.json files (for cleanup)"""
log_info("\n🧹 Removing browser version from extension manifests...")
# Hardcoded paths to manifest files
manifest_paths = [
join_paths(ctx.root_dir, "resources", "files", "ai_side_panel", "manifest.json"),
join_paths(
ctx.root_dir, "resources", "files", "ai_side_panel", "manifest.json"
),
join_paths(ctx.root_dir, "resources", "files", "bug_reporter", "manifest.json"),
]
success = True
for manifest_path in manifest_paths:
if not remove_version_from_manifest(manifest_path):
success = False
return success
@@ -133,24 +145,24 @@ def remove_version_from_manifest(manifest_path: Path) -> bool:
try:
if not manifest_path.exists():
return True # Nothing to remove
# Read existing manifest
with open(manifest_path, 'r', encoding='utf-8') as f:
with open(manifest_path, "r", encoding="utf-8") as f:
manifest_data = json.load(f)
# Remove browser_version field if it exists
if 'browser_version' in manifest_data:
del manifest_data['browser_version']
if "browser_version" in manifest_data:
del manifest_data["browser_version"]
# Write back with proper formatting
with open(manifest_path, 'w', encoding='utf-8') as f:
with open(manifest_path, "w", encoding="utf-8") as f:
json.dump(manifest_data, f, indent=2, ensure_ascii=False)
f.write('\n') # Add trailing newline
f.write("\n") # Add trailing newline
log_info(f"Removed browser_version from: {manifest_path.name}")
return True
except Exception as e:
log_error(f"Failed to remove version from {manifest_path}: {e}")
return False
@@ -159,8 +171,8 @@ def remove_version_from_manifest(manifest_path: Path) -> bool:
def get_manifest_version(manifest_path: Path) -> str:
"""Get the current version from a manifest.json file"""
try:
with open(manifest_path, 'r', encoding='utf-8') as f:
with open(manifest_path, "r", encoding="utf-8") as f:
manifest_data = json.load(f)
return manifest_data.get('version', 'unknown')
return manifest_data.get("version", "unknown")
except Exception:
return 'unknown'
return "unknown"
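
The manifest rewrite above boils down to stamping two fields and writing the JSON back. A self-contained sketch with example values; note that increment_version("1.2.3") from the module above returns "1.2.4".

import json
from pathlib import Path

def stamp_manifest(path: Path, browser_version: str, nxtscape_version: str) -> None:
    data = json.loads(path.read_text(encoding="utf-8"))
    data["version"] = f"{nxtscape_version}.0.0.0"  # e.g. "137" -> "137.0.0.0"
    data["browser_version"] = browser_version      # extra field consumed by the browser
    path.write_text(json.dumps(data, indent=2, ensure_ascii=False) + "\n", encoding="utf-8")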

View File

@@ -17,53 +17,53 @@ def merge_architectures(
arch1_path: Path,
arch2_path: Path,
output_path: Path,
universalizer_script: Path = None
universalizer_script: Path = None,
) -> bool:
"""
Merge two architecture builds into a universal binary
Args:
arch1_path: Path to first architecture .app bundle
arch2_path: Path to second architecture .app bundle
output_path: Path where universal .app bundle should be created
universalizer_script: Path to universalizer script (optional)
Returns:
True if successful, False otherwise
"""
log_info("🔄 Merging architecture builds into universal binary...")
# Validate input paths
if not arch1_path.exists():
log_error(f"Architecture 1 app not found: {arch1_path}")
return False
if not arch2_path.exists():
log_error(f"Architecture 2 app not found: {arch2_path}")
return False
log_info(f"📱 Input 1: {arch1_path}")
log_info(f"📱 Input 2: {arch2_path}")
log_info(f"🎯 Output: {output_path}")
# Find universalizer script
if universalizer_script is None:
# Try to find it relative to this module
current_dir = Path(__file__).parent.parent
universalizer_script = current_dir / "universalizer_patched.py"
if not universalizer_script.exists():
log_error(f"Universalizer script not found: {universalizer_script}")
return False
# Create output directory if needed
output_path.parent.mkdir(parents=True, exist_ok=True)
# Remove existing output if present
if output_path.exists():
log_info(f"Removing existing output: {output_path}")
shutil.rmtree(output_path)
try:
# Run universalizer
cmd = [
@@ -71,35 +71,37 @@ def merge_architectures(
str(universalizer_script),
str(arch1_path),
str(arch2_path),
str(output_path)
str(output_path),
]
log_info(f"Running universalizer...")
log_info(f"Command: {' '.join(cmd)}")
run_command(cmd)
if output_path.exists():
log_success(f"Universal binary created: {output_path}")
return True
else:
log_error("Universal binary creation failed - output not found")
return False
except Exception as e:
log_error(f"Failed to create universal binary: {e}")
return False
def create_minimal_context(app_path: Path, chromium_src: Path, root_dir: Path, architecture: str = "universal") -> BuildContext:
def create_minimal_context(
app_path: Path, chromium_src: Path, root_dir: Path, architecture: str = "universal"
) -> BuildContext:
"""Create a minimal BuildContext for signing/packaging operations"""
out_dir_path = app_path.parent # out/Default_universal
log_info(f"Creating context from app path: {app_path}")
log_info(f" Out dir: {out_dir_path}")
log_info(f" Chromium src: {chromium_src}")
log_info(f" Root dir: {root_dir}")
ctx = BuildContext(
root_dir=root_dir,
chromium_src=chromium_src,
@@ -110,20 +112,20 @@ def create_minimal_context(app_path: Path, chromium_src: Path, root_dir: Path, a
package=True,
build=False,
)
# Override out_dir to match the actual location
ctx.out_dir = out_dir_path.name
# Override get_app_path to return the actual app path for merge operations
def get_app_path_override():
return app_path
ctx.get_app_path = get_app_path_override
log_info(f"Context created with out_dir: {ctx.out_dir}")
log_info(f"App path: {ctx.get_app_path()}")
log_info(f"PKG-DMG path: {ctx.get_pkg_dmg_path()}")
return ctx
@@ -135,11 +137,11 @@ def merge_sign_package(
root_dir: Path,
sign: bool = True,
package: bool = True,
universalizer_script: Path = None
universalizer_script: Path = None,
) -> bool:
"""
Complete workflow: merge, sign, and package universal binary
Args:
arch1_path: Path to first architecture .app bundle
arch2_path: Path to second architecture .app bundle
@@ -149,84 +151,86 @@ def merge_sign_package(
sign: Whether to sign the universal binary
package: Whether to create DMG package
universalizer_script: Path to universalizer script (optional)
Returns:
True if successful, False otherwise
"""
log_info("=" * 70)
log_info("🚀 Starting merge, sign, and package workflow...")
log_info("=" * 70)
# Step 1: Merge architectures
if not merge_architectures(arch1_path, arch2_path, output_path, universalizer_script):
if not merge_architectures(
arch1_path, arch2_path, output_path, universalizer_script
):
return False
# Step 2: Sign (if requested)
if sign:
log_info("\n" + "=" * 70)
log_info("🔏 Signing universal binary...")
log_info("=" * 70)
try:
from modules.sign import sign_app
ctx = create_minimal_context(output_path, chromium_src, root_dir)
if not sign_app(ctx, create_dmg=False):
log_error("Failed to sign universal binary")
return False
log_success("Universal binary signed successfully!")
except ImportError as e:
log_error(f"Could not import signing module: {e}")
return False
except Exception as e:
log_error(f"Signing failed: {e}")
return False
# Step 3: Package (if requested)
if package:
log_info("\n" + "=" * 70)
log_info("📦 Creating DMG package...")
log_info("=" * 70)
try:
from modules.package import create_dmg
ctx = create_minimal_context(output_path, chromium_src, root_dir)
# Create DMG in parent directory
dmg_dir = ctx.root_dir / "dmg"
dmg_dir.mkdir(parents=True, exist_ok=True)
dmg_name = ctx.get_dmg_name()
dmg_path = dmg_dir / dmg_name
pkg_dmg_path = ctx.get_pkg_dmg_path()
# pkg-dmg should now be available since we enforce chromium-src path
if not pkg_dmg_path.exists():
log_error(f"Chromium pkg-dmg not found at: {pkg_dmg_path}")
log_error("Make sure you provided the correct --chromium-src path")
return False
if create_dmg(output_path, dmg_path, "BrowserOS", pkg_dmg_path):
log_success(f"DMG created: {dmg_name}")
else:
log_error("Failed to create DMG")
return False
except ImportError as e:
log_error(f"Could not import packaging module: {e}")
return False
except Exception as e:
log_error(f"Packaging failed: {e}")
return False
log_info("\n" + "=" * 70)
log_success("Merge, sign, and package workflow completed successfully!")
log_info("=" * 70)
return True
@@ -235,18 +239,18 @@ def handle_merge_command(
arch2_path: Path,
chromium_src: Path,
sign: bool = False,
package: bool = False
package: bool = False,
) -> bool:
"""
Handle the merge command from CLI
Args:
arch1_path: Path to first architecture .app bundle
arch2_path: Path to second architecture .app bundle
chromium_src: Path to chromium source directory
sign: Whether to sign the universal binary
package: Whether to create DMG package
Returns:
True if successful, False otherwise
"""
@@ -256,27 +260,35 @@ def handle_merge_command(
log_info(f" Sign: {sign}")
log_info(f" Package: {package}")
log_info(f"📁 Using Chromium source: {chromium_src}")
# Validate input paths exist
if not arch1_path.exists():
log_error(f"Architecture 1 app not found: {arch1_path}")
return False
if not arch2_path.exists():
log_error(f"Architecture 2 app not found: {arch2_path}")
return False
# Get root_dir from where this module is located
root_dir = Path(__file__).parent.parent.parent
log_info(f"📂 Using root directory: {root_dir}")
# Auto-generate output path in chromium source
# Get the app name from BuildContext
from context import BuildContext
temp_ctx = BuildContext(root_dir=root_dir, chromium_src=chromium_src, architecture="universal", build_type="release")
output_path = chromium_src / "out" / "Default_universal" / temp_ctx.NXTSCAPE_APP_NAME
temp_ctx = BuildContext(
root_dir=root_dir,
chromium_src=chromium_src,
architecture="universal",
build_type="release",
)
output_path = (
chromium_src / "out" / "Default_universal" / temp_ctx.NXTSCAPE_APP_NAME
)
log_info(f" Output: {output_path} (auto-generated)")
try:
success = merge_sign_package(
arch1_path=arch1_path,
@@ -287,15 +299,16 @@ def handle_merge_command(
sign=sign,
package=package,
)
if success:
log_success("Merge command completed successfully!")
else:
log_error("Merge command failed!")
return success
except Exception as e:
log_error(f"Merge command failed with exception: {e}")
import traceback
traceback.print_exc()
return False
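
Conceptually, the merge step produces fat Mach-O binaries; this build delegates the full .app merge to universalizer_patched.py, but for a single executable the idea can be sketched with Apple's lipo tool. Paths here are illustrative.

import subprocess
from pathlib import Path

def lipo_merge(arm64_bin: Path, x64_bin: Path, out_bin: Path) -> None:
    # Combine two single-architecture Mach-O files into one universal binary.
    out_bin.parent.mkdir(parents=True, exist_ok=True)
    subprocess.run(
        ["lipo", "-create", str(arm64_bin), str(x64_bin), "-output", str(out_bin)],
        check=True,
    )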

View File

@@ -17,17 +17,17 @@ def package(ctx: BuildContext) -> bool:
if ctx.sign_package:
# Already handled by signing process
return True
log_info("\n📀 Creating DMG package...")
app_path = ctx.get_app_path()
dmg_dir = ctx.get_dist_dir()
dmg_name = ctx.get_dmg_name()
dmg_path = dmg_dir / dmg_name
# Use Chromium's pkg-dmg tool
pkg_dmg_path = ctx.get_pkg_dmg_path()
if create_dmg(app_path, dmg_path, "BrowserOS", pkg_dmg_path):
log_success(f"Created {dmg_name}")
return True
@@ -36,52 +36,63 @@ def package(ctx: BuildContext) -> bool:
raise RuntimeError("Failed to create DMG")
def create_dmg(app_path: Path, dmg_path: Path, volume_name: str = "BrowserOS",
pkg_dmg_path: Optional[Path] = None) -> bool:
def create_dmg(
app_path: Path,
dmg_path: Path,
volume_name: str = "BrowserOS",
pkg_dmg_path: Optional[Path] = None,
) -> bool:
"""Create a DMG package from an app bundle"""
log_info(f"\n📀 Creating DMG package: {dmg_path.name}")
# Verify app exists
if not app_path.exists():
log_error(f"App not found at: {app_path}")
return False
# Create DMG directory if needed
dmg_path.parent.mkdir(parents=True, exist_ok=True)
# Remove existing DMG if present
if dmg_path.exists():
log_info(f" Removing existing DMG: {dmg_path.name}")
dmg_path.unlink()
# Build command
cmd = []
if pkg_dmg_path and pkg_dmg_path.exists():
# Use Chromium's pkg-dmg tool if available
cmd = [str(pkg_dmg_path)]
else:
# Fallback to system pkg-dmg if available
pkg_dmg_system = shutil.which('pkg-dmg')
pkg_dmg_system = shutil.which("pkg-dmg")
if pkg_dmg_system:
cmd = [pkg_dmg_system]
else:
log_error("No pkg-dmg tool found")
return False
cmd.extend([
"--sourcefile",
"--source", str(app_path),
"--target", str(dmg_path),
"--volname", volume_name,
"--symlink", "/Applications:/Applications",
"--format", "UDBZ"
])
cmd.extend(
[
"--sourcefile",
"--source",
str(app_path),
"--target",
str(dmg_path),
"--volname",
volume_name,
"--symlink",
"/Applications:/Applications",
"--format",
"UDBZ",
]
)
# Add verbosity for Chromium's pkg-dmg
if pkg_dmg_path:
cmd.extend(["--verbosity", "2"])
try:
run_command(cmd)
log_success(f"DMG created: {dmg_path}")
@@ -94,21 +105,27 @@ def create_dmg(app_path: Path, dmg_path: Path, volume_name: str = "BrowserOS",
def sign_dmg(dmg_path: Path, certificate_name: str) -> bool:
"""Sign a DMG file"""
log_info(f"\n🔏 Signing DMG: {dmg_path.name}")
if not dmg_path.exists():
log_error(f"DMG not found at: {dmg_path}")
return False
try:
run_command([
"codesign", "--sign", certificate_name,
"--force", "--timestamp", str(dmg_path)
])
run_command(
[
"codesign",
"--sign",
certificate_name,
"--force",
"--timestamp",
str(dmg_path),
]
)
# Verify signature
log_info("🔍 Verifying DMG signature...")
run_command(["codesign", "-vvv", str(dmg_path)])
log_success("DMG signed successfully")
return True
except Exception as e:
@@ -119,112 +136,128 @@ def sign_dmg(dmg_path: Path, certificate_name: str) -> bool:
def notarize_dmg(dmg_path: Path, keychain_profile: str = "notarytool-profile") -> bool:
"""Notarize a DMG file"""
log_info(f"\n📤 Notarizing DMG: {dmg_path.name}")
if not dmg_path.exists():
log_error(f"DMG not found at: {dmg_path}")
return False
try:
# Submit for notarization
log_info("📤 Submitting DMG for notarization (this may take a while)...")
result = run_command(
["xcrun", "notarytool", "submit", str(dmg_path),
"--keychain-profile", keychain_profile, "--wait"],
check=False
[
"xcrun",
"notarytool",
"submit",
str(dmg_path),
"--keychain-profile",
keychain_profile,
"--wait",
],
check=False,
)
log_info(result.stdout)
if result.stderr:
log_error(result.stderr)
if result.returncode != 0:
log_error("DMG notarization submission failed")
return False
# Check if accepted
if "status: Accepted" not in result.stdout:
log_error("DMG notarization failed - status was not 'Accepted'")
# Try to extract submission ID for debugging
for line in result.stdout.split('\n'):
if 'id:' in line:
submission_id = line.split('id:')[1].strip().split()[0]
log_info(f"Get detailed logs with: xcrun notarytool log {submission_id} --keychain-profile \"{keychain_profile}\"")
for line in result.stdout.split("\n"):
if "id:" in line:
submission_id = line.split("id:")[1].strip().split()[0]
log_info(
f'Get detailed logs with: xcrun notarytool log {submission_id} --keychain-profile "{keychain_profile}"'
)
break
return False
log_success("DMG notarization successful - status: Accepted")
# Staple the ticket
log_info("📎 Stapling notarization ticket to DMG...")
result = run_command(
["xcrun", "stapler", "staple", str(dmg_path)],
check=False
)
result = run_command(["xcrun", "stapler", "staple", str(dmg_path)], check=False)
if result.returncode != 0:
log_error("Failed to staple notarization ticket to DMG")
return False
log_success("DMG notarization ticket stapled successfully")
# Verify stapling
log_info("🔍 Verifying DMG stapling...")
result = run_command(
["xcrun", "stapler", "validate", str(dmg_path)],
check=False
["xcrun", "stapler", "validate", str(dmg_path)], check=False
)
if result.returncode != 0:
log_error("DMG stapling verification failed")
return False
log_success("DMG stapling verification successful")
# Final security assessment
log_info("🔍 Performing final security assessment...")
result = run_command(
["spctl", "-a", "-vvv", "-t", "open",
"--context", "context:primary-signature", str(dmg_path)],
check=False
[
"spctl",
"-a",
"-vvv",
"-t",
"open",
"--context",
"context:primary-signature",
str(dmg_path),
],
check=False,
)
if result.returncode != 0:
log_error("Final security assessment failed")
return False
log_success("Final security assessment passed")
return True
except Exception as e:
log_error(f"Unexpected error during DMG notarization: {e}")
return False
def create_signed_notarized_dmg(app_path: Path, dmg_path: Path,
certificate_name: str,
volume_name: str = "BrowserOS",
pkg_dmg_path: Optional[Path] = None,
keychain_profile: str = "notarytool-profile") -> bool:
def create_signed_notarized_dmg(
app_path: Path,
dmg_path: Path,
certificate_name: str,
volume_name: str = "BrowserOS",
pkg_dmg_path: Optional[Path] = None,
keychain_profile: str = "notarytool-profile",
) -> bool:
"""Create, sign, and notarize a DMG in one go"""
log_info("="*70)
log_info("=" * 70)
log_info("📦 Creating signed and notarized DMG package")
log_info("="*70)
log_info("=" * 70)
# Create DMG
if not create_dmg(app_path, dmg_path, volume_name, pkg_dmg_path):
return False
# Sign DMG
if not sign_dmg(dmg_path, certificate_name):
return False
# Notarize DMG
if not notarize_dmg(dmg_path, keychain_profile):
return False
log_info("="*70)
log_info("=" * 70)
log_success(f"DMG package ready: {dmg_path}")
log_info("="*70)
log_info("=" * 70)
return True
@@ -233,19 +266,19 @@ def package_universal(contexts: List[BuildContext]) -> bool:
log_info("=" * 70)
log_info("📦 Creating universal DMG package...")
log_info("=" * 70)
if len(contexts) < 2:
log_error("Universal packaging requires at least 2 architectures")
return False
# Use the universal app path
universal_dir = contexts[0].chromium_src / "out/Default_universal"
universal_app_path = universal_dir / contexts[0].NXTSCAPE_APP_NAME
if not universal_app_path.exists():
log_error(f"Universal app not found: {universal_app_path}")
return False
# Create a temporary universal context for DMG naming
universal_ctx = BuildContext(
root_dir=contexts[0].root_dir,
@@ -257,18 +290,18 @@ def package_universal(contexts: List[BuildContext]) -> bool:
package=False,
build=False,
)
# Create DMG in dist/<version> directory
dmg_dir = universal_ctx.get_dist_dir()
dmg_dir.mkdir(parents=True, exist_ok=True)
# Use context's DMG naming
dmg_name = universal_ctx.get_dmg_name()
dmg_path = dmg_dir / dmg_name
# Get pkg-dmg tool
pkg_dmg_path = contexts[0].get_pkg_dmg_path()
# Create the universal DMG
if create_dmg(universal_app_path, dmg_path, "BrowserOS", pkg_dmg_path):
log_success(f"Universal DMG created: {dmg_name}")
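
The sign-and-notarize flow above reduces to three macOS commands run in sequence; a compressed sketch, with the signing identity and keychain profile as placeholders.

import subprocess
from pathlib import Path

def sign_and_notarize(dmg: Path, identity: str, profile: str = "notarytool-profile") -> None:
    # Sign the DMG, submit it for notarization and wait, then staple the ticket.
    subprocess.run(["codesign", "--sign", identity, "--force", "--timestamp", str(dmg)], check=True)
    subprocess.run(["xcrun", "notarytool", "submit", str(dmg), "--keychain-profile", profile, "--wait"], check=True)
    subprocess.run(["xcrun", "stapler", "staple", str(dmg)], check=True)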

View File

@@ -10,26 +10,34 @@ from pathlib import Path
from typing import List, Tuple, Optional
from context import BuildContext
from utils import log_info, log_error, log_warning, log_success, run_command, safe_rmtree, join_paths
from utils import (
log_info,
log_error,
log_warning,
log_success,
run_command,
safe_rmtree,
join_paths,
)
def prepare_appdir(ctx: BuildContext, appdir: Path) -> bool:
"""Prepare the AppDir structure for AppImage"""
log_info("📁 Preparing AppDir structure...")
# Create directory structure
app_root = join_paths(appdir, "opt", "browseros")
usr_share = join_paths(appdir, "usr", "share")
icons_dir = join_paths(usr_share, "icons", "hicolor")
# Create directories
app_root.mkdir(parents=True, exist_ok=True)
Path(join_paths(usr_share, "applications")).mkdir(parents=True, exist_ok=True)
Path(join_paths(icons_dir, "256x256", "apps")).mkdir(parents=True, exist_ok=True)
# Copy browser files from out/Default
out_dir = join_paths(ctx.chromium_src, ctx.out_dir)
# Essential files to copy
files_to_copy = [
ctx.NXTSCAPE_APP_NAME, # This will be "browseros" on Linux
@@ -48,7 +56,7 @@ def prepare_appdir(ctx: BuildContext, appdir: Path) -> bool:
"chrome_200_percent.pak",
"resources.pak",
]
# Copy files
for file in files_to_copy:
src = join_paths(out_dir, file)
@@ -57,7 +65,7 @@ def prepare_appdir(ctx: BuildContext, appdir: Path) -> bool:
log_info(f" ✓ Copied {file}")
else:
log_warning(f" ⚠ File not found: {file}")
# Copy directories
dirs_to_copy = ["locales", "MEIPreload"]
for dir_name in dirs_to_copy:
@@ -65,20 +73,20 @@ def prepare_appdir(ctx: BuildContext, appdir: Path) -> bool:
if Path(src).exists():
shutil.copytree(src, join_paths(app_root, dir_name), dirs_exist_ok=True)
log_info(f" ✓ Copied {dir_name}/")
# Set executable permissions
browseros_path = Path(join_paths(app_root, ctx.NXTSCAPE_APP_NAME))
if browseros_path.exists():
browseros_path.chmod(0o755)
sandbox_path = Path(join_paths(app_root, "chrome_sandbox"))
if sandbox_path.exists():
sandbox_path.chmod(0o4755) # SUID bit
crashpad_path = Path(join_paths(app_root, "chrome_crashpad_handler"))
if crashpad_path.exists():
crashpad_path.chmod(0o755)
# Create desktop file
desktop_content = f"""[Desktop Entry]
Version=1.0
@@ -92,31 +100,33 @@ Categories=Network;WebBrowser;
MimeType=text/html;text/xml;application/xhtml+xml;application/xml;application/vnd.mozilla.xul+xml;application/rss+xml;application/rdf+xml;image/gif;image/jpeg;image/png;x-scheme-handler/http;x-scheme-handler/https;x-scheme-handler/ftp;x-scheme-handler/chrome;video/webm;application/x-xpinstall;
Icon=browseros
"""
desktop_file = Path(join_paths(usr_share, "applications", "browseros.desktop"))
desktop_file.write_text(desktop_content)
log_info(" ✓ Created desktop file")
# Also copy desktop file to AppDir root (required by appimagetool)
appdir_desktop = Path(join_paths(appdir, "browseros.desktop"))
shutil.copy2(desktop_file, appdir_desktop)
# Update Exec line to use AppRun
desktop_content_appdir = desktop_content.replace(f"Exec=/opt/browseros/{ctx.NXTSCAPE_APP_NAME} %U", "Exec=AppRun %U")
desktop_content_appdir = desktop_content.replace(
f"Exec=/opt/browseros/{ctx.NXTSCAPE_APP_NAME} %U", "Exec=AppRun %U"
)
appdir_desktop.write_text(desktop_content_appdir)
# Copy icon from resources
icon_src = Path(join_paths(ctx.root_dir, "resources", "icons", "product_logo.png"))
if icon_src.exists():
icon_dest = Path(join_paths(icons_dir, "256x256", "apps", "browseros.png"))
shutil.copy2(icon_src, icon_dest)
log_info(" ✓ Copied icon")
# Also copy icon to AppDir root (following ungoogled-chromium convention)
appdir_icon = Path(join_paths(appdir, "browseros.png"))
shutil.copy2(icon_src, appdir_icon)
else:
log_warning(" ⚠ Icon not found at resources/icons/product_logo.png")
# Create AppRun script (following ungoogled-chromium convention)
apprun_content = f"""#!/bin/sh
THIS="$(readlink -f "${{0}}")"
@@ -125,12 +135,12 @@ export LD_LIBRARY_PATH="${{HERE}}"/opt/browseros:$LD_LIBRARY_PATH
export CHROME_WRAPPER="${{THIS}}"
"${{HERE}}"/opt/browseros/{ctx.NXTSCAPE_APP_NAME} "$@"
"""
apprun_file = Path(join_paths(appdir, "AppRun"))
apprun_file.write_text(apprun_content)
apprun_file.chmod(0o755)
log_info(" ✓ Created AppRun script")
return True
@@ -138,19 +148,19 @@ def download_appimagetool(ctx: BuildContext) -> Optional[Path]:
"""Download appimagetool if not available"""
tool_dir = Path(join_paths(ctx.root_dir, "build", "tools"))
tool_dir.mkdir(exist_ok=True)
tool_path = Path(join_paths(tool_dir, "appimagetool-x86_64.AppImage"))
if tool_path.exists():
log_info("✓ appimagetool already available")
return tool_path
log_info("📥 Downloading appimagetool...")
url = "https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage"
cmd = ["wget", "-O", str(tool_path), url]
result = run_command(cmd, check=False)
if result.returncode == 0:
tool_path.chmod(0o755)
log_success("✓ Downloaded appimagetool")
@@ -163,26 +173,27 @@ def download_appimagetool(ctx: BuildContext) -> Optional[Path]:
def create_appimage(ctx: BuildContext, appdir: Path, output_path: Path) -> bool:
"""Create AppImage from AppDir"""
log_info("📦 Creating AppImage...")
# Download appimagetool if needed
appimagetool = download_appimagetool(ctx)
if not appimagetool:
return False
# Set architecture
arch = "x86_64" if ctx.architecture == "x64" else "aarch64"
os.environ["ARCH"] = arch
# Create AppImage
cmd = [
str(appimagetool),
"--comp", "gzip", # Use gzip compression
"--comp",
"gzip", # Use gzip compression
str(appdir),
str(output_path)
str(output_path),
]
result = run_command(cmd, check=False)
if result.returncode == 0:
log_success(f"✓ Created AppImage: {output_path}")
# Make executable
@@ -195,39 +206,41 @@ def create_appimage(ctx: BuildContext, appdir: Path, output_path: Path) -> bool:
def package(ctx: BuildContext) -> bool:
"""Package BrowserOS for Linux as AppImage"""
log_info(f"📦 Packaging {ctx.NXTSCAPE_APP_BASE_NAME} {ctx.get_nxtscape_chromium_version()} for Linux ({ctx.architecture})")
log_info(
f"📦 Packaging {ctx.NXTSCAPE_APP_BASE_NAME} {ctx.get_nxtscape_chromium_version()} for Linux ({ctx.architecture})"
)
# Create packaging directory
package_dir = ctx.get_dist_dir()
package_dir.mkdir(parents=True, exist_ok=True)
# Prepare AppDir
appdir = Path(join_paths(package_dir, f"{ctx.NXTSCAPE_APP_BASE_NAME}.AppDir"))
if appdir.exists():
safe_rmtree(appdir)
if not prepare_appdir(ctx, appdir):
return False
# Define output filename
version = ctx.get_nxtscape_chromium_version().replace(" ", "_")
arch_suffix = "x86_64" if ctx.architecture == "x64" else "arm64"
filename = f"{ctx.NXTSCAPE_APP_BASE_NAME}-{version}-{arch_suffix}.AppImage"
output_path = Path(join_paths(package_dir, filename))
# Create AppImage
if not create_appimage(ctx, appdir, output_path):
return False
# Clean up AppDir
safe_rmtree(appdir)
# Store package path in context for GCS upload
ctx.package_path = output_path
log_success(f"✅ AppImage created: {output_path}")
log_info(f" Size: {output_path.stat().st_size / 1024 / 1024:.1f} MB")
return True
@@ -240,4 +253,4 @@ def package_universal(contexts: List[BuildContext]) -> bool:
def sign_binaries(ctx: BuildContext) -> bool:
"""Linux doesn't require code signing like macOS/Windows"""
log_info("Code signing is not required for Linux AppImages")
return True
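
The final AppImage step is a single appimagetool invocation with the target architecture exported in $ARCH; a minimal sketch, assuming the tool is already downloaded (paths illustrative).

import os
import subprocess
from pathlib import Path

def build_appimage(appimagetool: Path, appdir: Path, output: Path, arch: str = "x86_64") -> bool:
    env = dict(os.environ, ARCH=arch)  # appimagetool reads the target arch from $ARCH
    result = subprocess.run(
        [str(appimagetool), "--comp", "gzip", str(appdir), str(output)],
        env=env,
        check=False,
    )
    if result.returncode != 0:
        return False
    output.chmod(0o755)  # make the resulting AppImage executable
    return True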

View File

@@ -11,73 +11,82 @@ import zipfile
from pathlib import Path
from typing import Optional, List
from context import BuildContext
from utils import run_command, log_info, log_error, log_success, log_warning, join_paths, IS_WINDOWS
from utils import (
run_command,
log_info,
log_error,
log_success,
log_warning,
join_paths,
IS_WINDOWS,
)
def package(ctx: BuildContext) -> bool:
"""Create Windows packages (installer and portable zip)"""
log_info("\n📦 Creating Windows packages...")
# First, ensure mini_installer is built
if not build_mini_installer(ctx):
log_error("Failed to build mini_installer")
return False
# Create both installer and portable zip
success = True
if create_installer(ctx):
log_success("Installer created successfully")
else:
log_error("Failed to create installer")
success = False
if create_portable_zip(ctx):
log_success("Portable ZIP created successfully")
else:
log_error("Failed to create portable ZIP")
success = False
return success
def build_mini_installer(ctx: BuildContext) -> bool:
"""Build the mini_installer target if it doesn't exist"""
log_info("\n🔨 Checking mini_installer build...")
# Get paths
build_output_dir = join_paths(ctx.chromium_src, ctx.out_dir)
mini_installer_path = build_output_dir / "mini_installer.exe"
if mini_installer_path.exists():
log_info("mini_installer.exe already exists")
return True
log_info("Building mini_installer target...")
# Build mini_installer using autoninja
try:
# Use autoninja.bat on Windows
autoninja_cmd = "autoninja.bat" if IS_WINDOWS else "autoninja"
# Build the mini_installer target
cmd = [
autoninja_cmd,
"-C",
ctx.out_dir, # Use relative path like in compile.py
"mini_installer"
"mini_installer",
]
# Change to chromium_src directory before running (like compile.py does)
import os
old_cwd = os.getcwd()
os.chdir(ctx.chromium_src)
try:
run_command(cmd)
finally:
os.chdir(old_cwd)
# Verify the file was created
if mini_installer_path.exists():
log_success("mini_installer built successfully")
@@ -85,7 +94,7 @@ def build_mini_installer(ctx: BuildContext) -> bool:
else:
log_error("mini_installer build completed but file not found")
return False
except Exception as e:
log_error(f"Failed to build mini_installer: {e}")
return False
@@ -94,24 +103,26 @@ def build_mini_installer(ctx: BuildContext) -> bool:
def create_installer(ctx: BuildContext) -> bool:
"""Create Windows installer (mini_installer.exe)"""
log_info("\n🔧 Creating Windows installer...")
# Get paths
build_output_dir = join_paths(ctx.chromium_src, ctx.out_dir)
mini_installer_path = build_output_dir / "mini_installer.exe"
if not mini_installer_path.exists():
log_warning(f"mini_installer.exe not found at: {mini_installer_path}")
log_info("To build the installer, run: autoninja -C out\\Default_x64 mini_installer")
log_info(
"To build the installer, run: autoninja -C out\\Default_x64 mini_installer"
)
return False
# Create output directory
output_dir = ctx.get_dist_dir()
output_dir.mkdir(parents=True, exist_ok=True)
# Generate installer filename with version and architecture
installer_name = f"{ctx.get_app_base_name()}_{ctx.get_nxtscape_chromium_version()}_{ctx.architecture}_installer.exe"
installer_path = output_dir / installer_name
# Copy mini_installer to final location
try:
shutil.copy2(mini_installer_path, installer_path)
@@ -125,35 +136,37 @@ def create_installer(ctx: BuildContext) -> bool:
def create_portable_zip(ctx: BuildContext) -> bool:
"""Create ZIP of just the installer for easier distribution"""
log_info("\n📦 Creating installer ZIP package...")
# Get paths
build_output_dir = join_paths(ctx.chromium_src, ctx.out_dir)
mini_installer_path = build_output_dir / "mini_installer.exe"
if not mini_installer_path.exists():
log_warning(f"mini_installer.exe not found at: {mini_installer_path}")
log_info("To build the installer, run: autoninja -C out\\Default_x64 mini_installer")
log_info(
"To build the installer, run: autoninja -C out\\Default_x64 mini_installer"
)
return False
# Create output directory
output_dir = ctx.get_dist_dir()
output_dir.mkdir(parents=True, exist_ok=True)
# Generate ZIP filename with version and architecture
zip_name = f"{ctx.get_app_base_name()}_{ctx.get_nxtscape_chromium_version()}_{ctx.architecture}_installer.zip"
zip_path = output_dir / zip_name
# Create ZIP file containing just the installer
try:
with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
# Add mini_installer.exe to the zip
installer_name = f"{ctx.get_app_base_name()}_{ctx.get_nxtscape_version()}_{ctx.architecture}_installer.exe"
zipf.write(mini_installer_path, installer_name)
# Get file size for logging
file_size = mini_installer_path.stat().st_size
log_info(f"Added installer to ZIP ({file_size // (1024*1024)} MB)")
log_success(f"Installer ZIP created: {zip_name}")
return True
except Exception as e:
@@ -164,16 +177,16 @@ def create_portable_zip(ctx: BuildContext) -> bool:
def sign_binaries(ctx: BuildContext, certificate_name: Optional[str] = None) -> bool:
"""Sign Windows binaries using SSL.com CodeSignTool"""
log_info("\n🔏 Signing Windows binaries...")
# Get paths to sign
build_output_dir = join_paths(ctx.chromium_src, ctx.out_dir)
# List of binaries to sign
binaries_to_sign = [
build_output_dir / "chrome.exe",
build_output_dir / "mini_installer.exe"
build_output_dir / "mini_installer.exe",
]
# Check which binaries exist
existing_binaries = []
for binary in binaries_to_sign:
@@ -182,11 +195,11 @@ def sign_binaries(ctx: BuildContext, certificate_name: Optional[str] = None) ->
log_info(f"Found binary to sign: {binary.name}")
else:
log_warning(f"Binary not found: {binary}")
if not existing_binaries:
log_error("No binaries found to sign")
return False
# Always use CodeSignTool for signing
return sign_with_codesigntool(existing_binaries)
@@ -194,27 +207,27 @@ def sign_binaries(ctx: BuildContext, certificate_name: Optional[str] = None) ->
def sign_with_codesigntool(binaries: List[Path]) -> bool:
"""Sign binaries using SSL.com CodeSignTool"""
log_info("Using SSL.com CodeSignTool for signing...")
# Get CodeSignTool directory from environment
codesigntool_dir = os.environ.get('CODE_SIGN_TOOL_PATH')
codesigntool_dir = os.environ.get("CODE_SIGN_TOOL_PATH")
if not codesigntool_dir:
log_error("CODE_SIGN_TOOL_PATH not set in .env file")
log_error("Set CODE_SIGN_TOOL_PATH=C:/src/CodeSignTool-v1.3.2-windows")
return False
# Construct path to CodeSignTool.bat
codesigntool_path = Path(codesigntool_dir) / "CodeSignTool.bat"
if not codesigntool_path.exists():
log_error(f"CodeSignTool.bat not found at: {codesigntool_path}")
log_error(f"Make sure CODE_SIGN_TOOL_PATH points to the CodeSignTool directory")
return False
# Check for required environment variables
username = os.environ.get('ESIGNER_USERNAME')
password = os.environ.get('ESIGNER_PASSWORD')
totp_secret = os.environ.get('ESIGNER_TOTP_SECRET')
credential_id = os.environ.get('ESIGNER_CREDENTIAL_ID')
username = os.environ.get("ESIGNER_USERNAME")
password = os.environ.get("ESIGNER_PASSWORD")
totp_secret = os.environ.get("ESIGNER_TOTP_SECRET")
credential_id = os.environ.get("ESIGNER_CREDENTIAL_ID")
if not all([username, password, totp_secret]):
log_error("Missing required eSigner environment variables in .env:")
log_error(" ESIGNER_USERNAME=your-email")
@@ -223,97 +236,120 @@ def sign_with_codesigntool(binaries: List[Path]) -> bool:
if not credential_id:
log_warning(" ESIGNER_CREDENTIAL_ID is recommended but optional")
return False
all_success = True
for binary in binaries:
try:
log_info(f"Signing {binary.name}...")
# Build command
# Create a temp output directory to avoid source/dest conflict
temp_output_dir = binary.parent / "signed_temp"
temp_output_dir.mkdir(exist_ok=True)
cmd = [
str(codesigntool_path),
"sign",
"-username", username,
"-password", f'"{password}"', # Always quote the password for shell
"-username",
username,
"-password",
f'"{password}"', # Always quote the password for shell
]
# Add credential_id BEFORE totp_secret (order matters!)
if credential_id:
cmd.extend(["-credential_id", credential_id])
cmd.extend([
"-totp_secret", totp_secret,
"-input_file_path", str(binary),
"-output_dir_path", str(temp_output_dir),
"-override" # Add this back
])
cmd.extend(
[
"-totp_secret",
totp_secret,
"-input_file_path",
str(binary),
"-output_dir_path",
str(temp_output_dir),
"-override", # Add this back
]
)
# Note: Timestamp server is configured on SSL.com side automatically
# CodeSignTool needs to be run as a shell command for proper quote handling
cmd_str = ' '.join(cmd)
cmd_str = " ".join(cmd)
log_info(f"Running: {cmd_str}")
import subprocess
result = subprocess.run(cmd_str, shell=True, capture_output=True, text=True, cwd=str(codesigntool_path.parent))
result = subprocess.run(
cmd_str,
shell=True,
capture_output=True,
text=True,
cwd=str(codesigntool_path.parent),
)
# Print output for debugging
if result.stdout:
for line in result.stdout.split('\n'):
for line in result.stdout.split("\n"):
if line.strip():
log_info(line.strip())
if result.stderr:
for line in result.stderr.split('\n'):
for line in result.stderr.split("\n"):
if line.strip() and "WARNING" not in line:
log_error(line.strip())
# Check if signing actually succeeded by looking for error messages
# CodeSignTool returns 0 even on auth errors, so we need to check output
if result.stdout and "Error:" in result.stdout:
log_error(f"✗ Failed to sign {binary.name} - Authentication or signing error")
log_error(
f"✗ Failed to sign {binary.name} - Authentication or signing error"
)
all_success = False
continue
# Move the signed file back to original location
signed_file = temp_output_dir / binary.name
if signed_file.exists():
import shutil
shutil.move(str(signed_file), str(binary))
log_info(f"Moved signed {binary.name} to original location")
# Clean up temp directory
try:
temp_output_dir.rmdir()
except OSError:
pass # Directory might not be empty
# Verify the file is actually signed (Windows only)
verify_cmd = ["powershell", "-Command",
f"(Get-AuthenticodeSignature '{binary}').Status"]
verify_cmd = [
"powershell",
"-Command",
f"(Get-AuthenticodeSignature '{binary}').Status",
]
try:
import subprocess
verify_result = subprocess.run(verify_cmd, capture_output=True, text=True)
verify_result = subprocess.run(
verify_cmd, capture_output=True, text=True
)
if "Valid" in verify_result.stdout:
log_success(f"{binary.name} signed and verified successfully")
else:
log_error(f"{binary.name} signing verification failed - Status: {verify_result.stdout.strip()}")
log_error(
f"{binary.name} signing verification failed - Status: {verify_result.stdout.strip()}"
)
all_success = False
except Exception:
log_warning(f"Could not verify signature for {binary.name}")
except Exception as e:
log_error(f"Failed to sign {binary.name}: {e}")
all_success = False
return all_success
def package_universal(contexts: List[BuildContext]) -> bool:
"""Windows doesn't support universal binaries like macOS"""
log_warning("Universal binaries are not supported on Windows")
@@ -324,32 +360,34 @@ def package_universal(contexts: List[BuildContext]) -> bool:
def get_target_cpu(build_output_dir: Path) -> str:
"""Get target CPU architecture from build configuration"""
args_gn_path = build_output_dir / "args.gn"
if not args_gn_path.exists():
return "x64" # Default
try:
args_gn_content = args_gn_path.read_text(encoding='utf-8')
for cpu in ('x64', 'x86', 'arm64'):
args_gn_content = args_gn_path.read_text(encoding="utf-8")
for cpu in ("x64", "x86", "arm64"):
if f'target_cpu="{cpu}"' in args_gn_content:
return cpu
except Exception:
pass
return "x64" # Default
def create_files_cfg_package(ctx: BuildContext) -> bool:
"""Create package using Chromium's FILES.cfg approach (alternative method)"""
log_info("\n📦 Creating FILES.cfg-based package...")
build_output_dir = join_paths(ctx.chromium_src, ctx.out_dir)
files_cfg_path = ctx.chromium_src / "chrome" / "tools" / "build" / "win" / "FILES.cfg"
files_cfg_path = (
ctx.chromium_src / "chrome" / "tools" / "build" / "win" / "FILES.cfg"
)
if not files_cfg_path.exists():
log_error(f"FILES.cfg not found at: {files_cfg_path}")
return False
# This would require implementing the filescfg module functionality
# from ungoogled-chromium, which is quite complex
log_warning("FILES.cfg packaging not yet implemented")
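
The portable package above is just the installer wrapped in a deflated ZIP under a versioned name; a minimal sketch (file names illustrative).

import zipfile
from pathlib import Path

def zip_installer(installer: Path, zip_path: Path, arcname: str) -> None:
    # Store mini_installer.exe inside the archive under its distribution name.
    with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zf:
        zf.write(installer, arcname)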

View File

@@ -349,4 +349,3 @@ def commit_patch(patch_path: Path, tree_path: Path) -> bool:
except Exception as e:
log_warning(f"Error creating commit for patch {patch_path.name}: {e}")
return False

View File

@@ -95,4 +95,3 @@ def run_postbuild(ctx: BuildContext):
# Add other post-build tasks here as needed
log_success("Post-build tasks completed")

View File

@@ -21,7 +21,9 @@ def copy_resources(ctx: BuildContext, commit_each: bool = False) -> bool:
copy_config_path = ctx.get_copy_resources_config()
if not copy_config_path.exists():
log_error(f"Copy configuration file not found: {copy_config_path}")
raise FileNotFoundError(f"Copy configuration file not found: {copy_config_path}")
raise FileNotFoundError(
f"Copy configuration file not found: {copy_config_path}"
)
with open(copy_config_path, "r") as f:
config = yaml.safe_load(f)
@@ -29,9 +31,11 @@ def copy_resources(ctx: BuildContext, commit_each: bool = False) -> bool:
if "copy_operations" not in config:
log_info("⚠️ No copy_operations defined in configuration")
return True
if commit_each:
log_info("📝 Git commit mode enabled - will create a commit after each resource copy")
log_info(
"📝 Git commit mode enabled - will create a commit after each resource copy"
)
# Process each copy operation
for operation in config["copy_operations"]:
@@ -63,7 +67,9 @@ def copy_resources(ctx: BuildContext, commit_each: bool = False) -> bool:
shutil.copytree(src_path, dst_path, dirs_exist_ok=True)
log_info(f" ✓ Copied directory: {source}{destination}")
if commit_each:
commit_resource_copy(name, source, destination, ctx.chromium_src)
commit_resource_copy(
name, source, destination, ctx.chromium_src
)
else:
log_warning(f" Source directory not found: {source}")
@@ -76,9 +82,13 @@ def copy_resources(ctx: BuildContext, commit_each: bool = False) -> bool:
file_path = Path(file_path)
if file_path.is_file():
shutil.copy2(file_path, dst_base)
log_info(f" ✓ Copied {len(files)} files: {source}{destination}")
log_info(
f" ✓ Copied {len(files)} files: {source}{destination}"
)
if commit_each:
commit_resource_copy(name, source, destination, ctx.chromium_src)
commit_resource_copy(
name, source, destination, ctx.chromium_src
)
else:
log_warning(f" No files found matching: {source}")
@@ -89,7 +99,9 @@ def copy_resources(ctx: BuildContext, commit_each: bool = False) -> bool:
shutil.copy2(src_path, dst_base)
log_info(f" ✓ Copied file: {source}{destination}")
if commit_each:
commit_resource_copy(name, source, destination, ctx.chromium_src)
commit_resource_copy(
name, source, destination, ctx.chromium_src
)
else:
log_warning(f" Source file not found: {source}")
@@ -100,25 +112,31 @@ def copy_resources(ctx: BuildContext, commit_each: bool = False) -> bool:
return True
def commit_resource_copy(name: str, source: str, destination: str, chromium_src: Path) -> bool:
def commit_resource_copy(
name: str, source: str, destination: str, chromium_src: Path
) -> bool:
"""Create a git commit for the copied resource"""
try:
# Stage all changes
cmd_add = ['git', 'add', '-A']
result = subprocess.run(cmd_add, capture_output=True, text=True, cwd=chromium_src)
cmd_add = ["git", "add", "-A"]
result = subprocess.run(
cmd_add, capture_output=True, text=True, cwd=chromium_src
)
if result.returncode != 0:
log_warning(f"Failed to stage changes for resource copy: {name}")
if result.stderr:
log_warning(f"Error: {result.stderr}")
return False
# Create commit message
commit_message = f"resource: {name.lower()}"
# Create the commit
cmd_commit = ['git', 'commit', '-m', commit_message]
result = subprocess.run(cmd_commit, capture_output=True, text=True, cwd=chromium_src)
cmd_commit = ["git", "commit", "-m", commit_message]
result = subprocess.run(
cmd_commit, capture_output=True, text=True, cwd=chromium_src
)
if result.returncode == 0:
log_success(f"📝 Created commit for resource: {name}")
return True
@@ -127,8 +145,7 @@ def commit_resource_copy(name: str, source: str, destination: str, chromium_src:
if result.stderr:
log_warning(f"Error: {result.stderr}")
return False
except Exception as e:
log_warning(f"Error creating commit for resource {name}: {e}")
return False

View File

@@ -54,25 +54,25 @@ def check_signing_environment() -> bool:
# Only check on macOS
if not IS_MACOS:
return True
required_vars = [
"MACOS_CERTIFICATE_NAME",
"PROD_MACOS_NOTARIZATION_APPLE_ID",
"PROD_MACOS_NOTARIZATION_TEAM_ID",
"PROD_MACOS_NOTARIZATION_PWD"
"PROD_MACOS_NOTARIZATION_PWD",
]
missing = []
for var in required_vars:
if not os.environ.get(var):
missing.append(var)
if missing:
log_error("❌ Signing requires macOS environment variables!")
log_error(f"Missing environment variables: {', '.join(missing)}")
log_error("Please set all required environment variables before signing.")
return False
return True
@@ -120,19 +120,26 @@ def find_components_to_sign(
# Check both versioned and non-versioned paths for BrowserOS Framework
# Handle both release and debug framework names
framework_names = ["BrowserOS Framework.framework", "BrowserOS Dev Framework.framework"]
framework_names = [
"BrowserOS Framework.framework",
"BrowserOS Dev Framework.framework",
]
nxtscape_framework_paths = []
for fw_name in framework_names:
fw_path = join_paths(framework_path, fw_name)
if fw_path.exists():
nxtscape_framework_paths.append(fw_path)
# Add versioned path if context is available
if ctx and ctx.nxtscape_chromium_version:
versioned_path = join_paths(fw_path, "Versions", ctx.nxtscape_chromium_version)
versioned_path = join_paths(
fw_path, "Versions", ctx.nxtscape_chromium_version
)
if versioned_path.exists():
nxtscape_framework_paths.insert(0, versioned_path) # Prioritize versioned path
nxtscape_framework_paths.insert(
0, versioned_path
) # Prioritize versioned path
# Find all helper apps
for nxtscape_fw_path in nxtscape_framework_paths:
@@ -388,11 +395,13 @@ def sign_all_components(
if exe_path.exists():
main_exe = exe_path
break
if not main_exe:
log_error(f"Main executable not found in {join_paths(app_path, 'Contents', 'MacOS')}")
log_error(
f"Main executable not found in {join_paths(app_path, 'Contents', 'MacOS')}"
)
return False
if not sign_component(main_exe, certificate_name, "com.browseros.BrowserOS"):
return False
@@ -417,7 +426,9 @@ def sign_all_components(
[
join_paths(root_dir, "entitlements"), # Legacy location
join_paths(root_dir, "build", "src", "chrome", "app"),
join_paths(app_path.parent.parent.parent, "chrome", "app"), # Chromium source
join_paths(
app_path.parent.parent.parent, "chrome", "app"
), # Chromium source
]
)
@@ -487,7 +498,9 @@ def notarize_app(
log_info("\n📤 Preparing for notarization...")
# Create zip for notarization
notarize_zip = ctx.get_notarization_zip() if ctx else join_paths(root_dir, "notarize.zip")
notarize_zip = (
ctx.get_notarization_zip() if ctx else join_paths(root_dir, "notarize.zip")
)
if notarize_zip.exists():
notarize_zip.unlink()
@@ -719,7 +732,9 @@ def sign_universal(contexts: List[BuildContext]) -> bool:
universal_dir.mkdir(parents=True, exist_ok=True)
# Use universalizer script to merge architectures
universalizer_script = join_paths(contexts[0].root_dir, "build", "universalizer_patched.py")
universalizer_script = join_paths(
contexts[0].root_dir, "build", "universalizer_patched.py"
)
if not universalizer_script.exists():
log_error(f"Universalizer script not found: {universalizer_script}")
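
The signing pre-flight check is a lookup of four environment variables; a standalone sketch of the same idea.

import os

REQUIRED_SIGNING_VARS = (
    "MACOS_CERTIFICATE_NAME",
    "PROD_MACOS_NOTARIZATION_APPLE_ID",
    "PROD_MACOS_NOTARIZATION_TEAM_ID",
    "PROD_MACOS_NOTARIZATION_PWD",
)

def missing_signing_vars() -> list[str]:
    # Return the names of any unset or empty variables so the caller can report them.
    return [name for name in REQUIRED_SIGNING_VARS if not os.environ.get(name)]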

View File

@@ -9,6 +9,7 @@ import requests
from typing import Optional, List
import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from utils import log_info, log_warning, log_error, get_platform
@@ -34,18 +35,18 @@ def get_os_info() -> tuple[str, str]:
def send_slack_notification(message: str, success: bool = True) -> bool:
"""Send a notification to Slack if webhook URL is configured"""
webhook_url = get_slack_webhook_url()
if not webhook_url:
# Silently skip if no webhook configured
return True
# Choose emoji and color based on success status
emoji = "✅" if success else "❌"
color = "good" if success else "danger"
# Get OS information
os_emoji, os_name = get_os_info()
# Create Slack message payload
payload = {
"attachments": [
@@ -55,30 +56,30 @@ def send_slack_notification(message: str, success: bool = True) -> bool:
{
"title": "Nxtscape Build",
"value": f"{emoji} {message}",
"short": False
"short": False,
}
],
"footer": f"{os_emoji} Nxtscape Build System - {os_name}",
"ts": None # Slack will use current timestamp
"ts": None, # Slack will use current timestamp
}
]
}
try:
response = requests.post(
webhook_url,
data=json.dumps(payload),
headers={'Content-Type': 'application/json'},
timeout=10
headers={"Content-Type": "application/json"},
timeout=10,
)
if response.status_code == 200:
log_info(f"📲 Slack notification sent: {message}")
return True
else:
log_warning(f"Slack notification failed with status {response.status_code}")
return False
except requests.RequestException as e:
log_warning(f"Failed to send Slack notification: {e}")
return False
@@ -97,10 +98,12 @@ def notify_build_step(step_name: str) -> bool:
return send_slack_notification(message, success=True)
def notify_build_success(duration_mins: int, duration_secs: int, gcs_uris: Optional[List[str]] = None) -> bool:
def notify_build_success(
duration_mins: int, duration_secs: int, gcs_uris: Optional[List[str]] = None
) -> bool:
"""Notify that build completed successfully"""
message = f"Build completed successfully in {duration_mins}m {duration_secs}s"
# Add GCS URIs to message if provided
if gcs_uris:
message += f"\n\nUploaded artifacts ({len(gcs_uris)} files):"
@@ -111,7 +114,7 @@ def notify_build_success(duration_mins: int, duration_secs: int, gcs_uris: Optio
message += f"\n{public_url}"
else:
message += f"\n{uri}"
return send_slack_notification(message, success=True)
@@ -131,9 +134,9 @@ def notify_gcs_upload(architecture: str, gcs_uris: List[str]) -> bool:
"""Notify about GCS upload for a specific architecture"""
if not gcs_uris:
return True
message = f"[{architecture}] Uploaded {len(gcs_uris)} artifact(s) to GCS"
# Add URIs to message
for uri in gcs_uris:
# Convert gs:// URI to public URL
@@ -142,5 +145,5 @@ def notify_gcs_upload(architecture: str, gcs_uris: List[str]) -> bool:
message += f"\n{public_url}"
else:
message += f"\n{uri}"
return send_slack_notification(message, success=True)
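
The notification path is a single webhook POST with a small attachment payload; a minimal sketch using requests, with the webhook URL supplied by the caller.

import json
import requests

def post_to_slack(webhook_url: str, text: str, ok: bool = True) -> bool:
    payload = {"attachments": [{"color": "good" if ok else "danger", "text": text}]}
    resp = requests.post(
        webhook_url,
        data=json.dumps(payload),
        headers={"Content-Type": "application/json"},
        timeout=10,
    )
    return resp.status_code == 200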

View File

@@ -83,4 +83,3 @@ def apply_string_replacements(ctx: BuildContext) -> bool:
log_error("String replacements failed")
return success

View File

@@ -422,4 +422,3 @@ def main(args):
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))

View File

@@ -29,13 +29,15 @@ def _ensure_log_file():
# Create logs directory if it doesn't exist
log_dir = Path(__file__).parent.parent / "logs"
log_dir.mkdir(exist_ok=True)
# Create log file with timestamp
timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
log_file_path = log_dir / f"build_{timestamp}.log"
# Open with UTF-8 encoding to handle any characters
_log_file = open(log_file_path, 'w', encoding='utf-8')
_log_file.write(f"Nxtscape Build Log - Started at {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
_log_file = open(log_file_path, "w", encoding="utf-8")
_log_file.write(
f"Nxtscape Build Log - Started at {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n"
)
_log_file.write("=" * 80 + "\n\n")
return _log_file
@@ -52,14 +54,16 @@ def _sanitize_for_windows(message: str) -> str:
"""Remove non-ASCII characters on Windows to avoid encoding issues"""
if sys.platform == "win32":
# Remove all non-ASCII characters
return ''.join(char for char in message if ord(char) < 128)
return "".join(char for char in message if ord(char) < 128)
return message
def log_info(message: str):
"""Print info message"""
print(_sanitize_for_windows(message))
_log_to_file(f"INFO: {message}")
def log_warning(message: str):
"""Print warning message"""
if sys.platform == "win32":
@@ -68,6 +72,7 @@ def log_warning(message: str):
print(f"⚠️ {message}")
_log_to_file(f"WARNING: {message}")
def log_error(message: str):
"""Print error message"""
if sys.platform == "win32":
@@ -107,51 +112,55 @@ def run_command(
stderr=subprocess.STDOUT, # Merge stderr into stdout
text=True,
bufsize=1,
universal_newlines=True
universal_newlines=True,
)
stdout_lines = []
# Stream output line by line
for line in iter(process.stdout.readline, ''):
for line in iter(process.stdout.readline, ""):
line = line.rstrip()
if line:
print(line) # Print to console in real-time
_log_to_file(f"RUN_COMMAND: STDOUT: {line}") # Log to file
stdout_lines.append(line)
# Wait for process to complete
process.wait()
_log_to_file(f"RUN_COMMAND: ✅ Command completed with exit code: {process.returncode}")
_log_to_file(
f"RUN_COMMAND: ✅ Command completed with exit code: {process.returncode}"
)
# Create a CompletedProcess object with captured output
result = subprocess.CompletedProcess(
cmd,
process.returncode,
stdout='\n'.join(stdout_lines) if stdout_lines else '',
stderr=''
stdout="\n".join(stdout_lines) if stdout_lines else "",
stderr="",
)
if check and process.returncode != 0:
raise subprocess.CalledProcessError(process.returncode, cmd, result.stdout, result.stderr)
raise subprocess.CalledProcessError(
process.returncode, cmd, result.stdout, result.stderr
)
return result
except subprocess.CalledProcessError as e:
_log_to_file(f"RUN_COMMAND: ❌ Command failed: {cmd_str}")
_log_to_file(f"RUN_COMMAND: ❌ Exit code: {e.returncode}")
if e.stdout:
for line in e.stdout.strip().split('\n'):
for line in e.stdout.strip().split("\n"):
if line.strip():
_log_to_file(f"RUN_COMMAND: STDOUT: {line}")
if e.stderr:
for line in e.stderr.strip().split('\n'):
for line in e.stderr.strip().split("\n"):
if line.strip():
_log_to_file(f"RUN_COMMAND: STDERR: {line}")
if check:
log_error(f"Command failed: {cmd_str}")
if e.stderr:
@@ -197,10 +206,12 @@ def get_platform_arch() -> str:
elif IS_MACOS:
# macOS can be arm64 or x64
import platform
return "arm64" if platform.machine() == "arm64" else "x64"
elif IS_LINUX:
# Linux can be x64 or arm64
import platform
machine = platform.machine()
if machine in ["x86_64", "AMD64"]:
return "x64"
@@ -231,7 +242,7 @@ def normalize_path(path: Union[str, Path]) -> Path:
path = Path(path)
if IS_WINDOWS:
# Convert forward slashes to backslashes on Windows
return Path(str(path).replace('/', '\\'))
return Path(str(path).replace("/", "\\"))
return path
@@ -239,31 +250,31 @@ def join_paths(*paths: Union[str, Path]) -> Path:
"""Join paths in a platform-aware way"""
if not paths:
return Path()
result = Path(paths[0])
for p in paths[1:]:
result = result / p
return normalize_path(result)
def safe_rmtree(path: Union[str, Path]) -> None:
"""Safely remove directory tree, handling Windows symlinks and junction points"""
path = Path(path)
if not path.exists():
return
if IS_WINDOWS:
# On Windows, use rmdir for junctions and symlinks
import stat
def handle_remove_readonly(func, path, exc):
"""Error handler for Windows readonly files"""
if os.path.exists(path):
os.chmod(path, stat.S_IWRITE)
func(path)
# Try to remove as a junction/symlink first
try:
if path.is_symlink() or (path.is_dir() and os.path.islink(str(path))):
@@ -271,10 +282,9 @@ def safe_rmtree(path: Union[str, Path]) -> None:
return
except:
pass
# Fall back to rmtree with error handler
shutil.rmtree(path, onerror=handle_remove_readonly)
else:
# On Unix-like systems, regular rmtree works fine
shutil.rmtree(path)
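For context on how the platform-aware helpers in this file are meant to compose, here is a small sketch. The "out/Default" directory and the git command are arbitrary examples, and run_command is assumed to take the command as a list, as its body above suggests.
# Hypothetical usage of the utils helpers above; paths and command are examples.
from pathlib import Path

from utils import join_paths, log_info, run_command, safe_rmtree

build_dir = join_paths(Path.cwd(), "out", "Default")
if build_dir.exists():
    safe_rmtree(build_dir)  # handles Windows junctions and read-only files
result = run_command(["git", "status", "--short"])  # streams and logs output
log_info(f"git status exited with code {result.returncode}")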

28 pyproject.toml Normal file
View File

@@ -0,0 +1,28 @@
[tool.black]
line-length = 88
target-version = ['py312']
include = '\.pyi?$'
extend-exclude = '''
/(
# directories
\.eggs
| \.git
| \.hg
| \.mypy_cache
| \.tox
| \.venv
| env
| _build
| buck-out
| build
| dist
| chromium_src
| chromium_src_bak
| third_party
)/
'''

[tool.isort]
profile = "black"
line_length = 88
skip_glob = ["env/*", "chromium_src/*", "chromium_src_bak/*", "third_party/*"]
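The new pyproject.toml is what this commit's reformatting is based on: black at 88 columns targeting Python 3.12, isort using the black profile, and the Chromium trees excluded from both. Running `black .` and `isort .` from the repo root picks these settings up automatically. As a minimal sketch of the same line length applied through black's documented Python API (format_str and Mode), for illustration only:
# Minimal sketch: format a one-liner with the same 88-column black settings.
import black

src = "x = {'a':1,'b':2}\n"
formatted = black.format_str(src, mode=black.Mode(line_length=88))
print(formatted)  # x = {"a": 1, "b": 2}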