chromium 142 upgrade, new cli (#214)

* refactor 1: new typer based cli and browseros cli module

* refactor 2: fixes to context.py

* refactor 3: common/ and notify

* new sign and package module

* update .gitignore

* refactor 5: dev.py and modules for each

* refactor 6: clean-up old files

* refactor 7: organise modules further

* refactor 8: renaming nxtscape to browseros

* refactor 9: dev.py remove cli load

* fix: pyproject.toml

* fix: typer pretty exception disable

* refactor 10: cli/build.py set to primary

* refactor 10: cli/build.py set to primary, move OS detection

* refactor: context split, env and module dataclass

* refactor: clean and git moved to new module type

* refactor: compile and configure

* refactor: sign and package module update

* refactor: new build.py cli

* refactor: remove redundant OS checks

* refactor: rename BuildContext to Context

* refactor: rename BuildModule to CommandModule

* refactor: dev.py to use the new modules

* build.py: improve help output

* remove old patching way

* clean-up: remove old build.py stuff

* refactor: move to proper yaml parsing

* clean-up: remove legacy args gating

* fix: patches issues

* fix: clean-up build.py and args resolver

* minor: gitignore

* fix: patches.py issue

* support universal build

* fix: ENV variable and YAMLs

* fix: move compile to folder to avoid conflicts

* fixes: more env fixes

* fix: build_type override in CLI fix

* fix: universal clean all archs before starting

* fix: universal build type constants

* fix: linter, extract options

* fix: linter

* fix: remove chromium_src as a not a conflicting flag

* fix: support chromium_src from cli in config mode

* fix: notify with better messages

* feat: new apply patch with --reset-to feature

* feat: refactor apply and extract into separate sub modules

* 142 patches working (#211)

* updates to build.py apply/patch

* removed all old patches

* 142 build update

* fix: get updated patches from main to 142

* fix: correct patches dir

* fix: import path

* add pyright

* fix: setup pyright

* fix: new updated patches from 137 rebased on 142

* feat: new extract_patch command

* fix: add missing side_panel build patch

* fix: extension uninstall for browseros

* fix: prefs fix

* fix: ota extension updater patch fix

* fix: llm hub and chat

* feat: universal module also packages individual archs

* fix: add browseros-server binaries

* fix: attach color for notify

* fix: attachment for slack

* fix: update chromium version to 142.0.7444.175

* feat: add new icons needed

* fix: disable settings in menu

* fix: uv add build-backend

* minor: chromium version bump

* clean-up: removed old files of extension and sidepanel

* fix: product logo generate and assets.car and appicon.icns

* feat: few chromium UI fixes

* fix: update features.yaml

* fix: features.yaml path in context

* refactor: rename to get_patches_dir()

* feat: show browserOS version in about page

* fix: copy browseros_version on the build time and rename other to offset

* bump offset

* fix: update features.yaml

* feat: load env from .env files too

* fix: enable split view

* clean-up: removed old prefs

* fix: minor import issue

* fix: linux flag update
This commit is contained in:
Nikhil
2025-12-03 21:09:23 +00:00
committed by GitHub
parent 545d1a2de5
commit 24e9cfd8f2
265 changed files with 10826 additions and 28596 deletions

11
.gitignore vendored
View File

@@ -4,7 +4,16 @@
**/env
**/logs
**/old-scripts
**/__pycache__/**
# Python
**/__pycache__/
**/*.pyc
**/*.pyo
**/*.egg-info/
**/*.egg
**/.pytest_cache/
**/dist/
nxtscape-cli-access.json
gclient.json
.env

View File

@@ -0,0 +1,22 @@
# Windows Code Signing
ESIGNER_USERNAME=
ESIGNER_PASSWORD=
ESIGNER_TOTP_SECRET=
ESIGNER_CREDENTIAL_ID=
CODE_SIGN_TOOL_PATH=C:/src/CodeSignTool-v1.3.2-windows
# macOS Code Signing
MACOS_CERTIFICATE_NAME=
PROD_MACOS_NOTARIZATION_APPLE_ID=
PROD_MACOS_NOTARIZATION_TEAM_ID=
PROD_MACOS_NOTARIZATION_PWD=
# Slack Notifications
SLACK_WEBHOOK_URL=
# Google Cloud Storage
GOOGLE_APPLICATION_CREDENTIALS=/path/to/service-account-key.json
# Optional
# CHROMIUM_SRC=C:/src/chromium/src
# DEPOT_TOOLS_WIN_TOOLCHAIN=0

View File

@@ -1,3 +1,4 @@
# BrowserOS Server Binaries - Track with Git LFS
resources/binaries/browseros_server/* filter=lfs diff=lfs merge=lfs -text
resources/binaries/codex/ filter=lfs diff=lfs merge=lfs -text
resources/binaries/browseros_server/ filter=lfs diff=lfs merge=lfs -text

View File

@@ -1,4 +1,4 @@
MAJOR=137
MAJOR=142
MINOR=0
BUILD=7151
PATCH=69
BUILD=7444
PATCH=49

View File

@@ -0,0 +1,7 @@
"""
Allow running build package as module: python -m build
"""
from .browseros import app
if __name__ == "__main__":
app()

View File

@@ -0,0 +1,45 @@
#!/usr/bin/env python3
"""
BrowserOS Build System - Main Entry Point

Unified CLI for building, developing, and releasing BrowserOS browser.

Usage:
    # As installed command:
    browseros build --help

    # As module:
    python -m build.browseros build --help
"""
import typer

from .cli import build

# Create main app
# NOTE: pretty exceptions are disabled so raw tracebacks (and exit codes)
# surface unchanged in CI logs.
app = typer.Typer(
    help="BrowserOS Build System",
    pretty_exceptions_enable=False,
    pretty_exceptions_show_locals=False,
)

# Create build sub-app and register build.main as its callback.
# invoke_without_command=True makes `browseros build` (no subcommand) run
# build.main itself.
build_app = typer.Typer(
    pretty_exceptions_enable=False,
    pretty_exceptions_show_locals=False,
)
build_app.callback(invoke_without_command=True)(build.main)

# Add build as a subcommand
app.add_typer(build_app, name="build", help="Build BrowserOS browser")

# Add dev command
# (imported here, after the main app exists, mirroring the build sub-app setup)
from .cli import dev

app.add_typer(dev.app, name="dev", help="Dev patch management")

# TODO: Add release command in future
# from .cli import release
# app.add_typer(release.app, name="release", help="Release automation")

if __name__ == "__main__":
    app()

View File

@@ -1,713 +0,0 @@
#!/usr/bin/env python3
"""
Main build orchestrator for Nxtscape Browser
"""
import os
import sys
import time
import click
from pathlib import Path
from typing import Optional
# Load .env file if it exists
# Load .env file if it exists
def load_env_file():
    """Populate os.environ from a KEY=VALUE style .env file at the repo root.

    Blank lines and ``#`` comment lines are ignored; a value is everything
    after the first ``=``. Existing environment variables are overwritten.
    """
    env_file = Path(__file__).parent.parent / ".env"
    if not env_file.exists():
        return
    with open(env_file, "r") as f:
        for raw_line in f:
            entry = raw_line.strip()
            # Only KEY=VALUE lines count; skip blanks and comments.
            if not entry or entry.startswith("#") or "=" not in entry:
                continue
            key, _, value = entry.partition("=")
            os.environ[key.strip()] = value.strip()
    print(f"✓ Loaded environment from .env file")


# Load .env file on import
load_env_file()
# Import shared components
from context import BuildContext
from utils import (
load_config,
log_info,
log_warning,
log_error,
log_success,
IS_MACOS,
IS_WINDOWS,
IS_LINUX,
)
# Import modules
from modules.clean import clean
from modules.git import setup_git, setup_sparkle
from modules.patches import apply_patches
from modules.resources import copy_resources
from modules.chromium_replace import replace_chromium_files, add_file_to_replacements
from modules.string_replaces import apply_string_replacements
from modules.inject import inject_version
from modules.configure import configure
from modules.compile import build
from modules.gcs import upload_package_artifacts, upload_signed_artifacts, handle_upload_dist
# Platform-specific imports.
# Each branch must end up binding the same five names — sign, sign_universal,
# package, package_universal, run_postbuild — either from a real platform
# module or from a warn-and-succeed stub, so the orchestration code below can
# call them unconditionally.
if IS_MACOS:
    from modules.sign import sign, sign_universal, check_signing_environment
    from modules.package import package, package_universal
    from modules.postbuild import run_postbuild
elif IS_WINDOWS:
    from modules.package_windows import (
        package,
        package_universal,
        sign_binaries as sign,
    )

    # Windows doesn't have universal signing
    def sign_universal(contexts: list[BuildContext]) -> bool:
        log_warning("Universal signing is not supported on Windows")
        return True

    def run_postbuild(ctx: BuildContext) -> None:
        log_warning("Post-build tasks are not implemented for Windows yet")

elif IS_LINUX:
    from modules.package_linux import package, package_universal, sign_binaries as sign

    # Linux doesn't have universal signing
    def sign_universal(contexts: list[BuildContext]) -> bool:
        log_warning("Universal signing is not supported on Linux")
        return True

    def run_postbuild(ctx: BuildContext) -> None:
        log_warning("Post-build tasks are not implemented for Linux yet")

else:
    # Stub functions for other platforms — warn and report success so a
    # build pipeline on an unknown platform degrades instead of crashing.
    def sign(ctx: BuildContext) -> bool:
        log_warning("Signing is not implemented for this platform")
        return True

    def sign_universal(contexts: list[BuildContext]) -> bool:
        log_warning("Universal signing is not implemented for this platform")
        return True

    def package(ctx: BuildContext) -> bool:
        log_warning("Packaging is not implemented for this platform")
        return True

    def package_universal(contexts: list[BuildContext]) -> bool:
        log_warning("Universal packaging is not implemented for this platform")
        return True

    def run_postbuild(ctx: BuildContext) -> None:
        log_warning("Post-build tasks are not implemented for this platform")
from modules.slack import (
notify_build_started,
notify_build_step,
notify_build_success,
notify_build_failure,
notify_build_interrupted,
notify_gcs_upload,
)
def build_main(
    config_file: Optional[Path] = None,
    clean_flag: bool = False,
    git_setup_flag: bool = False,
    apply_patches_flag: bool = False,
    sign_flag: bool = False,
    package_flag: bool = False,
    build_flag: bool = False,
    arch: str = "",  # Will use platform default if not specified
    build_type: str = "debug",
    chromium_src_dir: Optional[Path] = None,
    slack_notifications: bool = False,
    patch_interactive: bool = False,
    patch_commit: bool = False,
    upload_gcs: bool = True,  # Default to uploading to GCS
) -> None:
    """Main build orchestration.

    Resolves the effective configuration (an optional YAML config overrides
    the step flags and build settings passed in, except that a CLI-provided
    chromium_src_dir always wins), then runs the enabled steps once per
    architecture: clean, git setup, patches/resources, configure+compile,
    sign, package, and optional GCS upload. When several architectures are
    requested with universal=true in the config, a merge/sign/package pass
    produces a universal binary afterwards.

    Raises ValueError / FileNotFoundError for a missing or invalid chromium
    source; exits the process on KeyboardInterrupt (130) or any other build
    failure (1).
    """
    log_info("🚀 Nxtscape Build System")
    log_info("=" * 50)

    # Check if sign flag is enabled and required environment variables are set
    if sign_flag and IS_MACOS:
        if not check_signing_environment():
            sys.exit(1)

    # Set Windows-specific environment variables
    if IS_WINDOWS:
        os.environ["DEPOT_TOOLS_WIN_TOOLCHAIN"] = "0"
        log_info("🔧 Set DEPOT_TOOLS_WIN_TOOLCHAIN=0 for Windows build")

    # Setup context
    root_dir = Path(__file__).parent.parent

    # Initialize chromium_src as None - will be set from CLI or config
    chromium_src = None

    # Load config if provided
    config = None
    gn_flags_file = None
    architectures = [arch] if arch else []  # Empty list if no arch specified
    universal = False
    certificate_name = None  # For Windows signing

    if config_file:
        config = load_config(config_file)
        log_info(f"📄 Loaded config from: {config_file}")

        # Override parameters from config
        if "build" in config:
            build_type = config["build"].get("type", build_type)
            arch = config["build"].get("architecture", arch)
            # Check for multi-architecture builds
            if "architectures" in config["build"]:
                architectures = config["build"]["architectures"]
            universal = config["build"].get("universal", False)

        if "steps" in config:
            clean_flag = config["steps"].get("clean", clean_flag)
            git_setup_flag = config["steps"].get("git_setup", git_setup_flag)
            apply_patches_flag = config["steps"].get(
                "apply_patches", apply_patches_flag
            )
            build_flag = config["steps"].get("build", build_flag)
            sign_flag = config["steps"].get("sign", sign_flag)
            package_flag = config["steps"].get("package", package_flag)

        # Override slack notifications from config if not explicitly set via CLI
        if "notifications" in config:
            slack_notifications = config["notifications"].get(
                "slack", slack_notifications
            )

        if "gn_flags" in config and "file" in config["gn_flags"]:
            gn_flags_file = Path(config["gn_flags"]["file"])

        # Get chromium_src from config (only if not provided via CLI)
        if (
            not chromium_src_dir
            and "paths" in config
            and "chromium_src" in config["paths"]
        ):
            config_chromium_src = Path(config["paths"]["chromium_src"])
            chromium_src = config_chromium_src
            log_info(f"📁 Using Chromium source from config: {chromium_src}")

        # Get Windows signing certificate name from config
        if (
            IS_WINDOWS
            and "signing" in config
            and "certificate_name" in config["signing"]
        ):
            certificate_name = config["signing"]["certificate_name"]
            log_info(f"🔏 Using certificate for signing: {certificate_name}")

    # CLI takes precedence over config
    if chromium_src_dir:
        chromium_src = chromium_src_dir
        log_info(f"📁 Using Chromium source from CLI: {chromium_src}")

    # Enforce chromium_src requirement
    if not chromium_src:
        log_error("Chromium source directory is required!")
        log_error(
            "Provide it via --chromium-src CLI option or paths.chromium_src in config YAML"
        )
        log_error("Example: python build.py --chromium-src /path/to/chromium/src")
        raise ValueError("chromium_src is required but not provided")

    # Validate chromium_src path exists
    if not chromium_src.exists():
        log_error(f"Chromium source directory does not exist: {chromium_src}")
        log_error("Please provide a valid chromium source path")
        raise FileNotFoundError(f"Chromium source directory not found: {chromium_src}")

    # If no architectures specified, use platform default
    if not architectures:
        from utils import get_platform_arch

        architectures = [get_platform_arch()]
        log_info(f"📍 Using platform default architecture: {architectures[0]}")

    # Display build configuration
    log_info(f"📍 Root: {root_dir}")
    log_info(f"📍 Chromium source: {chromium_src}")
    log_info(f"📍 Architectures: {architectures}")
    log_info(f"📍 Universal build: {universal}")
    log_info(f"📍 Build type: {build_type}")

    # Start time for overall build
    start_time = time.time()

    # Notify build started (if enabled)
    if slack_notifications:
        notify_build_started(build_type, str(architectures))

    # Run build steps
    try:
        built_contexts = []
        all_gcs_uris = []  # Track all uploaded GCS URIs

        # Build each architecture separately
        for arch_name in architectures:
            log_info(f"\n{'='*60}")
            log_info(f"🏗️ Building for architecture: {arch_name}")
            log_info(f"{'='*60}")

            ctx = BuildContext(
                root_dir=root_dir,
                chromium_src=chromium_src,
                architecture=arch_name,
                build_type=build_type,
                apply_patches=apply_patches_flag,
                sign_package=sign_flag,
                package=package_flag,
                build=build_flag,
            )

            log_info(f"📍 Chromium: {ctx.chromium_version}")
            log_info(f"📍 Nxtscape: {ctx.nxtscape_version}")
            log_info(f"📍 Output directory: {ctx.out_dir}")

            # Clean (only for first architecture to avoid conflicts)
            if clean_flag and arch_name == architectures[0]:
                clean(ctx)
                if slack_notifications:
                    notify_build_step("Completed cleaning build artifacts")

            # Git setup (only once for first architecture)
            if git_setup_flag and arch_name == architectures[0]:
                setup_git(ctx)
                if slack_notifications:
                    notify_build_step("Completed Git setup and Chromium source")

            # Apply patches (only once for first architecture)
            if apply_patches_flag and arch_name == architectures[0]:
                # First do chromium file replacements
                replace_chromium_files(ctx)
                # Then apply string replacements
                apply_string_replacements(ctx)
                # Setup sparkle (macOS only)
                if IS_MACOS:
                    setup_sparkle(ctx)
                else:
                    log_info("Skipping Sparkle setup (macOS only)")
                # Apply patches
                apply_patches(
                    ctx, interactive=patch_interactive, commit_each=patch_commit
                )
                if slack_notifications:
                    notify_build_step("Completed applying patches")

            # Copy resources for each architecture (YAML filters by arch)
            if apply_patches_flag:
                copy_resources(ctx, commit_each=patch_commit)
                if slack_notifications:
                    notify_build_step(
                        f"Completed copying resources for {arch_name}"
                    )

            # Build for this architecture
            if build_flag:
                if slack_notifications:
                    notify_build_step(f"Started building for {arch_name}")
                configure(ctx, gn_flags_file)
                build(ctx)
                # Run post-build tasks
                # run_postbuild(ctx)
                if slack_notifications:
                    notify_build_step(f"Completed building for {arch_name}")

            # Sign and package immediately after building each architecture
            if sign_flag:
                log_info(f"\n🔏 Signing {ctx.architecture} build...")
                if slack_notifications:
                    notify_build_step(f"[{ctx.architecture}] Started signing")
                # Pass certificate_name for Windows signing
                if IS_WINDOWS:
                    sign(ctx, certificate_name)
                else:
                    sign(ctx)
                if slack_notifications:
                    notify_build_step(f"[{ctx.architecture}] Completed signing")

            if package_flag:
                log_info(f"\n📦 Packaging {ctx.architecture} build...")
                if slack_notifications:
                    package_type = (
                        "DMG" if IS_MACOS else "installer" if IS_WINDOWS else "AppImage"
                    )
                    notify_build_step(
                        f"[{ctx.architecture}] Started {package_type} creation"
                    )
                package(ctx)
                if slack_notifications:
                    package_type = (
                        "DMG" if IS_MACOS else "installer" if IS_WINDOWS else "AppImage"
                    )
                    notify_build_step(
                        f"[{ctx.architecture}] Completed {package_type} creation"
                    )

                # Upload to GCS after packaging
                gcs_uris = []
                if upload_gcs:
                    success, gcs_uris = upload_package_artifacts(ctx)
                    if not success:
                        log_warning("Failed to upload package artifacts to GCS")
                    elif gcs_uris and slack_notifications:
                        notify_gcs_upload(ctx.architecture, gcs_uris)
                    all_gcs_uris.extend(gcs_uris)

            built_contexts.append(ctx)

        # Handle universal build if requested
        if len(architectures) > 1 and universal:
            # Universal build: merge, sign and package
            log_info(f"\n{'='*60}")
            log_info("🔄 Creating universal binary...")
            log_info(f"{'='*60}")

            # Import merge function
            from modules.merge import merge_architectures

            # Get paths for the built apps
            arch1_app = built_contexts[0].get_app_path()
            arch2_app = built_contexts[1].get_app_path()

            # Clean up old universal output directory if it exists
            universal_dir = built_contexts[0].chromium_src / "out/Default_universal"
            if universal_dir.exists():
                log_info("🧹 Cleaning up old universal output directory...")
                from utils import safe_rmtree

                safe_rmtree(universal_dir)

            # Create fresh universal output path
            universal_dir.mkdir(parents=True, exist_ok=True)
            universal_app_path = universal_dir / built_contexts[0].NXTSCAPE_APP_NAME

            # Find universalizer script
            universalizer_script = root_dir / "build" / "universalizer_patched.py"

            # Merge the architectures
            if not merge_architectures(
                arch1_app, arch2_app, universal_app_path, universalizer_script
            ):
                raise RuntimeError(
                    "Failed to merge architectures into universal binary"
                )
            if slack_notifications:
                notify_build_step(
                    "Completed merging architectures into universal binary"
                )

            if sign_flag:
                if slack_notifications:
                    notify_build_step("[Universal] Started signing and notarization")
                sign_universal(built_contexts)
                if slack_notifications:
                    notify_build_step("[Universal] Completed signing and notarization")

            if package_flag:
                if slack_notifications:
                    package_type = (
                        "DMG" if IS_MACOS else "installer" if IS_WINDOWS else "AppImage"
                    )
                    notify_build_step(f"[Universal] Started {package_type} creation")
                package_universal(built_contexts)
                if slack_notifications:
                    package_type = (
                        "DMG" if IS_MACOS else "installer" if IS_WINDOWS else "AppImage"
                    )
                    notify_build_step(f"[Universal] Completed {package_type} creation")

                # Upload universal package to GCS
                universal_gcs_uris = []
                if upload_gcs:
                    # Use the first context with universal architecture override;
                    # the original architecture is restored after the upload.
                    universal_ctx = built_contexts[0]
                    original_arch = universal_ctx.architecture
                    universal_ctx.architecture = "universal"
                    success, universal_gcs_uris = upload_package_artifacts(
                        universal_ctx
                    )
                    if not success:
                        log_warning(
                            "Failed to upload universal package artifacts to GCS"
                        )
                    elif universal_gcs_uris and slack_notifications:
                        notify_gcs_upload("universal", universal_gcs_uris)
                    all_gcs_uris.extend(universal_gcs_uris)
                    universal_ctx.architecture = original_arch

        # Summary
        elapsed = time.time() - start_time
        mins = int(elapsed / 60)
        secs = int(elapsed % 60)

        log_info("\n" + "=" * 60)
        log_success(
            f"Build completed for {len(architectures)} architecture(s) in {mins}m {secs}s"
        )
        if universal and len(architectures) > 1:
            log_success("Universal binary created successfully!")
        log_info("=" * 60)

        # Notify build success (if enabled)
        if slack_notifications:
            notify_build_success(mins, secs, gcs_uris=all_gcs_uris)

    except KeyboardInterrupt:
        log_warning("\nBuild interrupted")
        if slack_notifications:
            notify_build_interrupted()
        sys.exit(130)
    except Exception as e:
        log_error(f"\nBuild failed: {e}")
        if slack_notifications:
            notify_build_failure(str(e))
        sys.exit(1)
@click.command()
@click.option(
    "--config",
    "-c",
    type=click.Path(exists=True, path_type=Path),
    help="Load configuration from YAML file",
)
@click.option("--clean", "-C", is_flag=True, default=False, help="Clean before build")
@click.option("--git-setup", "-g", is_flag=True, default=False, help="Git setup")
@click.option(
    "--apply-patches", "-p", is_flag=True, default=False, help="Apply patches"
)
@click.option(
    "--sign", "-s", is_flag=True, default=False, help="Sign and notarize the app"
)
@click.option(
    "--arch",
    "-a",
    type=click.Choice(["arm64", "x64"]),
    default=None,
    help="Architecture (defaults to platform-specific)",
)
@click.option(
    "--build-type",
    "-t",
    type=click.Choice(["debug", "release"]),
    default="debug",
    help="Build type",
)
@click.option(
    "--package",
    "-P",
    is_flag=True,
    default=False,
    help="Create package (DMG/AppImage/Installer)",
)
@click.option("--build", "-b", is_flag=True, default=False, help="Build")
@click.option(
    "--chromium-src",
    "-S",
    type=click.Path(exists=False, path_type=Path),
    help="Path to Chromium source directory",
)
@click.option(
    "--slack-notifications",
    "-n",
    is_flag=True,
    default=False,
    help="Enable Slack notifications",
)
@click.option(
    "--merge",
    nargs=2,
    type=click.Path(path_type=Path),
    metavar="ARCH1_APP ARCH2_APP",
    help="Merge two architecture builds: --merge path/to/arch1.app path/to/arch2.app",
)
@click.option(
    "--add-replace",
    type=click.Path(exists=True, path_type=Path),
    help="Add a file to chromium_src replacement directory: --add-replace /path/to/chromium/src/file --chromium-src /path/to/chromium/src",
)
@click.option(
    "--string-replace",
    is_flag=True,
    default=False,
    help="Apply string replacements to chromium files",
)
@click.option(
    "--patch-interactive",
    "-i",
    is_flag=True,
    default=False,
    help="Ask for confirmation before applying each patch",
)
@click.option(
    "--patch-commit",
    is_flag=True,
    default=False,
    help="Create a git commit after applying each patch",
)
@click.option(
    "--no-gcs-upload",
    is_flag=True,
    default=False,
    help="Skip uploading artifacts to Google Cloud Storage",
)
@click.option(
    "--upload-dist",
    type=click.Path(exists=True, path_type=Path),
    help="Upload pre-built artifacts from dist/<version> directory to GCS: --upload-dist dist/61",
)
@click.option(
    "--platform",
    type=click.Choice(["macos", "linux", "win"]),
    default=None,
    help="Override platform for GCS upload (auto-detected if not specified)",
)
def main(
    config,
    clean,
    git_setup,
    apply_patches,
    sign,
    arch,
    build_type,
    package,
    build,
    chromium_src,
    slack_notifications,
    merge,
    add_replace,
    string_replace,
    patch_interactive,
    patch_commit,
    no_gcs_upload,
    upload_dist,
    platform,
):
    """Simple build system for Nxtscape Browser"""
    # CLI entry point. Utility sub-commands (--string-replace, --add-replace,
    # --merge, --upload-dist) short-circuit with sys.exit(); anything else
    # falls through to the regular build workflow via build_main().

    # Validate chromium-src for commands that need it.
    # NOTE(review): when a config file is given without --chromium-src, this
    # check is skipped here and enforced later inside build_main().
    if add_replace or merge or string_replace or (not config and chromium_src is None):
        if not chromium_src:
            if add_replace:
                log_error("--add-replace requires --chromium-src to be specified")
                log_error(
                    "Example: python build.py --add-replace /path/to/chromium/src/chrome/file.cc --chromium-src /path/to/chromium/src"
                )
            elif merge:
                log_error("--merge requires --chromium-src to be specified")
                log_error(
                    "Example: python build.py --merge app1.app app2.app --chromium-src /path/to/chromium/src"
                )
            elif string_replace:
                log_error("--string-replace requires --chromium-src to be specified")
                log_error(
                    "Example: python build.py --string-replace --chromium-src /path/to/chromium/src"
                )
            else:
                log_error("--chromium-src is required when not using a config file")
                log_error(
                    "Example: python build.py --chromium-src /path/to/chromium/src"
                )
            sys.exit(1)

        # Validate chromium_src path exists
        if not chromium_src.exists():
            log_error(f"Chromium source directory does not exist: {chromium_src}")
            sys.exit(1)

    # Handle string-replace command
    if string_replace:
        # Get root directory
        root_dir = Path(__file__).parent.parent
        # Create a minimal context for string replacements
        from context import BuildContext

        ctx = BuildContext(
            root_dir=root_dir,
            chromium_src=chromium_src,
            architecture="",  # Use platform default
            build_type="debug",  # Not used for string replacements
        )
        # Apply string replacements
        if apply_string_replacements(ctx):
            sys.exit(0)
        else:
            sys.exit(1)

    # Handle add-replace command
    if add_replace:
        # Get root directory
        root_dir = Path(__file__).parent.parent
        # Call the function from chromium_replace module
        if add_file_to_replacements(add_replace, chromium_src, root_dir):
            sys.exit(0)
        else:
            sys.exit(1)

    # Handle merge command
    if merge:
        from modules.merge import handle_merge_command

        arch1_path, arch2_path = merge
        if handle_merge_command(arch1_path, arch2_path, chromium_src, sign, package):
            sys.exit(0)
        else:
            sys.exit(1)

    # Handle upload-dist command
    if upload_dist:
        # Get root directory
        root_dir = Path(__file__).parent.parent
        # Call the upload handler from gcs module
        if handle_upload_dist(upload_dist, root_dir, platform_override=platform):
            sys.exit(0)
        else:
            sys.exit(1)

    # Regular build workflow
    build_main(
        config_file=config,
        clean_flag=clean,
        git_setup_flag=git_setup,
        apply_patches_flag=apply_patches,
        sign_flag=sign,
        package_flag=package,
        build_flag=build,
        arch=arch or "",  # Pass empty string to use platform default
        build_type=build_type,
        chromium_src_dir=chromium_src,
        slack_notifications=slack_notifications,
        patch_interactive=patch_interactive,
        patch_commit=patch_commit,
        upload_gcs=not no_gcs_upload,  # Invert the flag
    )


if __name__ == "__main__":
    # standalone_mode=False lets exceptions propagate instead of being
    # converted by click into generic exit codes.
    main.main(standalone_mode=False)

View File

@@ -0,0 +1 @@
"""CLI package for BrowserOS build system"""

View File

@@ -0,0 +1,446 @@
#!/usr/bin/env python3
"""Build CLI - Modular build system for BrowserOS"""
import os
import sys
import time
from pathlib import Path
from typing import Optional
import typer
# Import common modules
from ..common.context import Context
from ..common.config import load_config, validate_required_envs
from ..common.pipeline import validate_pipeline, show_available_modules
from ..common.resolver import resolve_config, resolve_pipeline
from ..common.notify import (
notify_pipeline_start,
notify_pipeline_end,
notify_pipeline_error,
notify_module_start,
notify_module_completion,
set_build_context,
)
from ..common.module import ValidationError
from ..common.utils import (
log_error,
log_info,
log_success,
IS_MACOS,
IS_WINDOWS,
IS_LINUX,
)
# Import all module classes
from ..modules.setup.clean import CleanModule
from ..modules.setup.git import GitSetupModule, SparkleSetupModule
from ..modules.setup.configure import ConfigureModule
from ..modules.compile import CompileModule, UniversalBuildModule
from ..modules.patches.patches import PatchesModule
from ..modules.patches.series_patches import SeriesPatchesModule
from ..modules.resources.chromium_replace import ChromiumReplaceModule
from ..modules.resources.string_replaces import StringReplacesModule
from ..modules.resources.resources import ResourcesModule
from ..modules.upload import GCSUploadModule
# Platform-specific modules (imported unconditionally - validation handles platform checks)
from ..modules.sign.macos import MacOSSignModule
from ..modules.sign.windows import WindowsSignModule
from ..modules.sign.linux import LinuxSignModule
from ..modules.package.macos import MacOSPackageModule
from ..modules.package.windows import WindowsPackageModule
from ..modules.package.linux import LinuxPackageModule
# Registry of every pipeline module, keyed by the name used in configs,
# --modules lists, and EXECUTION_ORDER below.
AVAILABLE_MODULES = {
    # Setup & Environment
    "clean": CleanModule,
    "git_setup": GitSetupModule,
    "sparkle_setup": SparkleSetupModule,
    "configure": ConfigureModule,
    # Patches & Resources
    "patches": PatchesModule,
    "series_patches": SeriesPatchesModule,
    "chromium_replace": ChromiumReplaceModule,
    "string_replaces": StringReplacesModule,
    "resources": ResourcesModule,
    # Build
    "compile": CompileModule,
    "universal_build": UniversalBuildModule,  # macOS universal binary (arm64 + x64)
    # Sign (platform-specific, validated at runtime)
    "sign_macos": MacOSSignModule,
    "sign_windows": WindowsSignModule,
    "sign_linux": LinuxSignModule,
    # Package (platform-specific, validated at runtime)
    "package_macos": MacOSPackageModule,
    "package_windows": WindowsPackageModule,
    "package_linux": LinuxPackageModule,
    # Upload
    "upload_gcs": GCSUploadModule,
}
def _get_sign_module():
    """Return the platform-specific sign module name.

    Returns:
        One of "sign_macos", "sign_windows", or "sign_linux".

    Exits the process with status 1 when the current platform has no
    signing support.
    """
    if IS_MACOS():
        return "sign_macos"
    elif IS_WINDOWS():
        return "sign_windows"
    elif IS_LINUX():
        return "sign_linux"
    else:
        # Fixed copy-paste bug: the message previously said "packaging"
        # (copied from _get_package_module), but this helper selects signing.
        log_error("Unsupported platform for signing")
        sys.exit(1)
def _get_package_module():
    """Return the name of the packaging module for the current platform.

    Exits the process with status 1 when no platform-specific packaging
    module exists.
    """
    # Dispatch table: (platform predicate, module name) pairs, checked in order.
    platform_modules = (
        (IS_MACOS, "package_macos"),
        (IS_WINDOWS, "package_windows"),
        (IS_LINUX, "package_linux"),
    )
    for predicate, module_name in platform_modules:
        if predicate():
            return module_name
    log_error("Unsupported platform for packaging")
    sys.exit(1)
# Fixed execution order - flags enable/disable phases, order is always the same.
# NOTE: _get_sign_module()/_get_package_module() are evaluated at import time,
# so importing this module on an unsupported platform exits the process.
EXECUTION_ORDER = [
    # Phase 1: Setup & Clean
    ("setup", ["clean", "git_setup", "sparkle_setup"]),
    # Phase 2: Patches & Resources
    (
        "prep",
        ["resources", "chromium_replace", "string_replaces", "series_patches", "patches"],
    ),
    # Phase 3: Configure & Build
    ("build", ["configure", "compile"]),
    # Phase 4: Code Signing (platform-aware)
    ("sign", [_get_sign_module()]),
    # Phase 5: Packaging (platform-aware)
    ("package", [_get_package_module()]),
    # Phase 6: Upload
    ("upload", ["upload_gcs"]),
]
def execute_pipeline(
    ctx: Context,
    pipeline: list[str],
    available_modules: dict,
    pipeline_name: str = "build",
) -> None:
    """Execute a build pipeline by running modules sequentially.

    Args:
        ctx: Build context with paths and configuration
        pipeline: List of module names to execute in order
        available_modules: Dictionary mapping module names to module classes
        pipeline_name: Name of pipeline for notifications (default: "build")

    Raises:
        typer.Exit: On module validation failure, execution failure, or interrupt

    Design:
        - Executes modules sequentially in pipeline order
        - Validates each module before execution (fail fast)
        - Tracks timing for each module and total pipeline
        - Sends notifications at key lifecycle events
        - Handles interrupts (Ctrl+C) gracefully with cleanup
    """
    start_time = time.time()
    notify_pipeline_start(pipeline_name, pipeline)

    try:
        for module_name in pipeline:
            log_info(f"\n{'='*70}")
            log_info(f"🔧 Running module: {module_name}")
            log_info(f"{'='*70}")

            # Instantiate module
            module_class = available_modules[module_name]
            module = module_class()

            # Notify module start and track timing
            notify_module_start(module_name)
            module_start = time.time()

            # Validate right before executing (fail fast)
            try:
                module.validate(ctx)
            except ValidationError as e:
                log_error(f"Validation failed for {module_name}: {e}")
                notify_pipeline_error(
                    pipeline_name, f"{module_name} validation failed: {e}"
                )
                raise typer.Exit(1)

            # Execute module
            try:
                module.execute(ctx)
                module_duration = time.time() - module_start
                notify_module_completion(module_name, module_duration)
                log_success(f"Module {module_name} completed in {module_duration:.1f}s")
            except Exception as e:
                log_error(f"Module {module_name} failed: {e}")
                notify_pipeline_error(pipeline_name, f"{module_name} failed: {e}")
                raise typer.Exit(1)

        # Pipeline completed successfully
        duration = time.time() - start_time
        mins = int(duration / 60)
        secs = int(duration % 60)
        log_info("\n" + "=" * 70)
        log_success(f"✅ Pipeline completed successfully in {mins}m {secs}s")
        log_info("=" * 70)
        notify_pipeline_end(pipeline_name, duration)

    except KeyboardInterrupt:
        log_error("\n❌ Pipeline interrupted")
        notify_pipeline_error(pipeline_name, "Interrupted by user")
        raise typer.Exit(130)
    except typer.Exit:
        # Re-raise typer.Exit (from validation/execution failures) unchanged;
        # the broad Exception handler below must not convert its exit code.
        raise
    except Exception as e:
        log_error(f"\n❌ Pipeline failed: {e}")
        notify_pipeline_error(pipeline_name, str(e))
        raise typer.Exit(1)
def main(
    config: Optional[Path] = typer.Option(
        None,
        "--config",
        "-c",
        help="Load configuration from YAML file",
        exists=True,
    ),
    modules: Optional[str] = typer.Option(
        None,
        "--modules",
        "-m",
        help="Comma-separated list of modules to run",
    ),
    list_modules: bool = typer.Option(
        False,
        "--list",
        "-l",
        help="List all available modules and exit",
    ),
    # Pipeline phase flags (auto-ordered execution)
    setup: bool = typer.Option(
        False,
        "--setup",
        help="Run setup phase (clean, git_setup, sparkle_setup)",
    ),
    prep: bool = typer.Option(
        False,
        "--prep",
        help="Run prep phase (patches, chromium_replace, string_replaces, resources)",
    ),
    build: bool = typer.Option(
        False,
        "--build",
        help="Run build phase (configure, compile)",
    ),
    sign: bool = typer.Option(
        False,
        "--sign",
        help="Run sign phase (platform-specific: sign_macos/windows/linux)",
    ),
    package: bool = typer.Option(
        False,
        "--package",
        help="Run package phase (platform-specific: package_macos/windows/linux)",
    ),
    upload: bool = typer.Option(
        False,
        "--upload",
        help="Run upload phase (upload_gcs)",
    ),
    # Global options that override config
    arch: Optional[str] = typer.Option(
        None,
        "--arch",
        "-a",
        help="Architecture (arm64, x64, universal)",
    ),
    build_type: Optional[str] = typer.Option(
        None,
        "--build-type",
        "-t",
        help="Build type (debug or release)",
    ),
    chromium_src: Optional[Path] = typer.Option(
        None,
        "--chromium-src",
        "-S",
        help="Path to Chromium source directory",
    ),
):
    """BrowserOS Build System - Modular pipeline executor

    Build BrowserOS using phase flags (auto-ordered), explicit modules, or configs.

    \b
    Phase Flags (Recommended - Auto-Ordered):
    browseros build --setup --build --sign --package
    browseros build --build --sign # Skip setup
    browseros build --package --sign # Flags work in any order!

    \b
    Explicit Modules (Power Users):
    browseros build --modules clean,compile,sign_macos

    \b
    Config Files (CI/CD):
    browseros build --config release.yaml --arch arm64

    \b
    List Available:
    browseros build --list # Show all modules and phases

    Note: Phase flags always execute in correct order regardless of how you write them.
    --sign and --package auto-select platform (macos/windows/linux)
    """
    # Handle --list flag: informational only, no validation needed
    if list_modules:
        show_available_modules(AVAILABLE_MODULES)
        return

    # Check for mutually exclusive options: exactly one selection mechanism
    # (config file, explicit module list, or phase flags) must be used.
    has_config = config is not None
    has_modules = modules is not None
    has_flags = any([setup, prep, build, sign, package, upload])
    options_provided = sum([has_config, has_modules, has_flags])
    if options_provided == 0:
        typer.echo(
            "Error: Specify --config, --modules, or phase flags (--setup, --build, etc.)\n"
        )
        typer.echo("Use --help for usage information")
        typer.echo("Use --list to see available modules")
        raise typer.Exit(1)
    if options_provided > 1:
        log_error("Specify only ONE of: --config, --modules, or phase flags")
        log_error("Examples:")
        log_error(" browseros build --setup --build --sign")
        log_error(" browseros build --modules clean,compile")
        log_error(" browseros build --config release.yaml")
        raise typer.Exit(1)

    # CONFIG MODE validation: YAML controls everything, CLI build flags not allowed
    if has_config:
        conflicting_flags = []
        if arch is not None:
            conflicting_flags.append("--arch")
        if build_type is not None:
            conflicting_flags.append("--build-type")
        if conflicting_flags:
            log_error(
                f"CONFIG MODE: Cannot use {', '.join(conflicting_flags)} with --config"
            )
            log_error("When using --config, ALL build parameters come from YAML")
            log_error("Remove the conflicting flags or don't use --config")
            raise typer.Exit(1)

    log_info("🚀 BrowserOS Build System")
    log_info("=" * 70)

    # Load YAML config if provided
    config_data = load_config(config) if config else None

    # Build CLI arguments dictionary for resolver
    root_dir = Path(__file__).parent.parent.parent
    cli_args = {
        "chromium_src": chromium_src,
        "arch": arch,
        "build_type": build_type,
        "modules": modules,
        "setup": setup,
        "prep": prep,
        "build": build,
        "sign": sign,
        "package": package,
        "upload": upload,
    }

    # Resolve build context (CONFIG mode or DIRECT mode)
    try:
        ctx = resolve_config(cli_args, config_data, root_dir=root_dir)
    except ValueError as e:
        log_error(str(e))
        raise typer.Exit(1)

    # Resolve pipeline (CONFIG mode or DIRECT mode)
    try:
        pipeline = resolve_pipeline(
            cli_args,
            config_data,
            execution_order=EXECUTION_ORDER,
        )
    except ValueError as e:
        log_error(str(e))
        raise typer.Exit(1)

    # Show execution plan for flag-based mode
    if has_flags:
        log_info("\n📋 Execution Plan (auto-ordered):")
        log_info("-" * 70)
        phase_names = []
        if setup:
            phase_names.append("setup")
        if prep:
            phase_names.append("prep")
        if build:
            phase_names.append("build")
        if sign:
            phase_names.append(f"sign (→ {_get_sign_module()})")
        if package:
            phase_names.append(f"package (→ {_get_package_module()})")
        if upload:
            phase_names.append("upload")
        for phase_name in phase_names:
            log_info(f"{phase_name}")
        # BUG FIX: was ''.join(pipeline), which printed module names run
        # together ("cleancompilesign_macos"); join with an arrow instead.
        log_info(f"\n Pipeline: {' → '.join(pipeline)}")
        log_info("-" * 70)

    # Validate required environment variables (YAML-specific)
    if config_data:
        required_envs = config_data.get("required_envs", [])
        if required_envs:
            validate_required_envs(required_envs)

    # Validate pipeline modules exist
    validate_pipeline(pipeline, AVAILABLE_MODULES)

    # Set Windows-specific environment
    if IS_WINDOWS():
        os.environ["DEPOT_TOOLS_WIN_TOOLCHAIN"] = "0"
        log_info("Set DEPOT_TOOLS_WIN_TOOLCHAIN=0 for Windows build")

    log_info(f"📍 Root: {root_dir}")
    log_info(f"📍 Chromium: {ctx.chromium_src}")
    log_info(f"📍 Architecture: {ctx.architecture}")
    log_info(f"📍 Build type: {ctx.build_type}")
    log_info(f"📍 Output: {ctx.out_dir}")
    # BUG FIX: same ''.join(pipeline) separator problem as above.
    log_info(f"📍 Pipeline: {' → '.join(pipeline)}")
    log_info("=" * 70)

    # Set notification context for OS and architecture
    os_name = "macOS" if IS_MACOS() else "Windows" if IS_WINDOWS() else "Linux"
    set_build_context(os_name, ctx.architecture)

    # Execute pipeline
    execute_pipeline(ctx, pipeline, AVAILABLE_MODULES, pipeline_name="build")

View File

@@ -0,0 +1,413 @@
"""
Build CLI - Main build command
This module uses relative imports and must be run as a module:
python -m build.cli.build
Or via the installed entry point:
browseros build
"""
import os
import sys
import time
from pathlib import Path
from typing import Optional, Tuple
import typer
# Import common modules
from ..common.context import BuildContext
from ..common.utils import (
load_config,
log_error,
log_info,
log_warning,
log_success,
IS_MACOS,
IS_WINDOWS,
IS_LINUX,
)
# Import build modules
from ..modules.setup.clean import clean
from ..modules.setup.git import setup_git, setup_sparkle
from ..modules.patches.patches import apply_patches
from ..modules.resources.resources import copy_resources
from ..modules.resources.chromium_replace import replace_chromium_files
from ..modules.resources.string_replaces import apply_string_replacements
from ..modules.setup.configure import configure
from ..modules.compile import build as build_step
from ..modules.sign import sign, sign_universal, check_signing_environment
from ..modules.package import package, package_universal
from ..modules.upload import upload_package_artifacts
def main(
    config: Optional[Path] = typer.Option(
        None,
        "--config",
        "-c",
        help="Load configuration from YAML file",
        exists=True,
    ),
    clean_flag: bool = typer.Option(
        False,
        "--clean",
        "-C",
        help="Clean before build",
    ),
    git_setup: bool = typer.Option(
        False,
        "--git-setup",
        "-g",
        help="Git setup",
    ),
    apply_patches_flag: bool = typer.Option(
        False,
        "--apply-patches",
        "-p",
        help="Apply patches",
    ),
    sign_flag: bool = typer.Option(
        False,
        "--sign",
        "-s",
        help="Sign and notarize the app",
    ),
    arch: Optional[str] = typer.Option(
        None,
        "--arch",
        "-a",
        help="Architecture (arm64, x64) - defaults to platform-specific",
    ),
    build_type: str = typer.Option(
        "debug",
        "--build-type",
        "-t",
        help="Build type (debug or release)",
    ),
    package_flag: bool = typer.Option(
        False,
        "--package",
        "-P",
        help="Create package (DMG/AppImage/Installer)",
    ),
    build_flag: bool = typer.Option(
        False,
        "--build",
        "-b",
        help="Build",
    ),
    chromium_src: Optional[Path] = typer.Option(
        None,
        "--chromium-src",
        "-S",
        help="Path to Chromium source directory",
    ),
    slack_notifications: bool = typer.Option(
        False,
        "--slack-notifications",
        "-n",
        help="Enable Slack notifications",
    ),
    merge: Optional[Tuple[str, str]] = typer.Option(
        None,
        "--merge",
        help="Merge two architecture builds: --merge path/to/arch1.app path/to/arch2.app",
        metavar="ARCH1_APP ARCH2_APP",
    ),
    patch_interactive: bool = typer.Option(
        False,
        "--patch-interactive",
        "-i",
        help="Ask for confirmation before applying each patch",
    ),
):
    """Build BrowserOS browser

    Simple build system for BrowserOS. Can run individual steps or full pipeline.

    Precedence: when a --config YAML is given, values present in it override the
    corresponding CLI flags (build type, architecture, step toggles), EXCEPT
    --chromium-src, where the CLI value wins over paths.chromium_src.

    Per-architecture flow: clean/git-setup/patches run only for the first
    architecture; configure/compile/sign/package run for each. When
    build.universal is set with multiple architectures, the per-arch apps are
    merged into a universal binary, then signed/packaged/uploaded.
    """
    # Validate chromium-src for commands that need it
    if merge or (not config and chromium_src is None):
        if not chromium_src:
            if merge:
                log_error("--merge requires --chromium-src to be specified")
                log_error(
                    "Example: browseros build --merge app1.app app2.app --chromium-src /path/to/chromium/src"
                )
            else:
                log_error("--chromium-src is required when not using a config file")
                log_error(
                    "Example: browseros build --chromium-src /path/to/chromium/src"
                )
            raise typer.Exit(1)
        # Validate chromium_src path exists
        if not chromium_src.exists():
            log_error(f"Chromium source directory does not exist: {chromium_src}")
            raise typer.Exit(1)
    # Handle merge command
    # --merge is a standalone mode: merge two prebuilt apps, then exit.
    if merge:
        from ..modules.package.merge import handle_merge_command

        arch1_path, arch2_path = merge
        # Convert strings to Path objects
        arch1_path = Path(arch1_path)
        arch2_path = Path(arch2_path)
        if handle_merge_command(arch1_path, arch2_path, chromium_src, sign_flag, package_flag):
            raise typer.Exit(0)
        else:
            raise typer.Exit(1)
    # Validate arch and build_type choices
    if arch and arch not in ["arm64", "x64"]:
        log_error(f"Invalid architecture: {arch}. Must be 'arm64' or 'x64'")
        raise typer.Exit(1)
    if build_type not in ["debug", "release"]:
        log_error(f"Invalid build type: {build_type}. Must be 'debug' or 'release'")
        raise typer.Exit(1)
    # =============================================================================
    # Main Build Orchestration
    # =============================================================================
    log_info("🚀 BrowserOS Build System")
    log_info("=" * 50)
    # Check signing environment (macOS)
    if sign_flag and IS_MACOS():
        if not check_signing_environment():
            raise typer.Exit(1)
    # Set Windows-specific environment variables
    if IS_WINDOWS():
        os.environ["DEPOT_TOOLS_WIN_TOOLCHAIN"] = "0"
        log_info("🔧 Set DEPOT_TOOLS_WIN_TOOLCHAIN=0 for Windows build")
    # Setup paths
    root_dir = Path(__file__).parent.parent.parent
    # Initialize chromium_src as None - will be set from CLI or config
    chromium_src_path = None
    gn_flags_file = None
    architectures = [arch] if arch else []
    universal = False
    certificate_name = None  # For Windows signing
    # Load config if provided
    if config:
        config_data = load_config(config)
        log_info(f"📄 Loaded config from: {config}")
        # Override parameters from config
        if "build" in config_data:
            build_type = config_data["build"].get("type", build_type)
            arch = config_data["build"].get("architecture", arch)
            # Check for multi-architecture builds
            if "architectures" in config_data["build"]:
                architectures = config_data["build"]["architectures"]
                universal = config_data["build"].get("universal", False)
        if "steps" in config_data:
            clean_flag = config_data["steps"].get("clean", clean_flag)
            git_setup = config_data["steps"].get("git_setup", git_setup)
            apply_patches_flag = config_data["steps"].get("apply_patches", apply_patches_flag)
            build_flag = config_data["steps"].get("build", build_flag)
            sign_flag = config_data["steps"].get("sign", sign_flag)
            package_flag = config_data["steps"].get("package", package_flag)
        # Override slack notifications from config if not explicitly set via CLI
        if "notifications" in config_data:
            slack_notifications = config_data["notifications"].get("slack", slack_notifications)
        if "gn_flags" in config_data and "file" in config_data["gn_flags"]:
            gn_flags_file = Path(config_data["gn_flags"]["file"])
        # Get chromium_src from config (only if not provided via CLI)
        if not chromium_src and "paths" in config_data and "chromium_src" in config_data["paths"]:
            chromium_src_path = Path(config_data["paths"]["chromium_src"])
            log_info(f"📁 Using Chromium source from config: {chromium_src_path}")
        # Get Windows signing certificate name from config
        if IS_WINDOWS() and "signing" in config_data and "certificate_name" in config_data["signing"]:
            certificate_name = config_data["signing"]["certificate_name"]
            log_info(f"🔏 Using certificate for signing: {certificate_name}")
    # CLI takes precedence over config
    if chromium_src:
        chromium_src_path = chromium_src
        log_info(f"📁 Using Chromium source from CLI: {chromium_src_path}")
    # Enforce chromium_src requirement
    if not chromium_src_path:
        log_error("Chromium source directory is required!")
        log_error("Provide it via --chromium-src CLI option or paths.chromium_src in config YAML")
        log_error("Example: browseros build --chromium-src /path/to/chromium/src")
        raise typer.Exit(1)
    # Validate chromium_src path exists
    if not chromium_src_path.exists():
        log_error(f"Chromium source directory does not exist: {chromium_src_path}")
        log_error("Please provide a valid chromium source path")
        raise typer.Exit(1)
    # If no architectures specified, use platform default
    if not architectures:
        from ..common.utils import get_platform_arch

        architectures = [get_platform_arch()]
        log_info(f"📍 Using platform default architecture: {architectures[0]}")
    # Display build configuration
    log_info(f"📍 Root: {root_dir}")
    log_info(f"📍 Chromium source: {chromium_src_path}")
    log_info(f"📍 Architectures: {architectures}")
    log_info(f"📍 Universal build: {universal}")
    log_info(f"📍 Build type: {build_type}")
    # Start time for overall build
    start_time = time.time()
    # Run build steps
    try:
        built_contexts = []
        # Build each architecture separately
        for arch_name in architectures:
            log_info(f"\n{'='*60}")
            log_info(f"🏗️ Building for architecture: {arch_name}")
            log_info(f"{'='*60}")
            ctx = BuildContext(
                root_dir=root_dir,
                chromium_src=chromium_src_path,
                architecture=arch_name,
                build_type=build_type,
            )
            log_info(f"📍 Chromium: {ctx.chromium_version}")
            log_info(f"📍 BrowserOS: {ctx.browseros_version}")
            log_info(f"📍 Output directory: {ctx.out_dir}")
            # Clean (only for first architecture to avoid conflicts)
            if clean_flag and arch_name == architectures[0]:
                clean(ctx)
            # Git setup (only once for first architecture)
            if git_setup and arch_name == architectures[0]:
                setup_git(ctx)
            # Apply patches (only once for first architecture)
            if apply_patches_flag and arch_name == architectures[0]:
                # First do chromium file replacements
                replace_chromium_files(ctx)
                # Then apply string replacements
                apply_string_replacements(ctx)
                # Setup sparkle (macOS only)
                if IS_MACOS():
                    setup_sparkle(ctx)
                else:
                    log_info("Skipping Sparkle setup (macOS only)")
                # Apply patches
                apply_patches(ctx, interactive=patch_interactive, commit_each=False)
            # Copy resources for each architecture (YAML filters by arch)
            if apply_patches_flag:
                copy_resources(ctx, commit_each=False)
            # Build for this architecture
            if build_flag:
                configure(ctx, gn_flags_file)
                build_step(ctx)
            # Sign and package immediately after building each architecture
            if sign_flag:
                log_info(f"\n🔏 Signing {ctx.architecture} build...")
                # Pass certificate_name for Windows signing
                if IS_WINDOWS():
                    sign(ctx, certificate_name)
                else:
                    sign(ctx)
            if package_flag:
                log_info(f"\n📦 Packaging {ctx.architecture} build...")
                package(ctx)
                # Upload to GCS after packaging
                upload_package_artifacts(ctx)
            built_contexts.append(ctx)
        # Handle universal build if requested
        if len(architectures) > 1 and universal:
            # Universal build: merge, sign and package
            log_info(f"\n{'='*60}")
            log_info("🔄 Creating universal binary...")
            log_info(f"{'='*60}")
            # Import merge function
            from ..modules.package.merge import merge_architectures

            # Get paths for the built apps
            arch1_app = built_contexts[0].get_app_path()
            arch2_app = built_contexts[1].get_app_path()
            # Clean up old universal output directory if it exists
            universal_dir = built_contexts[0].chromium_src / "out/Default_universal"
            if universal_dir.exists():
                log_info("🧹 Cleaning up old universal output directory...")
                from ..common.utils import safe_rmtree

                safe_rmtree(universal_dir)
            # Create fresh universal output path
            universal_dir.mkdir(parents=True, exist_ok=True)
            universal_app_path = universal_dir / built_contexts[0].BROWSEROS_APP_NAME
            # Find universalizer script
            universalizer_script = root_dir / "build" / "modules" / "package" / "universalizer_patched.py"
            # Merge the architectures
            if not merge_architectures(
                arch1_app, arch2_app, universal_app_path, universalizer_script
            ):
                raise RuntimeError("Failed to merge architectures into universal binary")
            if sign_flag:
                sign_universal(built_contexts)
            if package_flag:
                package_universal(built_contexts)
                # Upload universal package to GCS
                # Use the first context with universal architecture override
                # NOTE(review): mutating ctx.architecture in place and restoring
                # it afterwards is intentional but fragile — confirm no module
                # caches the value between these calls.
                universal_ctx = built_contexts[0]
                original_arch = universal_ctx.architecture
                universal_ctx.architecture = "universal"
                upload_package_artifacts(universal_ctx)
                universal_ctx.architecture = original_arch
        # Summary
        elapsed = time.time() - start_time
        mins = int(elapsed / 60)
        secs = int(elapsed % 60)
        log_info("\n" + "=" * 60)
        log_success(
            f"Build completed for {len(architectures)} architecture(s) in {mins}m {secs}s"
        )
        if universal and len(architectures) > 1:
            log_success("Universal binary created successfully!")
        log_info("=" * 60)
    except KeyboardInterrupt:
        log_warning("\nBuild interrupted")
        raise typer.Exit(130)
    except Exception as e:
        log_error(f"\nBuild failed: {e}")
        raise typer.Exit(1)

View File

@@ -0,0 +1,408 @@
#!/usr/bin/env python3
"""
Dev CLI - Chromium patch management tool
A git-like patch management system for maintaining patches against Chromium.
Enables extracting, applying, and managing patches across Chromium upgrades.
"""
import yaml
from pathlib import Path
from typing import Optional
import typer
from typer import Typer, Option, Argument
# Import from common and utils
from ..common.context import Context
from ..common.utils import log_info, log_error, log_success, log_warning
def create_build_context(chromium_src: Optional[Path] = None) -> Optional[Context]:
    """Build a Context for dev CLI operations, or return None on failure.

    Every failure mode (missing argument, nonexistent directory, unexpected
    construction error) is logged with a human-readable message instead of
    letting an exception propagate to the caller.
    """
    try:
        # Guard: the caller must supply a checkout location.
        if not chromium_src:
            log_error("Chromium source directory not specified")
            log_info(
                "Use --chromium-src option to specify the Chromium source directory"
            )
            return None
        # Guard: the location must actually exist on disk.
        if not chromium_src.exists():
            log_error(f"Chromium source directory does not exist: {chromium_src}")
            return None
        # Architecture/build_type are placeholders; patch commands ignore them.
        return Context(
            root_dir=Path.cwd(),
            chromium_src=chromium_src,
            architecture="",
            build_type="debug",
        )
    except Exception as exc:
        log_error(f"Failed to create build context: {exc}")
        return None
# Create the Typer app
# Rich "pretty exceptions" are disabled so tracebacks stay plain and never
# echo local variables.
app = Typer(
    name="dev",
    help="BrowserOS dev CLI",
    no_args_is_help=True,
    pretty_exceptions_enable=False,
    pretty_exceptions_show_locals=False,
)
# State class to hold global options
class State:
    """Mutable holder for the global CLI options set by the root callback."""

    def __init__(self):
        # Populated by main() before any sub-command runs
        self.chromium_src: Optional[Path] = None  # --chromium-src / -S
        self.verbose: bool = False  # --verbose / -v
        self.quiet: bool = False  # --quiet / -q


# Single module-level instance shared by every command handler
state = State()
@app.callback()
def main(
    chromium_src: Optional[Path] = Option(
        None,
        "--chromium-src",
        "-S",
        help="Path to Chromium source directory",
        exists=True,
    ),
    verbose: bool = Option(False, "--verbose", "-v", help="Enable verbose output"),
    quiet: bool = Option(False, "--quiet", "-q", help="Suppress non-essential output"),
):
    """
    Dev CLI - Chromium patch management tool

    This tool provides git-like commands for managing patches against Chromium:

    Extract patches from commits:
    browseros dev extract commit HEAD
    browseros dev extract range HEAD~5 HEAD

    Apply patches:
    browseros dev apply all
    browseros dev apply feature llm-chat

    Manage features:
    browseros dev feature list
    browseros dev feature add my-feature HEAD
    browseros dev feature show my-feature
    """
    # Stash the global options on the shared State so sub-commands can read
    # them (typer invokes this callback before any sub-command runs).
    state.chromium_src = chromium_src
    state.verbose = verbose
    state.quiet = quiet
@app.command()
def status():
    """Show dev CLI status.

    Reports the resolved Chromium source directory, the number of individual
    patches under chromium_patches/, and the number of features defined in
    features.yaml (both relative to the context's root_dir).
    """
    log_info("Dev CLI Status")
    log_info("-" * 40)
    build_ctx = create_build_context(state.chromium_src)
    if build_ctx:
        log_success(f"Chromium source: {build_ctx.chromium_src}")
        # Check for patches directory
        patches_dir = build_ctx.root_dir / "chromium_patches"
        if patches_dir.exists():
            patch_count = len(list(patches_dir.rglob("*.patch")))
            log_info(f"Individual patches: {patch_count}")
        else:
            log_warning("No patches directory found")
        # Check for features.yaml
        features_file = build_ctx.root_dir / "features.yaml"
        if features_file.exists():
            with open(features_file) as f:
                # BUG FIX: safe_load() returns None for an empty file, which
                # crashed the .get() below with AttributeError. Default to {}.
                features = yaml.safe_load(f) or {}
            feature_count = len(features.get("features", {}))
            log_info(f"Features defined: {feature_count}")
        else:
            log_warning("No features.yaml found")
    else:
        log_error("Failed to create build context")
# Create sub-apps for extract, apply, and feature commands
# (each disables Rich pretty exceptions, matching the root app's settings)
extract_app = Typer(
    name="extract",
    help="Extract patches from commits",
    pretty_exceptions_enable=False,
    pretty_exceptions_show_locals=False,
)
apply_app = Typer(
    name="apply",
    help="Apply patches to Chromium",
    pretty_exceptions_enable=False,
    pretty_exceptions_show_locals=False,
)
feature_app = Typer(
    name="feature",
    help="Manage features",
    pretty_exceptions_enable=False,
    pretty_exceptions_show_locals=False,
)
# Add sub-apps to main app
# Mounted as `dev extract ...`, `dev apply ...`, `dev feature ...`
app.add_typer(extract_app, name="extract")
app.add_typer(apply_app, name="apply")
app.add_typer(feature_app, name="feature")
# Extract commands
@extract_app.command(name="commit")
def extract_commit(
    commit: str = Argument(..., help="Git commit reference (e.g., HEAD)"),
    output: Optional[Path] = Option(None, "--output", "-o", help="Output directory"),
    interactive: bool = Option(
        True, "--interactive/--no-interactive", "-i/-n", help="Interactive mode"
    ),
    force: bool = Option(False, "--force", "-f", help="Overwrite existing patches"),
    include_binary: bool = Option(False, "--include-binary", help="Include binary files"),
    base: Optional[str] = Option(
        None, "--base", help="Extract full diff from base commit for files in COMMIT"
    ),
):
    """Extract patches from a single commit"""
    build_ctx = create_build_context(state.chromium_src)
    if not build_ctx:
        raise typer.Exit(1)

    from ..modules.extract import ExtractCommitModule

    extractor = ExtractCommitModule()
    options = {
        "commit": commit,
        "output": output,
        "interactive": interactive,
        "verbose": state.verbose,
        "force": force,
        "include_binary": include_binary,
        "base": base,
    }
    try:
        extractor.validate(build_ctx)
        extractor.execute(build_ctx, **options)
    except Exception as exc:
        log_error(f"Failed to extract commit: {exc}")
        raise typer.Exit(1)
@extract_app.command(name="patch")
def extract_patch_cmd(
chromium_path: str = Argument(..., help="Chromium file path (e.g., chrome/common/foo.h)"),
base: str = Option(..., "--base", "-b", help="Base commit to diff against"),
force: bool = Option(False, "--force", "-f", help="Overwrite existing patch without prompting"),
):
"""Extract patch for a specific file"""
ctx = create_build_context(state.chromium_src)
if not ctx:
raise typer.Exit(1)
from ..modules.extract import extract_single_file_patch
success, error = extract_single_file_patch(ctx, chromium_path, base, force)
if not success:
log_error(error or "Unknown error")
raise typer.Exit(1)
log_success(f"Successfully extracted patch for: {chromium_path}")
@extract_app.command(name="range")
def extract_range(
start: str = Argument(..., help="Start commit (exclusive)"),
end: str = Argument(..., help="End commit (inclusive)"),
output: Optional[Path] = Option(None, "--output", "-o", help="Output directory"),
interactive: bool = Option(
True, "--interactive/--no-interactive", "-i/-n", help="Interactive mode"
),
force: bool = Option(False, "--force", "-f", help="Overwrite existing patches"),
include_binary: bool = Option(False, "--include-binary", help="Include binary files"),
squash: bool = Option(False, "--squash", help="Squash all commits into single patches"),
base: Optional[str] = Option(
None,
"--base",
help="Use different base for diff (full diff from base for files in range)",
),
):
"""Extract patches from a range of commits"""
ctx = create_build_context(state.chromium_src)
if not ctx:
raise typer.Exit(1)
from ..modules.extract import ExtractRangeModule
module = ExtractRangeModule()
try:
module.validate(ctx)
module.execute(
ctx,
start=start,
end=end,
output=output,
interactive=interactive,
verbose=state.verbose,
force=force,
include_binary=include_binary,
squash=squash,
base=base,
)
except Exception as e:
log_error(f"Failed to extract range: {e}")
raise typer.Exit(1)
# Apply commands
@apply_app.command(name="all")
def apply_all(
    interactive: bool = Option(
        True, "--interactive/--no-interactive", "-i/-n", help="Interactive mode"
    ),
    commit: bool = Option(False, "--commit", "-c", help="Commit after each patch"),
    reset_to: Optional[str] = Option(
        None, "--reset-to", "-r", help="Reset files to this commit before applying patches"
    ),
):
    """Apply all patches from chromium_patches/"""
    build_ctx = create_build_context(state.chromium_src)
    if not build_ctx:
        raise typer.Exit(1)

    from ..modules.apply import ApplyAllModule

    applier = ApplyAllModule()
    try:
        applier.validate(build_ctx)
        applier.execute(
            build_ctx, interactive=interactive, commit=commit, reset_to=reset_to
        )
    except Exception as exc:
        log_error(f"Failed to apply patches: {exc}")
        raise typer.Exit(1)
@apply_app.command(name="feature")
def apply_feature(
feature_name: str = Argument(..., help="Feature name to apply"),
interactive: bool = Option(
True, "--interactive/--no-interactive", "-i/-n", help="Interactive mode"
),
commit: bool = Option(False, "--commit", "-c", help="Commit after applying"),
reset_to: Optional[str] = Option(
None, "--reset-to", "-r", help="Reset files to this commit before applying patches"
),
):
"""Apply patches for a specific feature"""
ctx = create_build_context(state.chromium_src)
if not ctx:
raise typer.Exit(1)
from ..modules.apply import ApplyFeatureModule
module = ApplyFeatureModule()
try:
module.validate(ctx)
module.execute(
ctx, feature_name=feature_name, interactive=interactive, commit=commit, reset_to=reset_to
)
except Exception as e:
log_error(f"Failed to apply feature: {e}")
raise typer.Exit(1)
@apply_app.command(name="patch")
def apply_patch_cmd(
chromium_path: str = Argument(..., help="Chromium file path (e.g., chrome/common/foo.h)"),
reset_to: Optional[str] = Option(
None, "--reset-to", "-r", help="Reset file to this commit before applying patch"
),
dry_run: bool = Option(False, "--dry-run", help="Test without applying"),
):
"""Apply patch for a specific file"""
ctx = create_build_context(state.chromium_src)
if not ctx:
raise typer.Exit(1)
from ..modules.apply import apply_single_file_patch
success, error = apply_single_file_patch(ctx, chromium_path, reset_to, dry_run)
if not success:
log_error(error or "Unknown error")
raise typer.Exit(1)
log_success(f"Successfully applied patch for: {chromium_path}")
# Feature commands
@feature_app.command(name="list")
def feature_list():
    """List all defined features"""
    build_ctx = create_build_context(state.chromium_src)
    if not build_ctx:
        raise typer.Exit(1)

    from ..modules.feature import ListFeaturesModule

    lister = ListFeaturesModule()
    try:
        lister.validate(build_ctx)
        lister.execute(build_ctx)
    except Exception as exc:
        log_error(f"Failed to list features: {exc}")
        raise typer.Exit(1)
@feature_app.command(name="show")
def feature_show(
feature_name: str = Argument(..., help="Feature name to show"),
):
"""Show details of a specific feature"""
ctx = create_build_context(state.chromium_src)
if not ctx:
raise typer.Exit(1)
from ..modules.feature import ShowFeatureModule
module = ShowFeatureModule()
try:
module.validate(ctx)
module.execute(ctx, feature_name=feature_name)
except Exception as e:
log_error(f"Failed to show feature: {e}")
raise typer.Exit(1)
@feature_app.command(name="add")
def feature_add(
feature_name: str = Argument(..., help="Feature name to add"),
commit: str = Argument(..., help="Git commit reference"),
description: Optional[str] = Option(
None, "--description", "-d", help="Feature description"
),
):
"""Add a new feature from a commit"""
ctx = create_build_context(state.chromium_src)
if not ctx:
raise typer.Exit(1)
from ..modules.feature import AddFeatureModule
module = AddFeatureModule()
try:
module.validate(ctx)
module.execute(
ctx, feature_name=feature_name, commit=commit, description=description
)
except Exception as e:
log_error(f"Failed to add feature: {e}")
raise typer.Exit(1)
if __name__ == "__main__":
app()

View File

@@ -0,0 +1,25 @@
"""Common modules for the BrowserOS build system"""
from .context import Context, ArtifactRegistry, PathConfig, BuildConfig
from .config import load_config, validate_required_envs
from .notify import Notifier, get_notifier
from .module import CommandModule, ValidationError
from .env import EnvConfig
__all__ = [
# Core context
'Context',
# Sub-components
'ArtifactRegistry',
'PathConfig',
'BuildConfig',
'CommandModule',
'ValidationError',
'EnvConfig',
# Config loading
'load_config',
'validate_required_envs',
# Notifications
'Notifier',
'get_notifier',
]

View File

@@ -0,0 +1,68 @@
#!/usr/bin/env python3
"""YAML configuration parser with environment variable substitution"""
import os
import yaml
from pathlib import Path
from typing import Any, Dict
from .utils import log_info, log_error, log_warning
def env_var_constructor(loader, node):
    """Resolve a ``!env NAME`` YAML tag to ``os.environ["NAME"]``.

    Usage in YAML:
        chromium_src: !env CHROMIUM_SRC
        path: !env HOME

    An unset variable resolves to the empty string (with a warning) rather
    than failing the whole config load.
    """
    name = loader.construct_scalar(node)
    resolved = os.environ.get(name)
    if resolved is not None:
        return resolved
    log_warning(f"Environment variable not set: {name} (using empty string)")
    return ''
# Register the !env constructor with SafeLoader
# NOTE: this mutates the shared SafeLoader class, so every yaml.safe_load()
# in the process understands the !env tag once this module is imported.
yaml.add_constructor('!env', env_var_constructor, Loader=yaml.SafeLoader)
def load_config(config_path: Path) -> Dict[str, Any]:
    """Load and parse a YAML config file with environment variable substitution.

    Supports the !env tag registered on SafeLoader:
        chromium_src: !env CHROMIUM_SRC
        build_dir: !env BUILD_DIR

    Args:
        config_path: Path to the YAML file.

    Returns:
        Parsed configuration. An empty or comment-only file yields {} so
        callers can safely do `"key" in config` or config.get(...).

    Raises:
        FileNotFoundError: If config_path does not exist.
        yaml.YAMLError: If the file is not valid YAML.
    """
    if not config_path.exists():
        raise FileNotFoundError(f"Config file not found: {config_path}")
    log_info(f"Loading config from: {config_path}")
    with open(config_path, 'r', encoding='utf-8') as f:
        config = yaml.safe_load(f)
    # BUG FIX: safe_load() returns None for an empty document, which crashed
    # callers that immediately treat the result as a dict. Normalize to {}.
    return config or {}
def validate_required_envs(required_envs: list) -> None:
    """Ensure every listed environment variable is set (and non-empty).

    Logs each missing variable and raises SystemExit(1) if any are absent;
    returns None when all are present.
    """
    missing = [name for name in required_envs if not os.environ.get(name)]
    if not missing:
        return
    log_error("Missing required environment variables:")
    for var in missing:
        log_error(f" - {var}")
    log_error("\nSet these variables and try again")
    raise SystemExit(1)

View File

@@ -0,0 +1,480 @@
#!/usr/bin/env python3
"""
Build context dataclass to hold all build state
REFACTOR NOTE: This module is being refactored to use sub-components (PathConfig,
BuildConfig, ArtifactRegistry, EnvConfig) to avoid god object anti-pattern.
The old interface is maintained for backward compatibility during the migration.
"""
import time
from pathlib import Path
from dataclasses import dataclass, field
from typing import Dict, List, Optional
from .utils import (
get_platform,
get_platform_arch,
get_executable_extension,
join_paths,
IS_WINDOWS,
IS_MACOS,
)
from .env import EnvConfig
# =============================================================================
# Sub-Components - New modular structure
# =============================================================================
class ArtifactRegistry:
    """Registry of named build artifacts.

    Maps a unique artifact name (string) to the Path where it was produced.
    Re-adding an existing name silently overwrites the previous entry, so use
    distinct names (e.g. "signed_app_arm64" vs "signed_app_x64") to track
    multiple variants of the same logical artifact.

    Example:
        artifacts = ArtifactRegistry()
        artifacts.add("built_app", Path("/path/to/BrowserOS.app"))
        app_path = artifacts.get("built_app")
        if artifacts.has("signed_app"):
            ...
    """

    def __init__(self):
        # name -> path; private so callers go through the small API below
        self._artifacts: Dict[str, Path] = {}

    def add(self, name: str, path: Path) -> None:
        """Record `path` under `name`, replacing any existing entry.

        Args:
            name: Unique artifact name (e.g., "built_app", "signed_dmg").
            path: Path to the artifact.
        """
        self._artifacts[name] = path

    def get(self, name: str) -> Path:
        """Return the path registered under `name`.

        Raises:
            KeyError: If no artifact with that name was registered.
        """
        return self._artifacts[name]

    def has(self, name: str) -> bool:
        """Return True when an artifact named `name` has been registered."""
        return name in self._artifacts

    def all(self) -> Dict[str, Path]:
        """Return a shallow copy of every registered artifact.

        The copy keeps callers from mutating internal state by accident.
        """
        return dict(self._artifacts)
class PathConfig:
    """
    Path-related configuration for a build.

    Owns the project root, the Chromium checkout location, the relative
    output directory and the optional GN flags file, keeping path bookkeeping
    out of the main Context object.
    """

    def __init__(
        self,
        root_dir: Path,
        chromium_src: Optional[Path] = None,
        gn_flags_file: Optional[Path] = None,
    ):
        self.root_dir = root_dir
        # Empty Path() stands in for "not resolved yet"
        self._chromium_src = Path() if chromium_src is None else chromium_src
        self._out_dir = "out/Default"
        self.gn_flags_file = gn_flags_file

    @property
    def chromium_src(self) -> Path:
        """Chromium source directory."""
        return self._chromium_src

    @chromium_src.setter
    def chromium_src(self, value: Path):
        self._chromium_src = value

    @property
    def out_dir(self) -> str:
        """Output directory, relative to chromium_src."""
        return self._out_dir

    @out_dir.setter
    def out_dir(self, value: str):
        self._out_dir = value
class BuildConfig:
    """
    Build-specific configuration.

    Holds architecture, build type, version strings and the platform-specific
    application names used throughout the pipeline.
    """

    def __init__(
        self,
        architecture: Optional[str] = None,
        build_type: str = "debug",
    ):
        # Fall back to the host platform's architecture when none is given
        self.architecture = architecture if architecture else get_platform_arch()
        self.build_type = build_type
        # Version strings are filled in later (Context.__post_init__ syncs them)
        self.chromium_version = ""
        self.browseros_version = ""
        self.browseros_chromium_version = ""
        # App names - derived from the platform below
        self.CHROMIUM_APP_NAME = ""
        self.BROWSEROS_APP_NAME = ""
        self.BROWSEROS_APP_BASE_NAME = "BrowserOS"
        # Third party versions
        self.SPARKLE_VERSION = "2.7.0"
        self._set_app_names()

    def _set_app_names(self):
        """Derive CHROMIUM_APP_NAME / BROWSEROS_APP_NAME for the host platform."""
        base = self.BROWSEROS_APP_BASE_NAME
        if IS_WINDOWS():
            ext = get_executable_extension()
            self.CHROMIUM_APP_NAME = f"chrome{ext}"
            self.BROWSEROS_APP_NAME = f"{base}{ext}"
        elif IS_MACOS():
            self.CHROMIUM_APP_NAME = "Chromium.app"
            self.BROWSEROS_APP_NAME = f"{base}.app"
        else:
            self.CHROMIUM_APP_NAME = "chrome"
            self.BROWSEROS_APP_NAME = base.lower()
# =============================================================================
# Legacy BuildContext - Maintained for backward compatibility
# =============================================================================
@dataclass
class Context:
    """
    Context Object pattern - ONE place for all build state

    Holds paths, versions, platform-specific app names and build artifacts.
    Being migrated toward the PathConfig / BuildConfig / ArtifactRegistry /
    EnvConfig sub-components; the legacy flat attributes are kept in sync so
    existing callers keep working during the migration.
    """

    root_dir: Path  # project root (contains build/, CHROMIUM_VERSION, ...)
    chromium_src: Path = Path()  # Chromium checkout; empty Path until resolved
    out_dir: str = "out/Default"  # relative to chromium_src; re-derived per arch below
    architecture: str = ""  # Will be set in __post_init__
    build_type: str = "debug"
    chromium_version: str = ""  # e.g. "137.0.7151.69", loaded from CHROMIUM_VERSION
    browseros_version: str = ""  # build offset from build/config/BROWSEROS_BUILD_OFFSET
    browseros_chromium_version: str = ""  # chromium version with BUILD shifted by offset
    start_time: float = 0.0  # wall-clock start, set at the end of __post_init__

    # App names - will be set based on platform
    CHROMIUM_APP_NAME: str = ""
    BROWSEROS_APP_NAME: str = ""
    BROWSEROS_APP_BASE_NAME: str = "BrowserOS"  # Base name without extension

    # Third party
    SPARKLE_VERSION: str = "2.7.0"

    # Legacy artifacts dict - kept for backward compatibility
    # New code should use ctx.artifact_registry (ArtifactRegistry) instead
    artifacts: Dict[str, List[Path]] = field(default_factory=dict)

    # Fixed app path - used by UniversalBuildModule to prevent auto-detection
    # When set, get_app_path() returns this directly instead of auto-detecting
    _fixed_app_path: Optional[Path] = None

    # New sub-components (initialized in __post_init__)
    paths: PathConfig = field(init=False)
    build: BuildConfig = field(init=False)
    artifact_registry: ArtifactRegistry = field(init=False)  # New artifact system
    env: EnvConfig = field(init=False)

    def __post_init__(self):
        """Load version files and set platform/architecture-specific configurations"""
        # Initialize new sub-components
        self.paths = PathConfig(self.root_dir, self.chromium_src)
        self.build = BuildConfig(self.architecture, self.build_type)
        self.artifact_registry = ArtifactRegistry()  # New artifact system
        self.env = EnvConfig()
        # Set default gn_flags_file if not provided
        if not self.paths.gn_flags_file:
            self.paths.gn_flags_file = self.get_gn_flags_file()
        # Set platform-specific defaults
        if not self.architecture:
            self.architecture = get_platform_arch()
        self.build.architecture = self.architecture
        # Set platform-specific app names
        if IS_WINDOWS():
            self.CHROMIUM_APP_NAME = f"chrome{get_executable_extension()}"
            self.BROWSEROS_APP_NAME = (
                f"{self.BROWSEROS_APP_BASE_NAME}{get_executable_extension()}"
            )
        elif IS_MACOS():
            self.CHROMIUM_APP_NAME = "Chromium.app"
            self.BROWSEROS_APP_NAME = f"{self.BROWSEROS_APP_BASE_NAME}.app"
        else:
            self.CHROMIUM_APP_NAME = "chrome"
            self.BROWSEROS_APP_NAME = self.BROWSEROS_APP_BASE_NAME.lower()
        # Sync with BuildConfig
        self.build.CHROMIUM_APP_NAME = self.CHROMIUM_APP_NAME
        self.build.BROWSEROS_APP_NAME = self.BROWSEROS_APP_NAME
        # Set architecture-specific output directory with platform separator
        if IS_WINDOWS():
            self.out_dir = f"out\\Default_{self.architecture}"
        else:
            self.out_dir = f"out/Default_{self.architecture}"
        # Sync with PathConfig
        self.paths.out_dir = self.out_dir
        # Load version information using static methods
        if not self.chromium_version:
            self.chromium_version, version_dict = self._load_chromium_version(
                self.root_dir
            )
        else:
            # If chromium_version was provided, we still need to parse it for version_dict
            # NOTE(review): version_dict stays empty here, so
            # browseros_chromium_version is NOT derived when chromium_version
            # is passed in explicitly - confirm this is intended.
            version_dict = {}
        if not self.browseros_version:
            self.browseros_version = self._load_browseros_version(self.root_dir)
        # Derive browseros_chromium_version: the chromium version with the
        # BUILD component shifted up by the BrowserOS build offset
        if self.chromium_version and self.browseros_version and version_dict:
            # Calculate new BUILD number by adding the offset to the original BUILD
            new_build = int(version_dict["BUILD"]) + int(self.browseros_version)
            self.browseros_chromium_version = f"{version_dict['MAJOR']}.{version_dict['MINOR']}.{new_build}.{version_dict['PATCH']}"
        # Sync versions with BuildConfig
        self.build.chromium_version = self.chromium_version
        self.build.browseros_version = self.browseros_version
        self.build.browseros_chromium_version = self.browseros_chromium_version
        # Sync chromium_src with PathConfig (validation done by resolver)
        self.paths.chromium_src = self.chromium_src
        self.start_time = time.time()

    # === Initialization ===
    @classmethod
    def init_context(cls, config: Dict) -> "Context":
        """
        Initialize context from a plain config dict.

        Recognized keys: root_dir, chromium_src, architecture, build_type.
        Replaces ad-hoc construction for better testability.
        """
        root_dir = Path(config.get("root_dir", Path.cwd()))
        chromium_src = (
            Path(config.get("chromium_src", ""))
            if config.get("chromium_src")
            else Path()
        )
        # Get architecture or use platform default
        arch = config.get("architecture") or get_platform_arch()
        # Create instance (__post_init__ does the heavy lifting)
        ctx = cls(
            root_dir=root_dir,
            chromium_src=chromium_src,
            architecture=arch,
            build_type=config.get("build_type", "debug"),
        )
        return ctx

    @staticmethod
    def _load_chromium_version(root_dir: Path):
        """
        Load chromium version from the CHROMIUM_VERSION file.

        Returns: (version_string, version_dict); both empty when the file
        does not exist.
        """
        version_dict = {}
        version_file = join_paths(root_dir, "CHROMIUM_VERSION")
        if version_file.exists():
            # Parse VERSION file format: MAJOR=137\nMINOR=0\nBUILD=7151\nPATCH=69
            # NOTE: assumes exactly one '=' per line; a malformed file raises ValueError
            for line in version_file.read_text().strip().split("\n"):
                key, value = line.split("=")
                version_dict[key] = value
            # Construct chromium_version as MAJOR.MINOR.BUILD.PATCH
            chromium_version = f"{version_dict['MAJOR']}.{version_dict['MINOR']}.{version_dict['BUILD']}.{version_dict['PATCH']}"
            return chromium_version, version_dict
        return "", version_dict

    @staticmethod
    def _load_browseros_version(root_dir: Path) -> str:
        """Load the browseros build offset from build/config/BROWSEROS_BUILD_OFFSET."""
        version_file = join_paths(root_dir, "build", "config", "BROWSEROS_BUILD_OFFSET")
        if version_file.exists():
            return version_file.read_text().strip()
        return ""

    # Path getter methods
    def get_config_dir(self) -> Path:
        """Get build config directory"""
        return join_paths(self.root_dir, "build", "config")

    def get_gn_config_dir(self) -> Path:
        """Get GN config directory"""
        return join_paths(self.get_config_dir(), "gn")

    def get_gn_flags_file(self) -> Path:
        """Get GN flags file for the current platform and build type"""
        platform = get_platform()
        return join_paths(
            self.get_gn_config_dir(), f"flags.{platform}.{self.build_type}.gn"
        )

    def get_copy_resources_config(self) -> Path:
        """Get copy resources configuration file"""
        return join_paths(self.get_config_dir(), "copy_resources.yaml")

    def get_sparkle_dir(self) -> Path:
        """Get Sparkle directory"""
        return join_paths(self.chromium_src, "third_party", "sparkle")

    def get_sparkle_url(self) -> str:
        """Get Sparkle download URL for the pinned SPARKLE_VERSION"""
        return f"https://github.com/sparkle-project/Sparkle/releases/download/{self.SPARKLE_VERSION}/Sparkle-{self.SPARKLE_VERSION}.tar.xz"

    def get_entitlements_dir(self) -> Path:
        """Get entitlements directory"""
        return join_paths(self.root_dir, "resources", "entitlements")

    def get_pkg_dmg_path(self) -> Path:
        """Get pkg-dmg tool path (macOS only)"""
        return join_paths(self.chromium_src, "chrome", "installer", "mac", "pkg-dmg")

    def get_app_path(self) -> Path:
        """Get built app path

        For universal builds, checks if out/Default_universal/BrowserOS.app exists
        and returns that instead of the architecture-specific path.
        This allows downstream modules (sign, package) to work on the universal
        binary after UniversalBuildModule has run.
        Note: If _fixed_app_path is set, returns that directly (used by
        UniversalBuildModule to prevent auto-detection during arch-specific ops).
        """
        # If fixed path is set (for arch-specific operations), use it directly
        if self._fixed_app_path:
            return self._fixed_app_path
        # Check for universal binary first (macOS only)
        if IS_MACOS():
            universal_app = join_paths(
                self.chromium_src, "out/Default_universal", self.BROWSEROS_APP_NAME
            )
            if universal_app.exists():
                return universal_app
        # For debug builds, check if the app has a different name
        if self.build_type == "debug" and IS_MACOS():
            # Check for debug-branded app name
            debug_app_name = f"{self.BROWSEROS_APP_BASE_NAME} Dev.app"
            debug_app_path = join_paths(self.chromium_src, self.out_dir, debug_app_name)
            if debug_app_path.exists():
                return debug_app_path
        # Return architecture-specific path
        return join_paths(self.chromium_src, self.out_dir, self.BROWSEROS_APP_NAME)

    def get_chromium_app_path(self) -> Path:
        """Get original Chromium app path"""
        return join_paths(self.chromium_src, self.out_dir, self.CHROMIUM_APP_NAME)

    def get_gn_args_file(self) -> Path:
        """Get GN args file path"""
        return join_paths(self.chromium_src, self.out_dir, "args.gn")

    def get_notarization_zip(self) -> Path:
        """Get notarization zip path (macOS only)"""
        return join_paths(self.chromium_src, self.out_dir, "notarize.zip")

    def get_dmg_name(self, signed=False) -> str:
        """Get DMG filename with architecture suffix (and optional _signed marker)"""
        if self.architecture == "universal":
            if signed:
                return f"{self.BROWSEROS_APP_BASE_NAME}_{self.browseros_chromium_version}_universal_signed.dmg"
            return f"{self.BROWSEROS_APP_BASE_NAME}_{self.browseros_chromium_version}_universal.dmg"
        else:
            if signed:
                return f"{self.BROWSEROS_APP_BASE_NAME}_{self.browseros_chromium_version}_{self.architecture}_signed.dmg"
            return f"{self.BROWSEROS_APP_BASE_NAME}_{self.browseros_chromium_version}_{self.architecture}.dmg"

    def get_browseros_chromium_version(self) -> str:
        """Get browseros chromium version string"""
        return self.browseros_chromium_version

    def get_browseros_version(self) -> str:
        """Get browseros version string"""
        return self.browseros_version

    def get_app_base_name(self) -> str:
        """Get app base name without extension"""
        return self.BROWSEROS_APP_BASE_NAME

    def get_dist_dir(self) -> Path:
        """Get distribution output directory with version"""
        return join_paths(self.root_dir, "dist", self.browseros_version)

    # Dev CLI specific methods
    def get_patches_dir(self) -> Path:
        """Get individual patches directory"""
        return join_paths(self.root_dir, "chromium_patches")

    def get_chromium_replace_files_dir(self) -> Path:
        """Get chromium files replacement directory"""
        return join_paths(self.root_dir, "chromium_files")

    def get_features_yaml_path(self) -> Path:
        """Get features.yaml file path"""
        return join_paths(self.root_dir, "build", "features.yaml")

    def get_patch_path_for_file(self, file_path: str) -> Path:
        """Convert a chromium file path to the matching patch file path"""
        return join_paths(self.get_patches_dir(), file_path)

    def get_series_patches_dir(self) -> Path:
        """Get series patches directory (GNU Quilt format)"""
        return join_paths(self.root_dir, "series_patches")

View File

@@ -0,0 +1,218 @@
#!/usr/bin/env python3
"""
Environment variable configuration for BrowserOS build system
This module provides centralized access to all environment variables used by the build system.
It provides type-safe access, defaults, and clear documentation of what each variable is for.
The module automatically loads .env files from the project root on import.
"""
import os
from pathlib import Path
from typing import Optional
from dotenv import load_dotenv
def _load_dotenv_file():
    """Load a .env file, preferring packages/browseros/.env over the repo root one.

    Only the first existing candidate is loaded.
    """
    # This file lives at: packages/browseros/build/common/env.py
    common_dir = Path(__file__).parent  # common/
    browseros_root = common_dir.parent.parent  # packages/browseros/
    repo_root = browseros_root.parent.parent  # repo root
    # Candidates ordered most-specific first
    for candidate in (browseros_root / ".env", repo_root / ".env"):
        if candidate.exists():
            load_dotenv(candidate)
            break


# Load .env on module import
_load_dotenv_file()
class EnvConfig:
    """
    Centralized environment variable configuration

    This class provides clean, type-safe access to all environment variables
    used by the build system. It serves as the single source of truth for
    what environment variables are available and what they're used for.

    Usage:
        env = EnvConfig()
        if env.chromium_src:
            chromium_path = Path(env.chromium_src)
    """

    # Explicit property-name -> environment-variable mapping.
    # BUG FIX: validate_required() previously derived the env var name by
    # uppercasing the property name, which is wrong for every property whose
    # env var does not literally match (e.g. macos_notarization_apple_id
    # reads PROD_MACOS_NOTARIZATION_APPLE_ID, not MACOS_NOTARIZATION_APPLE_ID).
    _ENV_VAR_NAMES = {
        "chromium_src": "CHROMIUM_SRC",
        "arch": "ARCH",
        "pythonpath": "PYTHONPATH",
        "depot_tools_win_toolchain": "DEPOT_TOOLS_WIN_TOOLCHAIN",
        "macos_certificate_name": "MACOS_CERTIFICATE_NAME",
        "macos_notarization_apple_id": "PROD_MACOS_NOTARIZATION_APPLE_ID",
        "macos_notarization_team_id": "PROD_MACOS_NOTARIZATION_TEAM_ID",
        "macos_notarization_password": "PROD_MACOS_NOTARIZATION_PWD",
        "code_sign_tool_path": "CODE_SIGN_TOOL_PATH",
        "esigner_username": "ESIGNER_USERNAME",
        "esigner_password": "ESIGNER_PASSWORD",
        "esigner_totp_secret": "ESIGNER_TOTP_SECRET",
        "esigner_credential_id": "ESIGNER_CREDENTIAL_ID",
        "gcs_bucket": "GCS_BUCKET",
        "gcs_service_account_file": "GCS_SERVICE_ACCOUNT_FILE",
        "slack_webhook_url": "SLACK_WEBHOOK_URL",
    }

    # === Build Configuration ===
    @property
    def chromium_src(self) -> Optional[str]:
        """Path to Chromium source directory"""
        return os.environ.get("CHROMIUM_SRC")

    @property
    def arch(self) -> Optional[str]:
        """Target architecture (x64, arm64, universal)"""
        return os.environ.get("ARCH")

    @property
    def pythonpath(self) -> Optional[str]:
        """Python path for build scripts"""
        return os.environ.get("PYTHONPATH")

    @property
    def depot_tools_win_toolchain(self) -> str:
        """Windows depot_tools toolchain setting (0 = use system toolchain)"""
        return os.environ.get("DEPOT_TOOLS_WIN_TOOLCHAIN", "0")

    # === macOS Code Signing ===
    @property
    def macos_certificate_name(self) -> Optional[str]:
        """macOS code signing certificate name"""
        return os.environ.get("MACOS_CERTIFICATE_NAME")

    @property
    def macos_notarization_apple_id(self) -> Optional[str]:
        """Apple ID for macOS notarization"""
        return os.environ.get("PROD_MACOS_NOTARIZATION_APPLE_ID")

    @property
    def macos_notarization_team_id(self) -> Optional[str]:
        """Team ID for macOS notarization"""
        return os.environ.get("PROD_MACOS_NOTARIZATION_TEAM_ID")

    @property
    def macos_notarization_password(self) -> Optional[str]:
        """App-specific password for macOS notarization"""
        return os.environ.get("PROD_MACOS_NOTARIZATION_PWD")

    # === Windows Code Signing ===
    @property
    def code_sign_tool_path(self) -> Optional[str]:
        """Path to Windows code signing tool directory"""
        return os.environ.get("CODE_SIGN_TOOL_PATH")

    @property
    def esigner_username(self) -> Optional[str]:
        """eSigner username for Windows code signing"""
        return os.environ.get("ESIGNER_USERNAME")

    @property
    def esigner_password(self) -> Optional[str]:
        """eSigner password for Windows code signing"""
        return os.environ.get("ESIGNER_PASSWORD")

    @property
    def esigner_totp_secret(self) -> Optional[str]:
        """eSigner TOTP secret for Windows code signing"""
        return os.environ.get("ESIGNER_TOTP_SECRET")

    @property
    def esigner_credential_id(self) -> Optional[str]:
        """eSigner credential ID for Windows code signing"""
        return os.environ.get("ESIGNER_CREDENTIAL_ID")

    # === Upload & Distribution ===
    @property
    def gcs_bucket(self) -> str:
        """Google Cloud Storage bucket for artifact uploads

        Defaults to 'nxtscape' if not set via GCS_BUCKET env var
        """
        return os.environ.get("GCS_BUCKET", "nxtscape")

    @property
    def gcs_service_account_file(self) -> str:
        """Service account JSON file for GCS authentication

        Defaults to 'gclient.json' if not set via GCS_SERVICE_ACCOUNT_FILE env var
        """
        return os.environ.get("GCS_SERVICE_ACCOUNT_FILE", "gclient.json")

    # === Notifications ===
    @property
    def slack_webhook_url(self) -> Optional[str]:
        """Slack webhook URL for build notifications"""
        return os.environ.get("SLACK_WEBHOOK_URL")

    # === Helper Methods ===
    def get_macos_signing_config(self) -> dict:
        """
        Get all macOS signing configuration as a dict

        Returns:
            dict with keys: certificate_name, apple_id, team_id, notarization_pwd
        """
        return {
            "certificate_name": self.macos_certificate_name or "",
            "apple_id": self.macos_notarization_apple_id or "",
            "team_id": self.macos_notarization_team_id or "",
            "notarization_pwd": self.macos_notarization_password or "",
        }

    def get_windows_signing_config(self) -> dict:
        """
        Get all Windows signing configuration as a dict

        Returns:
            dict with keys: code_sign_tool_path, username, password, totp_secret, credential_id
        """
        return {
            "code_sign_tool_path": self.code_sign_tool_path or "",
            "username": self.esigner_username or "",
            "password": self.esigner_password or "",
            "totp_secret": self.esigner_totp_secret or "",
            "credential_id": self.esigner_credential_id or "",
        }

    def validate_required(self, *var_names: str) -> None:
        """
        Validate that required environment variables are set

        Args:
            *var_names: Property names to check (e.g., "chromium_src",
                "macos_notarization_apple_id"). Unknown names fall back to
                the uppercased name as the env var.

        Raises:
            ValueError: If any required variable is not set (missing variables
                are reported by their real environment variable name)

        Example:
            env = EnvConfig()
            env.validate_required("chromium_src", "macos_certificate_name")
        """
        missing = []
        for var_name in var_names:
            # Resolve the actual env var backing this property; naive
            # upper-casing breaks for PROD_MACOS_* variables (see _ENV_VAR_NAMES)
            env_var = self._ENV_VAR_NAMES.get(var_name, var_name.upper())
            if not os.environ.get(env_var):
                missing.append(env_var)
        if missing:
            raise ValueError(
                f"Missing required environment variables: {', '.join(missing)}"
            )

    def has_macos_signing_config(self) -> bool:
        """Check if all macOS signing environment variables are set"""
        config = self.get_macos_signing_config()
        return all(config.values())

    def has_windows_signing_config(self) -> bool:
        """Check if all Windows signing environment variables are set"""
        config = self.get_windows_signing_config()
        return all(config.values())

View File

@@ -0,0 +1,91 @@
#!/usr/bin/env python3
"""
Logging utilities for the build system
Provides consistent logging with Typer output and file logging
"""
import typer
from pathlib import Path
from datetime import datetime
# Global log file handle
_log_file = None
def _ensure_log_file():
"""Ensure log file is created with timestamp"""
global _log_file
if _log_file is None:
# Create logs directory if it doesn't exist
log_dir = Path(__file__).parent.parent / "logs"
log_dir.mkdir(exist_ok=True)
# Create log file with timestamp
timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
log_file_path = log_dir / f"build_{timestamp}.log"
# Open with UTF-8 encoding to handle any characters
_log_file = open(log_file_path, "w", encoding="utf-8")
_log_file.write(
f"BrowserOS Build Log - Started at {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n"
)
_log_file.write("=" * 80 + "\n\n")
return _log_file
def _log_to_file(message: str):
    """Append *message* to the build log with a timestamp prefix and flush."""
    handle = _ensure_log_file()
    now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    handle.write(f"[{now}] {message}\n")
    # Flush so the log survives a crash mid-build
    handle.flush()
def log_info(message: str):
    """Echo an informational message to the console and record it in the log file."""
    typer.echo(message)
    _log_to_file(f"INFO: {message}")
def log_warning(message: str):
    """Print a yellow warning (with ⚠️ prefix) and record it in the log file."""
    typer.secho(f"⚠️  {message}", fg=typer.colors.YELLOW)
    _log_to_file(f"WARNING: {message}")
def log_error(message: str):
    """Print a red error message to stderr and record it in the log file."""
    typer.secho(f"{message}", fg=typer.colors.RED, err=True)
    _log_to_file(f"ERROR: {message}")
def log_success(message: str):
    """Print a green success message and record it in the log file."""
    typer.secho(f"{message}", fg=typer.colors.GREEN)
    _log_to_file(f"SUCCESS: {message}")
def log_debug(message: str, enabled: bool = False):
    """Emit a debug line (console and log file) only when *enabled* is True."""
    if not enabled:
        return
    typer.secho(f"🔍 {message}", fg=typer.colors.BLUE, dim=True)
    _log_to_file(f"DEBUG: {message}")
def close_log_file():
    """Close and release the global log file handle, if one was opened."""
    global _log_file
    if _log_file is not None:
        _log_file.close()
        _log_file = None
# Export all logging functions
# (_log_to_file is semi-private but re-exported for utils.run_command)
__all__ = [
    'log_info',
    'log_warning',
    'log_error',
    'log_success',
    'log_debug',
    'close_log_file',
    '_log_to_file',  # Internal use by utils.run_command
]

View File

@@ -0,0 +1,105 @@
#!/usr/bin/env python3
"""
Base module system for BrowserOS build pipeline
This module defines the base class for all build modules and the validation framework.
All build modules should inherit from BuildModule and implement validate() and execute().
"""
from typing import List
class ValidationError(Exception):
    """
    Raised when a module's preconditions are not met.

    A module's validate() raises this when it cannot execute - missing
    requirements, platform incompatibility, or invalid configuration.
    The build pipeline stops immediately when it propagates.
    """
class CommandModule:
    """
    Base class for all build modules

    Each module represents a discrete step in the build pipeline (e.g., clean, compile, sign).
    Modules are self-contained and declare their requirements and outputs explicitly.

    Class Attributes:
        produces: List of artifact names this module creates (e.g., ["signed_app", "notarization_zip"])
        requires: List of artifact names this module needs (e.g., ["built_app"])
        description: Human-readable description for --list output

    Methods:
        validate(context): Check if module can run, raise ValidationError if not
        execute(context): Execute the module's main task

    Example:
        # FIX: example previously subclassed the old "BuildModule" name
        class CleanModule(CommandModule):
            produces = []
            requires = []
            description = "Clean build artifacts and reset git state"

            def validate(self, context):
                if not context.chromium_src.exists():
                    raise ValidationError(f"Chromium source not found: {context.chromium_src}")

            def execute(self, context):
                log_info("🧹 Cleaning build artifacts...")
                # ... cleaning logic ...
                log_success("Build artifacts cleaned")
    """

    # Metadata as class attributes (override in subclasses)
    produces: List[str] = []
    requires: List[str] = []
    description: str = "No description provided"

    def validate(self, context) -> None:
        """
        Validate that this module can run successfully

        This method should check all preconditions:
        - Platform requirements (e.g., macOS only)
        - Required artifacts from previous modules
        - Required environment variables
        - Required files/directories exist

        Args:
            context: Context object with all build state

        Raises:
            ValidationError: If any precondition is not met

        Note:
            This method is called before execute(). The pipeline stops
            immediately if ValidationError is raised.
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} must implement validate()"
        )

    def execute(self, context) -> None:
        """
        Execute the module's main task

        This method performs the actual work of the module. It should:
        - Log its own progress using log_info(), log_success(), etc.
        - Register any artifacts it produces using context.artifact_registry.add()
        - Raise exceptions on failure (will stop the pipeline)

        Args:
            context: Context object with all build state

        Raises:
            Exception: On any failure (stops the pipeline)

        Note:
            This method is only called after validate() succeeds.
            Modules should be idempotent where possible.
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} must implement execute()"
        )

View File

@@ -0,0 +1,164 @@
#!/usr/bin/env python3
"""Notification system for BrowserOS build pipeline"""
import os
import threading
from typing import Optional, Dict, Any
# Slack attachment colors (sidebar color of the message attachment)
COLOR_BLUE = "#2196F3"   # informational / start events
COLOR_GREEN = "#4CAF50"  # success
COLOR_RED = "#F44336"    # failure

# Build context (set once at pipeline start via set_build_context)
# Holds "os" and "arch" keys used to prefix/footer every notification
_build_context: Dict[str, str] = {}
def set_build_context(os_name: str, arch: str) -> None:
    """Record the OS/arch pair that decorates all subsequent notifications."""
    _build_context.update({"os": os_name, "arch": arch})
def _get_context_prefix() -> str:
    """Return the "[arch] " message prefix, or "" when no context is set."""
    arch = _build_context.get("arch")
    return f"[{arch}] " if arch is not None else ""
def _get_context_footer() -> str:
    """Return the attachment footer, including the OS name when known."""
    os_name = _build_context.get("os")
    if os_name is None:
        return "BrowserOS Build System"
    return f"BrowserOS Build System - {os_name}"
class Notifier:
    """Fire-and-forget Slack notifier; silently disabled when unconfigured."""

    def __init__(self):
        # Enabled only when SLACK_WEBHOOK_URL is set to a non-empty value
        self.slack_webhook_url = os.environ.get("SLACK_WEBHOOK_URL")
        self.enabled = bool(self.slack_webhook_url)

    def notify(self, event: str, message: str, details: Optional[Dict[str, Any]] = None, color: str = "#36a64f") -> None:
        """Queue a Slack message on a daemon thread; no-op when disabled."""
        if not self.enabled:
            return
        # Fire and forget - the build never waits on Slack
        worker = threading.Thread(
            target=self._send_notification,
            args=(event, message, details, color),
            daemon=True,
        )
        worker.start()

    def _send_notification(self, event: str, message: str, details: Optional[Dict[str, Any]], color: str) -> None:
        """Build and POST the Slack payload (runs on a background thread)."""
        try:
            import requests

            # Pick the platform emoji for the footer, if the OS is known
            icon = {"macOS": "🍎", "Windows": "🪟", "Linux": "🐧"}.get(
                _build_context.get("os")
            )
            base_footer = _get_context_footer()
            footer = f"{icon} {base_footer}" if icon else base_footer
            # Use legacy attachment format for colored sidebar
            attachment = {
                "color": color,
                "mrkdwn_in": ["text", "fields"],
                "text": f"*{event}*\n{message}",
                "footer": footer,
            }
            if details:
                attachment["fields"] = [
                    {"title": title, "value": str(value), "short": True}
                    for title, value in details.items()
                ]
            requests.post(
                self.slack_webhook_url,
                json={"attachments": [attachment]},
                timeout=5,  # Quick timeout for fire-and-forget
            )
        except Exception:
            # Fire-and-forget: never let a notification failure (including a
            # missing `requests` package) break the build
            pass
# Global notifier instance (created lazily by get_notifier)
_notifier = None


def get_notifier() -> Notifier:
    """Return the process-wide Notifier, creating it on first use."""
    global _notifier
    if _notifier is None:
        _notifier = Notifier()
    return _notifier
def notify_pipeline_start(pipeline_name: str, modules: list) -> None:
    """Announce pipeline start on Slack with the module list as a field.

    NOTE(review): pipeline_name is accepted but not currently included in
    the message - confirm whether it should be.
    """
    get_notifier().notify(
        "🚀 Pipeline Started",
        "Build pipeline started",
        {"Modules": ", ".join(modules)},
        color=COLOR_BLUE,
    )
def notify_pipeline_end(pipeline_name: str, duration: float) -> None:
    """Announce successful pipeline completion with an "Xm Ys" duration field."""
    mins, secs = divmod(int(duration), 60)
    get_notifier().notify(
        "🏁 Pipeline Completed",
        "Build pipeline completed successfully",
        {"Duration": f"{mins}m {secs}s"},
        color=COLOR_GREEN,
    )
def notify_pipeline_error(pipeline_name: str, error: str) -> None:
    """Announce pipeline failure, attaching the error text as a field."""
    get_notifier().notify(
        "❌ Pipeline Failed",
        "Build pipeline failed",
        {"Error": error},
        color=COLOR_RED,
    )
def notify_module_start(module_name: str) -> None:
    """Announce that a pipeline module began executing (prefixed with arch)."""
    get_notifier().notify(
        "▶️ Module Started",
        f"{_get_context_prefix()}Module '{module_name}' started",
        None,
        color=COLOR_BLUE,
    )
def notify_module_completion(module_name: str, duration: float) -> None:
    """Announce module success, with its runtime in seconds as a field."""
    get_notifier().notify(
        "✅ Module Completed",
        f"{_get_context_prefix()}Module '{module_name}' completed",
        {"Duration": f"{duration:.1f}s"},
        color=COLOR_GREEN,
    )

View File

@@ -0,0 +1,80 @@
#!/usr/bin/env python3
"""Pipeline validation for BrowserOS build system"""
from typing import Dict, List, Type
from .module import CommandModule
from .utils import log_error, log_info
def validate_pipeline(pipeline: List[str], available_modules: Dict[str, Type[CommandModule]]) -> None:
    """Ensure every pipeline entry names a known module.

    Logs the offending names plus the full module catalogue and raises
    SystemExit(1) when any entry is unknown; returns silently otherwise.
    """
    unknown = [name for name in pipeline if name not in available_modules]
    if not unknown:
        return
    log_error("Invalid module names in pipeline:")
    for name in unknown:
        log_error(f" - {name}")
    log_error("\nAvailable modules:")
    for name in sorted(available_modules):
        log_info(f" - {name}: {available_modules[name].description}")
    raise SystemExit(1)
def show_available_modules(available_modules: Dict[str, Type[CommandModule]]) -> None:
    """Display all available modules with descriptions, grouped by category."""
    # Category -> expected module names (only those actually registered are shown)
    groups = {
        "Setup & Environment": ["clean", "git_setup", "sparkle_setup", "configure"],
        "Patches & Resources": ["patches", "chromium_replace", "string_replaces", "resources"],
        "Build": ["compile"],
        "Code Signing": ["sign_macos", "sign_windows", "sign_linux"],
        "Packaging": ["package_macos", "package_windows", "package_linux"],
        "Upload": ["upload_gcs"],
    }
    divider = "=" * 70
    rule = "-" * 70
    log_info("\n" + divider)
    log_info("Available Build Modules")
    log_info(divider)
    for heading, names in groups.items():
        known = [n for n in names if n in available_modules]
        # Skip empty categories entirely
        if not known:
            continue
        log_info(f"\n{heading}:")
        log_info(rule)
        for n in known:
            log_info(f" {n:20} {available_modules[n].description}")
    # Anything registered but not categorized above (for extensibility)
    categorized = {n for members in groups.values() for n in members}
    leftovers = sorted(set(available_modules.keys()) - categorized)
    if leftovers:
        log_info("\nOther:")
        log_info(rule)
        for n in leftovers:
            log_info(f" {n:20} {available_modules[n].description}")
    log_info("\n" + divider)
    log_info("Example Usage:")
    log_info(divider)
    log_info(" browseros build --modules clean,git_setup,configure,compile")
    log_info(" browseros build --modules compile,sign_macos,package_macos")
    log_info(" browseros build --config release.yaml")
    log_info(divider + "\n")

View File

@@ -0,0 +1,324 @@
#!/usr/bin/env python3
"""
Configuration resolver - single source of truth for all config resolution
Two mutually exclusive modes:
1. CONFIG mode (--config FILE): YAML controls everything
2. DIRECT mode (no --config): CLI args > Env > Defaults
Precedence (CONFIG mode):
- YAML (authoritative)
- Env vars (only for secrets/credentials via EnvConfig)
- Error if required fields missing
Precedence (DIRECT mode):
- CLI args (explicit, Typer defaults must be None)
- Environment variables (CHROMIUM_SRC, ARCH)
- Hardcoded defaults
This centralizes ALL configuration resolution in one place.
"""
from pathlib import Path
from typing import Optional, List, Dict, Any, Tuple
from .context import Context
from .env import EnvConfig
from .utils import get_platform_arch, log_info
def resolve_config(
    cli_args: Dict[str, Any],
    yaml_config: Optional[Dict[str, Any]] = None,
    root_dir: Optional[Path] = None,
) -> Context:
    """Resolve build configuration - single entry point.

    Args:
        cli_args: Dictionary of CLI arguments (values are None when not provided)
        yaml_config: Optional YAML configuration (its presence selects CONFIG mode)
        root_dir: Optional project root (defaults to the current working directory)

    Returns:
        Fully resolved Context object

    Raises:
        ValueError: If required fields are missing or invalid

    Modes:
        - CONFIG mode (yaml_config provided): YAML is authoritative
        - DIRECT mode (no yaml_config): CLI > Env > Defaults
    """
    base_dir = root_dir if root_dir is not None else Path.cwd()
    if yaml_config:
        return _resolve_config_mode(yaml_config, cli_args, base_dir)
    return _resolve_direct_mode(cli_args, base_dir)
def _resolve_config_mode(
    yaml_config: Dict[str, Any], cli_args: Dict[str, Any], root_dir: Path
) -> Context:
    """CONFIG MODE: YAML is base, CLI can override.

    Args:
        yaml_config: YAML configuration dictionary
        cli_args: CLI arguments (can override YAML values)
        root_dir: Project root directory

    Returns:
        Context with values from YAML, optionally overridden by CLI

    Raises:
        ValueError: If required fields missing from both YAML and CLI
    """
    build_section = yaml_config.get("build", {})
    # chromium_src: CLI override > YAML > error
    chromium_src_str = cli_args.get("chromium_src") or build_section.get("chromium_src")
    if not chromium_src_str:
        raise ValueError(
            "CONFIG MODE: chromium_src required in YAML!\n"
            "Add to your config:\n"
            " build:\n"
            " chromium_src: /path/to/chromium"
        )
    chromium_src = Path(chromium_src_str)
    chromium_src_source = "cli" if cli_args.get("chromium_src") else "yaml"
    # Validate chromium_src exists
    if not chromium_src.exists():
        raise ValueError(
            f"CONFIG MODE: chromium_src does not exist: {chromium_src}\n"
            f"Expected directory with Chromium source code"
        )
    # architecture: CLI override > YAML > platform default
    architecture = (
        cli_args.get("arch")
        or build_section.get("architecture")
        or build_section.get("arch")
    )
    arch_source = "cli" if cli_args.get("arch") else "yaml"
    if not architecture:
        architecture = get_platform_arch()
        arch_source = "default"
        log_info(f"CONFIG MODE: Using platform default architecture: {architecture}")
    # build_type: CLI override > YAML > debug
    build_type = cli_args.get("build_type") or build_section.get("type", "debug")
    # BUG FIX: provenance was previously reported as "yaml" even when the YAML
    # had no build.type and the hardcoded "debug" default was used.  Report
    # "default" in that case, mirroring the architecture provenance above.
    if cli_args.get("build_type"):
        build_type_source = "cli"
    elif "type" in build_section:
        build_type_source = "yaml"
    else:
        build_type_source = "default"
    log_info(f"✓ CONFIG MODE: chromium_src={chromium_src} ({chromium_src_source})")
    log_info(f"✓ CONFIG MODE: architecture={architecture} ({arch_source})")
    log_info(f"✓ CONFIG MODE: build_type={build_type} ({build_type_source})")
    return Context(
        root_dir=root_dir,
        chromium_src=chromium_src,
        architecture=architecture,
        build_type=build_type,
    )
def _resolve_direct_mode(cli_args: Dict[str, Any], root_dir: Path) -> Context:
    """DIRECT MODE: resolve from CLI args, then environment, then defaults.

    Args:
        cli_args: CLI arguments (None for anything the user did not pass)
        root_dir: Project root directory

    Returns:
        Context with resolved values

    Raises:
        ValueError: If chromium_src is not provided via CLI or environment
    """
    env = EnvConfig()
    # chromium_src: CLI > Env > Error
    src_value = cli_args.get("chromium_src") or env.chromium_src
    if not src_value:
        raise ValueError(
            "DIRECT MODE: chromium_src required!\n"
            "Provide via one of:\n"
            " --chromium-src PATH\n"
            " CHROMIUM_SRC environment variable"
        )
    src_path = Path(src_value)
    # Must point at an existing Chromium checkout
    if not src_path.exists():
        raise ValueError(
            f"DIRECT MODE: chromium_src does not exist: {src_path}\n"
            f"Expected directory with Chromium source code"
        )
    # architecture: CLI > Env > platform default
    arch = cli_args.get("arch") or env.arch
    if not arch:
        arch = get_platform_arch()
        log_info(f"DIRECT MODE: Using platform default architecture: {arch}")
    # build_type: CLI > hardcoded "debug"
    resolved_build_type = cli_args.get("build_type") or "debug"
    log_info(f"✓ DIRECT MODE: chromium_src={src_path} (cli/env)")
    log_info(f"✓ DIRECT MODE: architecture={arch} (cli/env/default)")
    log_info(f"✓ DIRECT MODE: build_type={resolved_build_type} (cli/default)")
    return Context(
        root_dir=root_dir,
        chromium_src=src_path,
        architecture=arch,
        build_type=resolved_build_type,
    )
def resolve_pipeline(
    cli_args: Dict[str, Any],
    yaml_config: Optional[Dict[str, Any]] = None,
    execution_order: Optional[List[Tuple[str, List[str]]]] = None,
) -> List[str]:
    """Single entry point for resolving the module pipeline.

    Args:
        cli_args: CLI arguments dictionary
        yaml_config: Optional YAML configuration; its presence selects CONFIG mode
        execution_order: Required only for DIRECT mode driven by phase flags

    Returns:
        Module names in execution order

    Raises:
        ValueError: If no pipeline is specified, or conflicting modes are used

    Modes:
        - CONFIG mode: pipeline comes from yaml_config["modules"]
        - DIRECT mode: pipeline comes from --modules or the phase flags
    """
    if not yaml_config:
        return _resolve_pipeline_direct_mode(cli_args, execution_order)
    return _resolve_pipeline_config_mode(yaml_config)
def _resolve_pipeline_config_mode(yaml_config: Dict[str, Any]) -> List[str]:
    """CONFIG MODE: take the pipeline straight from the YAML modules list.

    Args:
        yaml_config: YAML configuration dictionary

    Returns:
        Module list from YAML

    Raises:
        ValueError: If the YAML has no (or an empty) modules list
    """
    pipeline = yaml_config.get("modules")
    if pipeline:
        log_info(f"✓ CONFIG MODE: pipeline={pipeline} (yaml)")
        return pipeline
    raise ValueError(
        "CONFIG MODE: modules required in YAML!\n"
        "Add to your config:\n"
        " modules: [clean, configure, compile, sign_macos]"
    )
def _resolve_pipeline_direct_mode(
    cli_args: Dict[str, Any],
    execution_order: Optional[List[Tuple[str, List[str]]]],
) -> List[str]:
    """DIRECT MODE: Pipeline from --modules or phase flags.

    Args:
        cli_args: CLI arguments dictionary
        execution_order: Phase execution order (required for flag mode)

    Returns:
        Module list in execution order

    Raises:
        ValueError: If no pipeline specified or both modes used
    """
    has_modules = cli_args.get("modules") is not None
    has_flags = _has_phase_flags(cli_args)
    if not has_modules and not has_flags:
        raise ValueError(
            "DIRECT MODE: No pipeline specified!\n"
            "Use one of:\n"
            " --modules clean,compile,...\n"
            " --setup --build --sign (phase flags)"
        )
    # The two specification styles are mutually exclusive
    if has_modules and has_flags:
        raise ValueError(
            "DIRECT MODE: Cannot use both --modules and phase flags!\n"
            "Choose one approach."
        )
    if has_modules:
        modules_str = cli_args["modules"]
        # BUG FIX: drop empty entries so a trailing or doubled comma
        # ("clean,,compile" / "clean,compile,") does not inject an
        # empty-string module name into the pipeline.
        pipeline = [m.strip() for m in modules_str.split(",") if m.strip()]
        log_info(f"✓ DIRECT MODE: pipeline={pipeline} (--modules)")
        return pipeline
    if has_flags:
        if execution_order is None:
            raise ValueError(
                "DIRECT MODE: execution_order required for phase flag resolution"
            )
        pipeline = _build_pipeline_from_flags(cli_args, execution_order)
        log_info(f"✓ DIRECT MODE: pipeline={pipeline} (phase flags)")
        return pipeline
    # Defensive: all cases are covered above, this should be unreachable
    raise ValueError("DIRECT MODE: Internal error - no pipeline resolution matched")
def _has_phase_flags(cli_args: Dict[str, Any]) -> bool:
    """Report whether the user set at least one pipeline phase flag.

    Args:
        cli_args: CLI arguments dictionary

    Returns:
        True if any of the known phase flags is truthy
    """
    for flag in ("setup", "prep", "build", "sign", "package", "upload"):
        if cli_args.get(flag, False):
            return True
    return False
def _build_pipeline_from_flags(
    cli_args: Dict[str, Any],
    execution_order: List[Tuple[str, List[str]]],
) -> List[str]:
    """Assemble the pipeline from phase flags, preserving the fixed phase order.

    Args:
        cli_args: CLI arguments carrying the phase flag keys
        execution_order: Ordered list of (phase_name, modules) pairs

    Returns:
        Modules of every enabled phase, in execution_order sequence
    """
    # Only these phases can be toggled; anything else stays disabled.
    known_phases = ("setup", "prep", "build", "sign", "package", "upload")
    enabled = {name for name in known_phases if cli_args.get(name, False)}
    result: List[str] = []
    for phase, modules in execution_order:
        if phase in enabled:
            result.extend(modules)
    return result

View File

@@ -10,85 +10,31 @@ import yaml
import shutil
from pathlib import Path
from typing import Optional, List, Dict, Union
from datetime import datetime
# Import logging functions from logger module - re-exported for other modules
from .logger import ( # noqa: F401
log_info,
log_error,
log_warning,
log_success,
_log_to_file,
)
# Platform detection
IS_WINDOWS = sys.platform == "win32"
IS_MACOS = sys.platform == "darwin"
IS_LINUX = sys.platform.startswith("linux")
# Global log file handle
_log_file = None
# Platform detection functions
def IS_WINDOWS() -> bool:
    """Return True when the interpreter is running on Windows."""
    return "win32" == sys.platform
def _ensure_log_file():
    """Lazily open the timestamped build log (logs/build_<stamp>.log).

    The file handle is cached in the module-level ``_log_file`` global so the
    header is written exactly once per process.
    """
    global _log_file
    if _log_file is not None:
        return _log_file
    # logs/ lives next to the package, one level above this module
    log_dir = Path(__file__).parent.parent / "logs"
    log_dir.mkdir(exist_ok=True)
    stamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    # UTF-8 so any character (emoji, non-Latin text) can be logged
    _log_file = open(log_dir / f"build_{stamp}.log", "w", encoding="utf-8")
    started = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    _log_file.write(f"Nxtscape Build Log - Started at {started}\n")
    _log_file.write("=" * 80 + "\n\n")
    return _log_file
def IS_MACOS() -> bool:
    """Return True when the interpreter is running on macOS."""
    return "darwin" == sys.platform
def _log_to_file(message: str):
    """Append a timestamped line to the build log and flush immediately."""
    handle = _ensure_log_file()
    stamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    handle.write(f"[{stamp}] {message}\n")
    # Flush per line so the log survives a crashed build
    handle.flush()
def _sanitize_for_windows(message: str) -> str:
    """Strip non-ASCII characters on Windows to dodge console encoding errors."""
    if sys.platform != "win32":
        return message
    # Windows consoles may not be UTF-8; keep only plain ASCII
    return "".join(ch for ch in message if ord(ch) < 128)
def log_info(message: str):
    """Echo an info message to stdout and record it in the build log."""
    print(_sanitize_for_windows(message))
    _log_to_file("INFO: " + message)
def log_warning(message: str):
    """Print a warning to stdout (ASCII-safe on Windows) and log it."""
    text = (
        f"[WARN] {_sanitize_for_windows(message)}"
        if sys.platform == "win32"
        else f"⚠️ {message}"
    )
    print(text)
    _log_to_file(f"WARNING: {message}")
def log_error(message: str):
    """Print an error to stdout (ASCII-safe on Windows) and log it."""
    if sys.platform != "win32":
        print(f"{message}")
    else:
        print(f"[ERROR] {_sanitize_for_windows(message)}")
    _log_to_file(f"ERROR: {message}")
def log_success(message: str):
    """Print a success message to stdout (ASCII-safe on Windows) and log it."""
    text = (
        f"[SUCCESS] {_sanitize_for_windows(message)}"
        if sys.platform == "win32"
        else f"{message}"
    )
    print(text)
    _log_to_file(f"SUCCESS: {message}")
def IS_LINUX() -> bool:
    """Return True when the interpreter is running on Linux."""
    return sys.platform[:5] == "linux"
def run_command(
@@ -190,25 +136,25 @@ def load_config(config_path: Path) -> Dict:
# Platform-specific utilities
def get_platform() -> str:
"""Get platform name in a consistent format"""
if IS_WINDOWS:
if IS_WINDOWS():
return "windows"
elif IS_MACOS:
elif IS_MACOS():
return "macos"
elif IS_LINUX:
elif IS_LINUX():
return "linux"
return "unknown"
def get_platform_arch() -> str:
"""Get default architecture for current platform"""
if IS_WINDOWS:
if IS_WINDOWS():
return "x64"
elif IS_MACOS:
elif IS_MACOS():
# macOS can be arm64 or x64
import platform
return "arm64" if platform.machine() == "arm64" else "x64"
elif IS_LINUX:
elif IS_LINUX():
# Linux can be x64 or arm64
import platform
@@ -225,14 +171,14 @@ def get_platform_arch() -> str:
def get_executable_extension() -> str:
"""Get executable file extension for current platform"""
return ".exe" if IS_WINDOWS else ""
return ".exe" if IS_WINDOWS() else ""
def get_app_extension() -> str:
"""Get application bundle extension for current platform"""
if IS_MACOS:
if IS_MACOS():
return ".app"
elif IS_WINDOWS:
elif IS_WINDOWS():
return ".exe"
return ""
@@ -240,7 +186,7 @@ def get_app_extension() -> str:
def normalize_path(path: Union[str, Path]) -> Path:
"""Normalize path for current platform"""
path = Path(path)
if IS_WINDOWS:
if IS_WINDOWS():
# Convert forward slashes to backslashes on Windows
return Path(str(path).replace("/", "\\"))
return path
@@ -265,7 +211,7 @@ def safe_rmtree(path: Union[str, Path]) -> None:
if not path.exists():
return
if IS_WINDOWS:
if IS_WINDOWS():
# On Windows, use rmdir for junctions and symlinks
import stat
@@ -280,7 +226,7 @@ def safe_rmtree(path: Union[str, Path]) -> None:
if path.is_symlink() or (path.is_dir() and os.path.islink(str(path))):
path.unlink()
return
except:
except Exception:
pass
# Fall back to rmtree with error handler

View File

@@ -0,0 +1 @@
82

View File

@@ -1 +0,0 @@
81

View File

@@ -33,6 +33,12 @@
# arch: ["x64"]
copy_operations:
# Version file
- name: "BrowserOS Version File"
source: "resources/BROWSEROS_VERSION"
destination: "chrome/BROWSEROS_VERSION"
type: "file"
# Extensions
# - name: "AI Side Panel Extension"
# source: "resources/files/ai_side_panel"

View File

@@ -1,30 +1,42 @@
# Nxtscape Debug Build Configuration
# BrowserOS macOS Debug Build Configuration
#
# Debug builds use single architecture for faster iteration.
# Use --arch flag to specify arm64 or x64 (defaults to system architecture).
#
# Environment Variables:
# Use !env tag to reference environment variables:
# Example: chromium_src: !env CHROMIUM_SRC
build:
type: debug
architecture: arm64 # Single arch for faster debug builds
# architectures: [x86_64, arm64] # Uncomment for multi-arch debug
# universal: false # Uncomment to disable universal for debug
architecture: arm64 # Single arch for faster debug builds
gn_flags:
file: build/config/gn/flags.macos.debug.gn
steps:
clean: false
git_setup: true
apply_patches: true
build: true
sign: false
package: true
# Explicit module execution order
modules:
# Phase 1: Setup (no clean for faster iteration)
- git_setup
paths:
root_dir: .
# chromium_src: ../chromium-src
# Phase 2: Patches & Resources
- resources
- chromium_replace
- string_replaces
- patches
# Environment-specific settings
env:
PYTHONPATH: scripts
# Phase 3: Build
- configure
- compile
# Phase 4: Package (no signing for debug)
- package_macos
# Required environment variables
# Note: CHROMIUM_SRC can be provided via --chromium-src CLI flag, YAML config, or env var
# Debug builds typically don't require other env vars (no signing, no upload)
# Notification settings
notifications:
slack: false # Set to true to enable Slack notifications for debug builds
slack: false # Disable Slack notifications for debug builds

View File

@@ -14,7 +14,7 @@ clang_use_chrome_plugins = false
fatal_linker_warnings = false
# Core features
enable_nacl = false
# enable_nacl = false
enable_widevine = true
disable_fieldtrial_testing_config = true

View File

@@ -6,7 +6,6 @@ chrome_pgo_phase = 0
is_official_build=false
enable_sparkle=true
enable_reading_list=false
enable_reporting=false
enable_service_discovery=false
enable_widevine=true
@@ -24,3 +23,6 @@ proprietary_codecs=true
enable_platform_hevc = true
disable_fieldtrial_testing_config=true
# new macOS build flags
use_system_xcode = true
use_clang_modules = false

View File

@@ -8,8 +8,12 @@ chrome_pgo_phase = 0
dcheck_always_on=false
enable_sparkle=true
# new macOS build flags
use_system_xcode = true
use_clang_modules = false
enable_reading_list=false
enable_reporting=false
enable_service_discovery=false
enable_widevine=true
@@ -27,6 +31,7 @@ proprietary_codecs=true
enable_platform_hevc = true
disable_fieldtrial_testing_config=true
# build_with_tflite_lib=false
# clang_use_chrome_plugins=false
# disable_fieldtrial_testing_config=true

View File

@@ -19,7 +19,6 @@ use_lld = true
dcheck_always_on = true
# Match macOS features exactly
enable_reading_list = false
enable_reporting = false
enable_service_discovery = false
enable_widevine = true
@@ -41,4 +40,4 @@ disable_fieldtrial_testing_config = true
# enable_sparkle = false
# Disable Windows-specific features that macOS doesn't use
enable_nacl = false
enable_nacl = false

View File

@@ -1,35 +1,30 @@
# BrowserOS Linux Release Build Configuration
# BrowserOS Linux Package Configuration
#
# This config packages an already-built Linux application.
# Use this when you have a pre-built app and only need to package it.
#
# Expects: out/Default/chrome (Linux binary)
#
# Environment Variables:
# Use !env tag to reference environment variables:
# Example: chromium_src: !env CHROMIUM_SRC
build:
type: release
architecture: x64 # Linux x64 only
# architectures: [x64] # Single architecture for Linux
universal: false # Linux doesn't support universal binaries
architecture: x64 # Linux x64
gn_flags:
file: build/config/gn/flags.linux.release.gn
steps:
clean: false
git_setup: false
apply_patches: false
build: false
sign: false # Linux doesn't require code signing
package: true
# Explicit module execution order (partial pipeline - package only)
modules:
- package_linux # Linux doesn't require code signing
paths:
root_dir: .
# chromium_src: ../chromium-src
# Environment-specific settings
env:
PYTHONPATH: scripts
# Linux-specific settings
linux:
appimage:
compression: gzip # Compression type for AppImage
architecture: x86_64 # AppImage architecture designation
# Required environment variables
# Note: CHROMIUM_SRC can be provided via --chromium-src CLI flag, YAML config, or env var
# Note: GCS_BUCKET defaults to 'nxtscape' if not set
# Linux packaging doesn't require signing
# Notification settings
notifications:
slack: true # Enable Slack notifications for release builds
slack: true # Enable Slack notifications for packaging operations

View File

@@ -1,35 +1,45 @@
# BrowserOS Linux Release Build Configuration
#
# Environment Variables:
# Use !env tag to reference environment variables:
# Example: chromium_src: !env CHROMIUM_SRC
build:
type: release
architecture: x64 # Linux x64 only
# architectures: [x64] # Single architecture for Linux
universal: false # Linux doesn't support universal binaries
architecture: x64 # Linux x64
gn_flags:
file: build/config/gn/flags.linux.release.gn
steps:
clean: true
git_setup: true
apply_patches: true
build: true
sign: false # Linux doesn't require code signing
package: true
# Explicit module execution order
modules:
# Phase 1: Setup
- clean
- git_setup
paths:
root_dir: .
# chromium_src: ../chromium-src
# Phase 2: Patches & Resources
- resources
- chromium_replace
- string_replaces
- series_patches
- patches
# Environment-specific settings
env:
PYTHONPATH: scripts
# Phase 3: Build
- configure
- compile
# Linux-specific settings
linux:
appimage:
compression: gzip # Compression type for AppImage
architecture: x86_64 # AppImage architecture designation
# Phase 4: Package (Linux doesn't require code signing)
- package_linux
# Phase 5: Upload (optional)
- upload_gcs
# Required environment variables
# Note: CHROMIUM_SRC can be provided via --chromium-src CLI flag, YAML config, or env var
# Note: GCS_BUCKET defaults to 'nxtscape' if not set
# Note: GCS_SERVICE_ACCOUNT_FILE defaults to 'gclient.json' if not set
# Linux release builds don't require code signing, only upload
# Notification settings
notifications:
slack: true # Enable Slack notifications for release builds
slack: true

View File

@@ -1,41 +1,51 @@
# Nxtscape Release Build Configuration
# BrowserOS macOS Release Build Configuration (Universal Binary)
#
# This config builds a universal binary (arm64 + x64).
# For single-architecture builds, use --arch flag to override.
#
# Environment Variables:
# Use !env tag to reference environment variables:
# Example: chromium_src: !env CHROMIUM_SRC
build:
type: release
# architecture: arm64 # Default single architecture
architectures: [arm64, x64] # List for multi-architecture builds
universal: true # Create universal binary from multiple architectures
architecture: universal # Builds both arm64 and x64, then merges
gn_flags:
file: build/config/gn/flags.macos.release.gn
steps:
clean: true
git_setup: true
apply_patches: true
build: true
sign: true
package: true
# Explicit module execution order
modules:
# Phase 1: Setup
- clean
- git_setup
- sparkle_setup
# Disable Sparkle for testing (uncomment to disable)
# disable_sparkle: true
# Phase 2: Patches & Resources
- chromium_replace
- string_replaces
- series_patches
- patches
paths:
root_dir: .
# chromium_src: ../chromium-src
# Phase 3: Universal Build (full pipeline)
# This module handles:
# - Build arm64 -> sign -> package -> upload
# - Build x64 -> sign -> package -> upload
# - Merge -> sign universal -> package -> upload
# Output: 3 DMGs uploaded to GCS (arm64, x64, universal)
- universal_build
# Environment-specific settings
env:
PYTHONPATH: scripts
# NOTE: sign_macos, package_macos, upload_gcs are now internal to universal_build
# For single-arch builds, use the standard pipeline with those modules
# Signing configuration (requires environment variables)
signing:
require_env_vars:
- MACOS_CERTIFICATE_NAME
- PROD_MACOS_NOTARIZATION_APPLE_ID
- PROD_MACOS_NOTARIZATION_TEAM_ID
- PROD_MACOS_NOTARIZATION_PWD
# Required environment variables
# Note: CHROMIUM_SRC can be provided via --chromium-src CLI flag, YAML config, or env var
required_envs:
- MACOS_CERTIFICATE_NAME
- PROD_MACOS_NOTARIZATION_APPLE_ID
- PROD_MACOS_NOTARIZATION_TEAM_ID
- PROD_MACOS_NOTARIZATION_PWD
# Notification settings
notifications:
slack: true # Enable Slack notifications for release builds
slack: true

View File

@@ -1,43 +1,50 @@
# Nxtscape Windows Release Build Configuration
# BrowserOS Windows Release Build Configuration
#
# Environment Variables:
# Use !env tag to reference environment variables:
# Example: chromium_src: !env CHROMIUM_SRC
build:
type: release
architecture: x64 # Windows default architecture
# No universal builds on Windows
architecture: x64 # Windows builds are typically x64
gn_flags:
file: build/config/gn/flags.windows.release.gn
steps:
clean: true
git_setup: true
apply_patches: true
build: true
sign: true # Enable signing with eSigner CKA
package: true
# Explicit module execution order
modules:
# Phase 1: Setup
- clean
- git_setup
paths:
root_dir: .
# chromium_src: C:\Users\YourName\chromium\src
# Phase 2: Patches & Resources
- resources
- chromium_replace
- string_replaces
- series_patches
- patches
# Environment-specific settings
env:
PYTHONPATH: scripts
# Phase 3: Build
- configure
- compile
# Signing configuration (using eSigner CKA certificate in Windows store)
signing:
certificate_name: "FELAFAX, INC." # Your certificate subject name from SSL.com
# Or use environment variable:
# require_env_vars:
# - WINDOWS_CERTIFICATE_NAME
# Phase 4: Sign & Package
- sign_windows
- package_windows
# Phase 5: Upload (optional)
- upload_gcs
# Required environment variables
# Note: CHROMIUM_SRC can be provided via --chromium-src CLI flag, YAML config, or env var
# Note: GCS_BUCKET defaults to 'nxtscape' if not set
# Note: GCS_SERVICE_ACCOUNT_FILE defaults to 'gclient.json' if not set
required_envs:
- CODE_SIGN_TOOL_PATH # Path to CodeSignTool directory
- ESIGNER_USERNAME # SSL.com eSigner username
- ESIGNER_PASSWORD # SSL.com eSigner password
- ESIGNER_TOTP_SECRET # SSL.com eSigner TOTP secret
# Notification settings
notifications:
slack: true
# Build options
build_options:
# Build mini_installer for creating installer package
build_mini_installer: true
# Create both installer and portable ZIP
create_portable: true
create_installer: true

View File

@@ -1,38 +1,36 @@
# Nxtscape Release Build Configuration
# BrowserOS macOS Sign & Package Configuration
#
# This config signs and packages an already-built macOS application.
# Use this when you have a pre-built app and only need to sign/package it.
#
# Expects: out/Default_universal/BrowserOS.app (or architecture-specific path)
#
# Environment Variables:
# Use !env tag to reference environment variables:
# Example: chromium_src: !env CHROMIUM_SRC
build:
type: release
# architecture: arm64 # Default single architecture
architectures: [x64, arm64] # List for multi-architecture builds
universal: true # Create universal binary from multiple architectures
architecture: universal # Expects universal binary by default
gn_flags:
file: build/config/gn/flags.macos.release.gn
steps:
clean: false
git_setup: false
apply_patches: false
build: false
sign: true
package: true
# Explicit module execution order (partial pipeline - sign & package only)
modules:
- sign_macos
- package_macos
paths:
root_dir: .
# chromium_src: ../chromium-src
# Environment-specific settings
env:
PYTHONPATH: scripts
# Signing configuration (requires environment variables)
signing:
require_env_vars:
- MACOS_CERTIFICATE_NAME
- PROD_MACOS_NOTARIZATION_APPLE_ID
- PROD_MACOS_NOTARIZATION_TEAM_ID
- PROD_MACOS_NOTARIZATION_PWD
# Required environment variables
# Note: CHROMIUM_SRC can be provided via --chromium-src CLI flag, YAML config, or env var
# Note: GCS_BUCKET defaults to 'nxtscape' if not set
required_envs:
- MACOS_CERTIFICATE_NAME
- PROD_MACOS_NOTARIZATION_APPLE_ID
- PROD_MACOS_NOTARIZATION_TEAM_ID
- PROD_MACOS_NOTARIZATION_PWD
# Notification settings
notifications:
slack: true # Enable Slack notifications for release builds
slack: true # Enable Slack notifications for signing operations

View File

@@ -1,43 +1,35 @@
# Nxtscape Windows Sign & Package Configuration
# BrowserOS Windows Sign & Package Configuration
#
# This config signs and packages an already-built Windows application.
# Use this when you have a pre-built app and only need to sign/package it.
#
# Expects: out/Default/chrome.exe and mini_installer.exe
#
# Environment Variables:
# Use !env tag to reference environment variables:
# Example: certificate_name: !env WINDOWS_CERTIFICATE_NAME
build:
type: release
architecture: x64 # Windows default architecture
# No universal builds on Windows
architecture: x64 # Windows x64
gn_flags:
file: build/config/gn/flags.windows.release.gn
steps:
clean: false
git_setup: false
apply_patches: false
build: false
sign: true # Enable signing with eSigner CKA
package: true
# Explicit module execution order (partial pipeline - sign & package only)
modules:
- sign_windows
- package_windows
paths:
root_dir: .
# chromium_src: C:\Users\YourName\chromium\src
# Environment-specific settings
env:
PYTHONPATH: scripts
# Signing configuration (using eSigner CKA certificate in Windows store)
signing:
certificate_name: "FELAFAX, INC." # Your certificate subject name from SSL.com
# Or use environment variable:
# require_env_vars:
# - WINDOWS_CERTIFICATE_NAME
# Required environment variables
# Note: CHROMIUM_SRC can be provided via --chromium-src CLI flag, YAML config, or env var
# Note: GCS_BUCKET defaults to 'nxtscape' if not set
required_envs:
- CODE_SIGN_TOOL_PATH # Path to CodeSignTool directory
- ESIGNER_USERNAME # SSL.com eSigner username
- ESIGNER_PASSWORD # SSL.com eSigner password
- ESIGNER_TOTP_SECRET # SSL.com eSigner TOTP secret
# Notification settings
notifications:
slack: true
# Build options
build_options:
# Build mini_installer for creating installer package
build_mini_installer: true
# Create both installer and portable ZIP
create_portable: true
create_installer: true
slack: true # Enable Slack notifications for signing operations

View File

@@ -1,253 +0,0 @@
#!/usr/bin/env python3
"""
Build context dataclass to hold all build state
"""
import time
from pathlib import Path
from dataclasses import dataclass
from utils import (
log_error,
log_warning,
get_platform,
get_platform_arch,
get_executable_extension,
join_paths,
IS_WINDOWS,
IS_MACOS,
)
@dataclass
class BuildContext:
"""Simple dataclass to hold all build state"""
root_dir: Path
chromium_src: Path = Path()
out_dir: str = "out/Default"
architecture: str = "" # Will be set in __post_init__
build_type: str = "debug"
apply_patches: bool = False
sign_package: bool = False
package: bool = False
build: bool = False
chromium_version: str = ""
nxtscape_version: str = ""
nxtscape_chromium_version: str = ""
start_time: float = 0.0
# App names - will be set based on platform
CHROMIUM_APP_NAME: str = ""
NXTSCAPE_APP_NAME: str = ""
NXTSCAPE_APP_BASE_NAME: str = "BrowserOS" # Base name without extension
# Third party
SPARKLE_VERSION: str = "2.7.0"
def __post_init__(self):
"""Load version files and set platform/architecture-specific configurations"""
# Set platform-specific defaults
if not self.architecture:
self.architecture = get_platform_arch()
# Set platform-specific app names
if IS_WINDOWS:
self.CHROMIUM_APP_NAME = f"chrome{get_executable_extension()}"
self.NXTSCAPE_APP_NAME = (
f"{self.NXTSCAPE_APP_BASE_NAME}{get_executable_extension()}"
)
elif IS_MACOS:
self.CHROMIUM_APP_NAME = "Chromium.app"
self.NXTSCAPE_APP_NAME = f"{self.NXTSCAPE_APP_BASE_NAME}.app"
else:
self.CHROMIUM_APP_NAME = "chrome"
self.NXTSCAPE_APP_NAME = self.NXTSCAPE_APP_BASE_NAME.lower()
# Set architecture-specific output directory with platform separator
if IS_WINDOWS:
self.out_dir = f"out\\Default_{self.architecture}"
else:
self.out_dir = f"out/Default_{self.architecture}"
version_dict = {}
if not self.chromium_version:
# Read from VERSION file
version_file = join_paths(self.root_dir, "CHROMIUM_VERSION")
if version_file.exists():
# Parse VERSION file format: MAJOR=137\nMINOR=0\nBUILD=7151\nPATCH=69
for line in version_file.read_text().strip().split("\n"):
key, value = line.split("=")
version_dict[key] = value
# Construct chromium_version as MAJOR.MINOR.BUILD.PATCH
self.chromium_version = f"{version_dict['MAJOR']}.{version_dict['MINOR']}.{version_dict['BUILD']}.{version_dict['PATCH']}"
if not self.nxtscape_version:
# Read from NXTSCAPE_VERSION file
version_file = join_paths(
self.root_dir, "build", "config", "NXTSCAPE_VERSION"
)
if version_file.exists():
self.nxtscape_version = version_file.read_text().strip()
# Set nxtscape_chromium_version as chromium version with BUILD + nxtscape_version
if self.chromium_version and self.nxtscape_version and version_dict:
# Calculate new BUILD number by adding nxtscape_version to original BUILD
new_build = int(version_dict["BUILD"]) + int(self.nxtscape_version)
self.nxtscape_chromium_version = f"{version_dict['MAJOR']}.{version_dict['MINOR']}.{new_build}.{version_dict['PATCH']}"
# Determine chromium source directory
if self.chromium_src and self.chromium_src.exists():
log_warning(f"📁 Using provided Chromium source: {self.chromium_src}")
else:
log_warning(f"⚠️ Provided path does not exist: {self.chromium_src}")
self.chromium_src = join_paths(self.root_dir, "chromium_src")
if not self.chromium_src.exists():
log_error(
f"⚠️ Default Chromium source path does not exist: {self.chromium_src}"
)
raise FileNotFoundError(
f"Chromium source path does not exist: {self.chromium_src}"
)
self.start_time = time.time()
# Path getter methods
def get_config_dir(self) -> Path:
"""Get build config directory"""
return join_paths(self.root_dir, "build", "config")
def get_gn_config_dir(self) -> Path:
"""Get GN config directory"""
return join_paths(self.get_config_dir(), "gn")
def get_gn_flags_file(self) -> Path:
"""Get GN flags file for current build type"""
platform = get_platform()
return join_paths(
self.get_gn_config_dir(), f"flags.{platform}.{self.build_type}.gn"
)
def get_copy_resources_config(self) -> Path:
"""Get copy resources configuration file"""
return join_paths(self.get_config_dir(), "copy_resources.yaml")
def get_patches_dir(self) -> Path:
"""Get patches directory"""
return join_paths(self.root_dir, "patches")
def get_nxtscape_patches_dir(self) -> Path:
"""Get Nxtscape specific patches directory"""
return join_paths(self.get_patches_dir(), "browseros")
def get_sparkle_dir(self) -> Path:
"""Get Sparkle directory"""
return join_paths(self.chromium_src, "third_party", "sparkle")
def get_sparkle_url(self) -> str:
"""Get Sparkle download URL"""
return f"https://github.com/sparkle-project/Sparkle/releases/download/{self.SPARKLE_VERSION}/Sparkle-{self.SPARKLE_VERSION}.tar.xz"
def get_resources_dir(self) -> Path:
"""Get resources directory"""
return join_paths(self.root_dir, "resources")
def get_resources_files_dir(self) -> Path:
"""Get resources files directory"""
return join_paths(self.get_resources_dir(), "files")
def get_resources_gen_dir(self) -> Path:
"""Get generated resources directory"""
return join_paths(self.get_resources_dir(), "gen")
def get_chrome_resources_dir(self) -> Path:
"""Get Chrome browser resources directory"""
return join_paths(self.chromium_src, "chrome", "browser", "resources")
def get_chrome_theme_dir(self) -> Path:
"""Get Chrome theme directory"""
return join_paths(self.chromium_src, "chrome", "app", "theme", "chromium")
def get_chrome_app_dir(self) -> Path:
"""Get Chrome app directory"""
return join_paths(self.chromium_src, "chrome", "app")
def get_entitlements_dir(self) -> Path:
"""Get entitlements directory"""
return join_paths(self.root_dir, "resources", "entitlements")
def get_dmg_dir(self) -> Path:
"""Get DMG output directory (macOS only)"""
return join_paths(self.chromium_src, self.out_dir, "dmg")
def get_pkg_dmg_path(self) -> Path:
"""Get pkg-dmg tool path (macOS only)"""
return join_paths(self.chromium_src, "chrome", "installer", "mac", "pkg-dmg")
def get_app_path(self) -> Path:
    """Return the path of the built application in the output directory.

    macOS debug builds may produce a "Dev"-branded bundle name; when such a
    bundle exists on disk it takes precedence over the default app name.
    """
    if IS_MACOS and self.build_type == "debug":
        dev_bundle_name = f"{self.NXTSCAPE_APP_BASE_NAME} Dev.app"
        dev_bundle = join_paths(self.chromium_src, self.out_dir, dev_bundle_name)
        if dev_bundle.exists():
            return dev_bundle
    return join_paths(self.chromium_src, self.out_dir, self.NXTSCAPE_APP_NAME)
def get_chromium_app_path(self) -> Path:
    """Return the path of the unbranded Chromium app in the output directory."""
    chromium_app = join_paths(self.chromium_src, self.out_dir, self.CHROMIUM_APP_NAME)
    return chromium_app
def get_gn_args_file(self) -> Path:
    """Return the args.gn file location for the current output directory."""
    args_file = join_paths(self.chromium_src, self.out_dir, "args.gn")
    return args_file
def get_notarization_zip(self) -> Path:
    """Return the archive path used for Apple notarization (macOS only)."""
    zip_path = join_paths(self.chromium_src, self.out_dir, "notarize.zip")
    return zip_path
def get_dmg_name(self, signed=False) -> str:
    """Return the DMG filename: <app>_<version>_<arch>[_signed].dmg.

    The architecture tag is whatever ``self.architecture`` holds, including
    the literal value "universal" for universal builds, so one template
    covers every case.
    """
    suffix = "_signed" if signed else ""
    return (
        f"{self.NXTSCAPE_APP_BASE_NAME}_"
        f"{self.nxtscape_chromium_version}_"
        f"{self.architecture}{suffix}.dmg"
    )
def get_nxtscape_chromium_version(self) -> str:
    """Get the Nxtscape Chromium version string (used e.g. in DMG filenames);
    distinct from get_nxtscape_version(), which returns the product version."""
    return self.nxtscape_chromium_version
def get_nxtscape_version(self) -> str:
    """Return the Nxtscape product version string."""
    version = self.nxtscape_version
    return version
def get_app_base_name(self) -> str:
    """Return the application base name, without the .app extension."""
    base_name = self.NXTSCAPE_APP_BASE_NAME
    return base_name
def get_dist_dir(self) -> Path:
    """Return the versioned distribution output directory."""
    dist_dir = join_paths(self.root_dir, "dist", self.nxtscape_version)
    return dist_dir
# Dev CLI specific methods
def get_dev_patches_dir(self) -> Path:
    """Return the directory holding per-file chromium patches (dev CLI)."""
    dev_patches = join_paths(self.root_dir, "chromium_patches")
    return dev_patches
def get_chromium_replace_files_dir(self) -> Path:
    """Return the directory of whole-file replacements for the chromium tree."""
    replace_dir = join_paths(self.root_dir, "chromium_files")
    return replace_dir
def get_features_yaml_path(self) -> Path:
    """Return the location of features.yaml at the project root."""
    features_path = join_paths(self.root_dir, "features.yaml")
    return features_path
def get_patch_path_for_file(self, file_path: str) -> Path:
    """Map a chromium-relative file path to its patch file location."""
    patches_dir = self.get_dev_patches_dir()
    return join_paths(patches_dir, file_path)

View File

@@ -1,217 +0,0 @@
#!/usr/bin/env python3
"""
Dev CLI - Chromium patch management tool
A git-like patch management system for maintaining patches against Chromium.
Enables extracting, applying, and managing patches across Chromium upgrades.
"""
import click
import os
import sys
import yaml
from pathlib import Path
from typing import Optional, Dict, Any
from dataclasses import dataclass
# Add build directory to path for imports
sys.path.insert(0, str(Path(__file__).parent))
from context import BuildContext
from utils import log_info, log_error, log_success, log_warning, join_paths
@dataclass
class DevCliConfig:
    """Configuration for Dev CLI from various sources."""

    # Path to the Chromium source checkout; None until resolved from a source.
    chromium_src: Optional[Path] = None
    # Whether patch operations commit automatically (default: off).
    auto_commit: bool = False
    # Whether the CLI may prompt the user (default: on).
    interactive: bool = True

    @classmethod
    def load(cls, cli_chromium_src: Optional[Path] = None) -> "DevCliConfig":
        """Load configuration from various sources with precedence:
        1. CLI arguments (highest priority)
        2. Environment variables
        3. Config file
        4. Defaults (lowest priority)
        """
        config = cls()
        # Load from config file if exists (lowest-precedence override).
        config_file = Path.cwd() / ".dev-cli.yaml"
        if config_file.exists():
            try:
                with open(config_file, "r") as f:
                    file_config = yaml.safe_load(f)
                if file_config and "defaults" in file_config:
                    defaults = file_config["defaults"]
                    if "chromium_src" in defaults:
                        config.chromium_src = Path(defaults["chromium_src"])
                    # Missing keys fall back to the dataclass defaults.
                    config.auto_commit = defaults.get("auto_commit", False)
                    config.interactive = defaults.get("interactive", True)
            except Exception as e:
                # Best-effort: a malformed config file is logged, not fatal.
                log_warning(f"Failed to load config file: {e}")
        # Override with environment variables.
        if "DEV_CLI_CHROMIUM_SRC" in os.environ:
            config.chromium_src = Path(os.environ["DEV_CLI_CHROMIUM_SRC"])
        if "DEV_CLI_AUTO_COMMIT" in os.environ:
            # Accept common truthy spellings; anything else means False.
            config.auto_commit = os.environ["DEV_CLI_AUTO_COMMIT"].lower() in (
                "true",
                "1",
                "yes",
            )
        if "DEV_CLI_INTERACTIVE" in os.environ:
            config.interactive = os.environ["DEV_CLI_INTERACTIVE"].lower() in (
                "true",
                "1",
                "yes",
            )
        # Override with CLI arguments (highest priority).
        if cli_chromium_src:
            config.chromium_src = cli_chromium_src
        return config
def create_build_context(chromium_src: Optional[Path] = None) -> Optional[BuildContext]:
    """Create BuildContext with dev CLI extensions.

    Returns None (after logging an error) when the Chromium source directory
    is missing or the context cannot be constructed; callers must handle None.
    """
    config = DevCliConfig.load(chromium_src)
    if not config.chromium_src:
        log_error("Chromium source directory not specified")
        log_info("Use --chromium-src or set DEV_CLI_CHROMIUM_SRC environment variable")
        return None
    if not config.chromium_src.exists():
        log_error(f"Chromium source directory does not exist: {config.chromium_src}")
        return None
    # For dev CLI, we just need it to be a git repository.
    # Don't enforce strict Chromium structure.
    if not (config.chromium_src / ".git").exists():
        log_warning(f"Warning: Not a git repository: {config.chromium_src}")
        # Continue anyway - patches might still work.
    try:
        ctx = BuildContext(
            root_dir=Path.cwd(),
            chromium_src=config.chromium_src,
            architecture="",  # Not needed for patch operations
            build_type="debug",  # Not needed for patch operations
        )
        # Store config in context for access by commands.
        ctx.dev_config = config
        return ctx
    except Exception as e:
        log_error(f"Failed to create build context: {e}")
        return None
# Root click group; its docstring below is the user-visible --help text,
# so it is kept verbatim.
@click.group()
@click.option(
    "--chromium-src",
    "-S",
    type=click.Path(exists=True, path_type=Path),
    help="Path to Chromium source directory",
)
@click.option("--verbose", "-v", is_flag=True, help="Enable verbose output")
@click.option("--quiet", "-q", is_flag=True, help="Suppress non-essential output")
@click.pass_context
def cli(ctx, chromium_src, verbose, quiet):
    """Dev CLI - Chromium patch management tool
    This tool provides git-like commands for managing patches against Chromium:
    \b
    Extract patches from commits:
    dev extract commit HEAD
    dev extract range HEAD~5 HEAD
    \b
    Apply patches:
    dev apply all
    dev apply feature llm-chat
    \b
    Manage features:
    dev feature list
    dev feature add my-feature HEAD
    dev feature show my-feature
    """
    # Store options in context for subcommands.
    ctx.ensure_object(dict)
    ctx.obj["chromium_src"] = chromium_src
    ctx.obj["verbose"] = verbose
    ctx.obj["quiet"] = quiet
# Import and register subcommand groups at import time.
# These will be created in the next step.
try:
    from modules.dev_cli import extract, apply, feature

    cli.add_command(extract.extract_group)
    cli.add_command(apply.apply_group)
    cli.add_command(feature.feature_group)
except ImportError as e:
    # During initial setup, modules might not exist yet; the CLI still runs
    # with only its built-in commands.
    log_warning(f"Some modules not yet available: {e}")
# Add placeholder commands for testing
@cli.command()
@click.pass_context
def status(ctx):
    """Show dev CLI status"""
    log_info("Dev CLI Status")
    log_info("-" * 40)
    build_ctx = create_build_context(ctx.obj.get("chromium_src"))
    if build_ctx:
        log_success(f"Chromium source: {build_ctx.chromium_src}")
        # Check for patches directory.
        # NOTE(review): this looks in root_dir/"chromium_src", while the
        # Context helper uses root_dir/"chromium_patches" — confirm which
        # directory name is intended.
        patches_dir = build_ctx.root_dir / "chromium_src"
        if patches_dir.exists():
            patch_count = len(list(patches_dir.rglob("*.patch")))
            log_info(f"Individual patches: {patch_count}")
        else:
            log_warning("No patches directory found")
        # Check for features.yaml.
        features_file = build_ctx.root_dir / "features.yaml"
        if features_file.exists():
            with open(features_file) as f:
                features = yaml.safe_load(f)
            feature_count = len(features.get("features", {}))
            log_info(f"Features defined: {feature_count}")
        else:
            log_warning("No features.yaml found")
    else:
        log_error("Failed to create build context")
def main():
    """Main entry point: run the CLI, mapping interrupts/errors to exit(1)."""
    try:
        cli()
    except KeyboardInterrupt:
        log_warning("\nInterrupted by user")
        sys.exit(1)
    except Exception as e:
        # Show a traceback only when verbose was requested on the command line.
        if "--verbose" in sys.argv or "-v" in sys.argv:
            import traceback

            traceback.print_exc()
        else:
            log_error(f"Error: {e}")
        # NOTE(review): exits non-zero for any unhandled error — confirm the
        # verbose path is also meant to exit(1).
        sys.exit(1)


if __name__ == "__main__":
    main()  # Script entry point when executed directly.

View File

@@ -1,331 +1,355 @@
version: '1.0'
version: "1.0"
features:
add-sparkle-info-plist-keys:
description: 'patch: app-info.plist changes'
description: "patch: app-info.plist changes"
files:
- chrome/app/app-Info.plist
- chrome/app/app-Info.plist
adding-new-vector-icons:
description: 'patch: adding-new-vector-icons'
description: "patch: adding-new-vector-icons"
files:
- components/vector_icons/BUILD.gn
- components/vector_icons/chat_orange.icon
- components/vector_icons/clash_of_gpts.icon
- components/vector_icons/BUILD.gn
- components/vector_icons/chat_orange.icon
- components/vector_icons/clash_of_gpts.icon
branding-file-updates:
description: browseros branding for file paths
files:
- chrome/common/chrome_constants.cc
- chrome/common/chrome_paths_linux.cc
- chrome/install_static/chromium_install_modes.cc
- chrome/install_static/chromium_install_modes.h
- components/os_crypt/sync/keychain_password_mac.mm
- chrome/common/chrome_constants.cc
- chrome/common/chrome_paths_linux.cc
- chrome/install_static/chromium_install_modes.cc
- chrome/install_static/chromium_install_modes.h
- components/os_crypt/sync/keychain_password_mac.mm
branding-resources:
description: browseros branding resources and assets
files:
- chrome/app/chromium_strings.grd
- chrome/app/settings_chromium_strings.grdp
- chrome/app/theme/chromium/BRANDING
- chrome/app/theme/chromium/chromeos/
- chrome/app/theme/chromium/chromium.ai
- chrome/app/theme/chromium/linux/
- chrome/app/theme/chromium/mac/
- chrome/app/theme/chromium/product_logo.ai
- chrome/app/theme/chromium/product_logo.svg
- chrome/app/theme/chromium/product_logo.png
- chrome/app/theme/chromium/product_logo_16.png
- chrome/app/theme/chromium/product_logo_22.png
- chrome/app/theme/chromium/product_logo_22_mono.png
- chrome/app/theme/chromium/product_logo_24.png
- chrome/app/theme/chromium/product_logo_32.png
- chrome/app/theme/chromium/product_logo_48.png
- chrome/app/theme/chromium/product_logo_64.png
- chrome/app/theme/chromium/product_logo_128.png
- chrome/app/theme/chromium/product_logo_192.png
- chrome/app/theme/chromium/product_logo_256.png
- chrome/app/theme/chromium/product_logo_animation.svg
- chrome/app/theme/chromium/product_logo_name_22.png
- chrome/app/theme/chromium/product_logo_name_22_2x.png
- chrome/app/theme/chromium/product_logo_name_22_white.png
- chrome/app/theme/chromium/product_logo_name_22_white_2x.png
- chrome/app/theme/chromium/win/
- chrome/app/theme/default_100_percent/chromium/
- chrome/app/theme/default_200_percent/chromium/
- chrome/enterprise_companion/branding.gni
- chrome/app/chromium_strings.grd
- chrome/app/settings_chromium_strings.grdp
- chrome/app/theme/chromium/BRANDING
- chrome/app/theme/chromium/chromeos/
- chrome/app/theme/chromium/chromium.ai
- chrome/app/theme/chromium/linux/
- chrome/app/theme/chromium/mac/
- chrome/app/theme/chromium/product_logo.ai
- chrome/app/theme/chromium/product_logo.svg
- chrome/app/theme/chromium/product_logo.png
- chrome/app/theme/chromium/product_logo_16.png
- chrome/app/theme/chromium/product_logo_22.png
- chrome/app/theme/chromium/product_logo_22_mono.png
- chrome/app/theme/chromium/product_logo_24.png
- chrome/app/theme/chromium/product_logo_32.png
- chrome/app/theme/chromium/product_logo_48.png
- chrome/app/theme/chromium/product_logo_64.png
- chrome/app/theme/chromium/product_logo_128.png
- chrome/app/theme/chromium/product_logo_192.png
- chrome/app/theme/chromium/product_logo_256.png
- chrome/app/theme/chromium/product_logo_animation.svg
- chrome/app/theme/chromium/product_logo_name_22.png
- chrome/app/theme/chromium/product_logo_name_22_2x.png
- chrome/app/theme/chromium/product_logo_name_22_white.png
- chrome/app/theme/chromium/product_logo_name_22_white_2x.png
- chrome/app/theme/chromium/win/
- chrome/app/theme/default_100_percent/chromium/
- chrome/app/theme/default_200_percent/chromium/
- chrome/enterprise_companion/branding.gni
ai-settings-page:
description: llm settings page
files:
- chrome/browser/extensions/api/settings_private/prefs_util.cc
- chrome/browser/prefs/browser_prefs.cc
- chrome/browser/prefs/browser_prefs.h
- chrome/browser/resources/settings/BUILD.gn
- chrome/browser/resources/settings/nxtscape_page/nxtscape_page.html
- chrome/browser/resources/settings/nxtscape_page/nxtscape_page.ts
- chrome/browser/resources/settings/route.ts
- chrome/browser/resources/settings/router.ts
- chrome/browser/resources/settings/settings.ts
- chrome/browser/resources/settings/settings_main/settings_main.html
- chrome/browser/resources/settings/settings_main/settings_main.ts
- chrome/browser/resources/settings/settings_menu/settings_menu.html
- chrome/common/pref_names.h
- chrome/browser/extensions/api/settings_private/prefs_util.cc
- chrome/browser/prefs/browser_prefs.cc
- chrome/browser/prefs/browser_prefs.h
- chrome/browser/resources/settings/BUILD.gn
- chrome/browser/resources/settings/nxtscape_page/nxtscape_page.html
- chrome/browser/resources/settings/nxtscape_page/nxtscape_page.ts
- chrome/browser/resources/settings/route.ts
- chrome/browser/resources/settings/router.ts
- chrome/browser/resources/settings/settings.ts
- chrome/browser/resources/settings/settings_main/settings_main.html
- chrome/browser/resources/settings/settings_main/settings_main.ts
- chrome/browser/resources/settings/settings_menu/settings_menu.html
- chrome/common/pref_names.h
api:
description: browseros API
files:
- chrome/browser/extensions/BUILD.gn
- chrome/browser/extensions/api/browser_os/browser_os_api.cc
- chrome/browser/extensions/api/browser_os/browser_os_api.h
- chrome/browser/extensions/api/browser_os/browser_os_api_helpers.cc
- chrome/browser/extensions/api/browser_os/browser_os_api_helpers.h
- chrome/browser/extensions/api/browser_os/browser_os_api_utils.cc
- chrome/browser/extensions/api/browser_os/browser_os_api_utils.h
- chrome/browser/extensions/api/browser_os/browser_os_change_detector.cc
- chrome/browser/extensions/api/browser_os/browser_os_change_detector.h
- chrome/browser/extensions/api/browser_os/browser_os_content_processor.cc
- chrome/browser/extensions/api/browser_os/browser_os_content_processor.h
- chrome/browser/extensions/api/browser_os/browser_os_snapshot_processor.cc
- chrome/browser/extensions/api/browser_os/browser_os_snapshot_processor.h
- chrome/browser/extensions/chrome_extensions_browser_api_provider.cc
- chrome/browser/media/extension_media_access_handler.cc
- chrome/common/extensions/api/_api_features.json
- chrome/common/extensions/api/_permission_features.json
- chrome/common/extensions/api/api_sources.gni
- chrome/common/extensions/api/browser_os.idl
- chrome/common/extensions/permissions/chrome_api_permissions.cc
- extensions/browser/extension_function_histogram_value.h
- extensions/common/mojom/api_permission_id.mojom
- tools/metrics/histograms/metadata/extensions/enums.xml
- chrome/browser/extensions/BUILD.gn
- chrome/browser/extensions/api/browser_os/browser_os_api.cc
- chrome/browser/extensions/api/browser_os/browser_os_api.h
- chrome/browser/extensions/api/browser_os/browser_os_api_helpers.cc
- chrome/browser/extensions/api/browser_os/browser_os_api_helpers.h
- chrome/browser/extensions/api/browser_os/browser_os_api_utils.cc
- chrome/browser/extensions/api/browser_os/browser_os_api_utils.h
- chrome/browser/extensions/api/browser_os/browser_os_change_detector.cc
- chrome/browser/extensions/api/browser_os/browser_os_change_detector.h
- chrome/browser/extensions/api/browser_os/browser_os_content_processor.cc
- chrome/browser/extensions/api/browser_os/browser_os_content_processor.h
- chrome/browser/extensions/api/browser_os/browser_os_snapshot_processor.cc
- chrome/browser/extensions/api/browser_os/browser_os_snapshot_processor.h
- chrome/browser/extensions/chrome_extensions_browser_api_provider.cc
- chrome/browser/media/extension_media_access_handler.cc
- chrome/common/extensions/api/_api_features.json
- chrome/common/extensions/api/_permission_features.json
- chrome/common/extensions/api/api_sources.gni
- chrome/common/extensions/api/browser_os.idl
- chrome/common/extensions/permissions/chrome_api_permissions.cc
- extensions/browser/extension_function_histogram_value.h
- extensions/common/mojom/api_permission_id.mojom
- tools/metrics/histograms/metadata/extensions/enums.xml
server:
description: browseros server
files:
- chrome/browser/browseros_server/
- base/threading/thread_restrictions.h
- chrome/browser/browseros_server/
- base/threading/thread_restrictions.h
metrics:
description: browseros metrics
files:
- chrome/browser/metrics/chrome_metrics_service_client.cc
- chrome/browser/prefs/browser_prefs.cc
- chrome/browser/profiles/chrome_browser_main_extra_parts_profiles.cc
- chrome/browser/ui/BUILD.gn
- chrome/browser/ui/webui/settings/browseros_metrics_handler.cc
- chrome/browser/ui/webui/settings/browseros_metrics_handler.h
- chrome/browser/ui/webui/settings/settings_ui.cc
- chrome/common/pref_names.h
- components/metrics/browseros_metrics/BUILD.gn
- components/metrics/browseros_metrics/DEPS
- components/metrics/browseros_metrics/browseros_metrics.cc
- components/metrics/browseros_metrics/browseros_metrics.h
- components/metrics/browseros_metrics/browseros_metrics_prefs.cc
- components/metrics/browseros_metrics/browseros_metrics_prefs.h
- components/metrics/browseros_metrics/browseros_metrics_service.cc
- components/metrics/browseros_metrics/browseros_metrics_service.h
- components/metrics/browseros_metrics/browseros_metrics_service_factory.cc
- components/metrics/browseros_metrics/browseros_metrics_service_factory.h
- chrome/browser/metrics/chrome_metrics_service_client.cc
- chrome/browser/prefs/browser_prefs.cc
- chrome/browser/profiles/chrome_browser_main_extra_parts_profiles.cc
- chrome/browser/ui/BUILD.gn
- chrome/browser/ui/webui/settings/browseros_metrics_handler.cc
- chrome/browser/ui/webui/settings/browseros_metrics_handler.h
- chrome/browser/ui/webui/settings/settings_ui.cc
- chrome/common/pref_names.h
- components/metrics/browseros_metrics/BUILD.gn
- components/metrics/browseros_metrics/DEPS
- components/metrics/browseros_metrics/browseros_metrics.cc
- components/metrics/browseros_metrics/browseros_metrics.h
- components/metrics/browseros_metrics/browseros_metrics_prefs.cc
- components/metrics/browseros_metrics/browseros_metrics_prefs.h
- components/metrics/browseros_metrics/browseros_metrics_service.cc
- components/metrics/browseros_metrics/browseros_metrics_service.h
- components/metrics/browseros_metrics/browseros_metrics_service_factory.cc
- components/metrics/browseros_metrics/browseros_metrics_service_factory.h
ota-updater:
description: extensions ota updater
files:
- chrome/browser/extensions/BUILD.gn
- chrome/browser/extensions/api/developer_private/extension_info_generator_shared.cc
- chrome/browser/extensions/browseros_extension_constants.h
- chrome/browser/extensions/browseros_external_loader.cc
- chrome/browser/extensions/browseros_external_loader.h
- chrome/browser/extensions/chrome_extension_registrar_delegate.cc
- chrome/browser/extensions/extension_web_ui_override_registrar.cc
- chrome/browser/extensions/external_provider_impl.cc
- chrome/browser/ui/extensions/settings_overridden_params_providers.cc
- chrome/browser/extensions/BUILD.gn
- chrome/browser/extensions/api/developer_private/extension_info_generator_shared.cc
- chrome/browser/extensions/browseros_extension_constants.h
- chrome/browser/extensions/browseros_external_loader.cc
- chrome/browser/extensions/browseros_external_loader.h
- chrome/browser/extensions/chrome_extension_registrar_delegate.cc
- chrome/browser/extensions/extension_web_ui_override_registrar.cc
- chrome/browser/extensions/external_provider_impl.cc
- chrome/browser/ui/extensions/settings_overridden_params_providers.cc
chrome-importer:
description: chrome importer
files:
- chrome/app/generated_resources.grd
- chrome/app/settings_strings.grdp
- chrome/browser/extensions/api/settings_private/prefs_util.cc
- chrome/browser/importer/external_process_importer_client.cc
- chrome/browser/importer/external_process_importer_client.h
- chrome/browser/importer/importer_list.cc
- chrome/browser/importer/importer_uma.cc
- chrome/browser/importer/in_process_importer_bridge.cc
- chrome/browser/importer/in_process_importer_bridge.h
- chrome/browser/importer/profile_writer.cc
- chrome/browser/importer/profile_writer.h
- chrome/browser/resources/settings/people_page/import_data_browser_proxy.ts
- chrome/browser/resources/settings/people_page/import_data_dialog.html
- chrome/browser/ui/webui/settings/import_data_handler.cc
- chrome/browser/ui/webui/settings/settings_localized_strings_provider.cc
- chrome/browser/ui/webui/settings/settings_ui.cc
- chrome/common/importer/importer_bridge.h
- chrome/common/importer/importer_data_types.h
- chrome/common/importer/importer_type.h
- chrome/common/importer/profile_import.mojom
- chrome/common/importer/profile_import_process_param_traits_macros.h
- chrome/common/pref_names.h
- chrome/utility/BUILD.gn
- chrome/utility/importer/chrome_importer.cc
- chrome/utility/importer/chrome_importer.h
- chrome/utility/importer/external_process_importer_bridge.cc
- chrome/utility/importer/external_process_importer_bridge.h
- chrome/utility/importer/importer_creator.cc
- tools/metrics/histograms/metadata/sql/histograms.xml
- chrome/app/generated_resources.grd
- chrome/app/settings_strings.grdp
- chrome/browser/extensions/api/settings_private/prefs_util.cc
- chrome/browser/importer/external_process_importer_client.cc
- chrome/browser/importer/external_process_importer_client.h
- chrome/browser/importer/importer_list.cc
- chrome/browser/importer/importer_uma.cc
- chrome/browser/importer/in_process_importer_bridge.cc
- chrome/browser/importer/in_process_importer_bridge.h
- chrome/browser/importer/profile_writer.cc
- chrome/browser/importer/profile_writer.h
- chrome/browser/resources/settings/people_page/import_data_browser_proxy.ts
- chrome/browser/resources/settings/people_page/import_data_dialog.html
- chrome/browser/ui/webui/settings/import_data_handler.cc
- chrome/browser/ui/webui/settings/settings_localized_strings_provider.cc
- chrome/browser/ui/webui/settings/settings_ui.cc
- chrome/common/importer/importer_bridge.h
- chrome/common/importer/importer_data_types.h
- chrome/common/importer/importer_type.h
- chrome/common/importer/profile_import.mojom
- chrome/common/importer/profile_import_process_param_traits_macros.h
- chrome/common/pref_names.h
- chrome/utility/BUILD.gn
- chrome/utility/importer/chrome_importer.cc
- chrome/utility/importer/chrome_importer.h
- chrome/utility/importer/external_process_importer_bridge.cc
- chrome/utility/importer/external_process_importer_bridge.h
- chrome/utility/importer/importer_creator.cc
- tools/metrics/histograms/metadata/sql/histograms.xml
chrome-version-updater:
description: 'patch: chrome version update'
description: "patch: chrome version update"
files:
- chrome/VERSION
- chrome/VERSION
default-light-mode:
description: enable light mode as default theme
files:
- chrome/browser/themes/theme_service_factory.cc
- chrome/browser/themes/theme_service_factory.cc
disable-chrome-labs-pinning:
description: 'patch: disable-chrome-labs-pinning'
description: "patch: disable-chrome-labs-pinning"
files:
- chrome/browser/ui/toolbar/pinned_toolbar/pinned_toolbar_actions_model.cc
- chrome/browser/ui/toolbar/toolbar_pref_names.cc
- chrome/browser/ui/toolbar/pinned_toolbar/pinned_toolbar_actions_model.cc
- chrome/browser/ui/toolbar/toolbar_pref_names.cc
disable-google-key-info-bar:
description: 'patch: disable-google-key-info-bar'
description: "patch: disable-google-key-info-bar"
files:
- chrome/browser/ui/startup/google_api_keys_infobar_delegate.cc
- chrome/browser/ui/startup/google_api_keys_infobar_delegate.cc
disable-info-bar-in-cdp:
description: 'patch: disable-info-bar-in-cdp'
description: "patch: disable-info-bar-in-cdp"
files:
- chrome/browser/extensions/api/debugger/debugger_api.cc
- chrome/browser/extensions/api/debugger/debugger_api.cc
disable-sidepanel-animation:
description: disable sidepanel animation
files:
- chrome/browser/ui/views/side_panel/side_panel.cc
- chrome/browser/ui/views/side_panel/side_panel.h
- chrome/browser/ui/views/side_panel/side_panel.cc
- chrome/browser/ui/views/side_panel/side_panel.h
disable-user-gesture-restriction-on-sidepanel:
description: 'patch: disable-user-gesture-restriction-on-sidepanel'
description: "patch: disable-user-gesture-restriction-on-sidepanel"
files:
- chrome/browser/extensions/api/side_panel/side_panel_api.cc
- chrome/browser/extensions/api/side_panel/side_panel_api.cc
first-run:
description: first run
files:
- chrome/browser/chrome_browser_main.cc
- chrome/browser/ui/webui/chrome_web_ui_configs.cc
- chrome/browser/ui/webui/nxtscape_first_run.h
- chrome/common/webui_url_constants.cc
- chrome/browser/chrome_browser_main.cc
- chrome/browser/ui/webui/chrome_web_ui_configs.cc
- chrome/browser/ui/webui/nxtscape_first_run.h
- chrome/common/webui_url_constants.cc
llm-chat:
description: llm chat and updates
files:
- chrome/app/chrome_command_ids.h
- chrome/app/generated_resources.grd
- chrome/browser/global_keyboard_shortcuts_mac.mm
- chrome/browser/ui/actions/chrome_action_id.h
- chrome/browser/ui/browser_actions.cc
- chrome/browser/ui/browser_command_controller.cc
- chrome/browser/ui/toolbar/toolbar_pref_names.cc
- chrome/browser/ui/ui_features.cc
- chrome/browser/ui/ui_features.h
- chrome/browser/ui/views/accelerator_table.cc
- chrome/browser/ui/views/side_panel/BUILD.gn
- chrome/browser/ui/views/side_panel/browseros_simple_page_extractor.cc
- chrome/browser/ui/views/side_panel/browseros_simple_page_extractor.h
- chrome/browser/ui/views/side_panel/clash_of_gpts/clash_of_gpts_coordinator.cc
- chrome/browser/ui/views/side_panel/side_panel_entry_id.h
- chrome/browser/ui/views/side_panel/side_panel_prefs.cc
- chrome/browser/ui/views/side_panel/side_panel_util.cc
- chrome/browser/ui/views/side_panel/third_party_llm/third_party_llm_panel_coordinator.cc
- chrome/browser/ui/views/side_panel/third_party_llm/third_party_llm_panel_coordinator.h
- chrome/browser/ui/views/side_panel/third_party_llm/third_party_llm_view.cc
- chrome/browser/ui/views/side_panel/third_party_llm/third_party_llm_view.h
- chrome/browser/ui/webui/side_panel/customize_chrome/customize_toolbar/customize_toolbar.mojom
- chrome/browser/ui/webui/side_panel/customize_chrome/customize_toolbar/customize_toolbar_handler.cc
- chrome/app/chrome_command_ids.h
- chrome/app/generated_resources.grd
- chrome/browser/global_keyboard_shortcuts_mac.mm
- chrome/browser/ui/actions/chrome_action_id.h
- chrome/browser/ui/browser_actions.cc
- chrome/browser/ui/browser_command_controller.cc
- chrome/browser/ui/toolbar/toolbar_pref_names.cc
- chrome/browser/ui/ui_features.cc
- chrome/browser/ui/ui_features.h
- chrome/browser/ui/views/accelerator_table.cc
- chrome/browser/ui/views/side_panel/BUILD.gn
- chrome/browser/ui/views/side_panel/browseros_simple_page_extractor.cc
- chrome/browser/ui/views/side_panel/browseros_simple_page_extractor.h
- chrome/browser/ui/views/side_panel/clash_of_gpts/clash_of_gpts_coordinator.cc
- chrome/browser/ui/views/side_panel/side_panel_entry_id.h
- chrome/browser/ui/views/side_panel/side_panel_prefs.cc
- chrome/browser/ui/views/side_panel/side_panel_util.cc
- chrome/browser/ui/views/side_panel/third_party_llm/third_party_llm_panel_coordinator.cc
- chrome/browser/ui/views/side_panel/third_party_llm/third_party_llm_panel_coordinator.h
- chrome/browser/ui/views/side_panel/third_party_llm/third_party_llm_view.cc
- chrome/browser/ui/views/side_panel/third_party_llm/third_party_llm_view.h
- chrome/browser/ui/webui/side_panel/customize_chrome/customize_toolbar/customize_toolbar.mojom
- chrome/browser/ui/webui/side_panel/customize_chrome/customize_toolbar/customize_toolbar_handler.cc
llm-hub:
description: llm-hub
files:
- chrome/app/chrome_command_ids.h
- chrome/app/generated_resources.grd
- chrome/browser/global_keyboard_shortcuts_mac.mm
- chrome/browser/ui/actions/chrome_action_id.h
- chrome/browser/ui/browser_actions.cc
- chrome/browser/ui/browser_command_controller.cc
- chrome/browser/ui/toolbar/toolbar_pref_names.cc
- chrome/browser/ui/ui_features.cc
- chrome/browser/ui/ui_features.h
- chrome/browser/ui/views/accelerator_table.cc
- chrome/browser/ui/views/side_panel/BUILD.gn
- chrome/browser/ui/views/side_panel/clash_of_gpts/clash_of_gpts_coordinator.cc
- chrome/browser/ui/views/side_panel/clash_of_gpts/clash_of_gpts_coordinator.h
- chrome/browser/ui/views/side_panel/clash_of_gpts/clash_of_gpts_view.cc
- chrome/browser/ui/views/side_panel/clash_of_gpts/clash_of_gpts_view.h
- chrome/browser/ui/views/side_panel/clash_of_gpts/clash_of_gpts_window.cc
- chrome/browser/ui/views/side_panel/clash_of_gpts/clash_of_gpts_window.h
- chrome/browser/ui/views/side_panel/side_panel_entry_id.h
- chrome/browser/ui/views/side_panel/side_panel_prefs.cc
- chrome/browser/ui/views/side_panel/side_panel_util.cc
- chrome/browser/ui/views/toolbar/pinned_action_toolbar_button.cc
- chrome/browser/ui/webui/BUILD.gn
- chrome/browser/ui/webui/chrome_web_ui_configs.cc
- chrome/browser/ui/webui/clash_of_gpts/clash_of_gpts_ui.cc
- chrome/browser/ui/webui/clash_of_gpts/clash_of_gpts_ui.h
- chrome/browser/ui/webui/side_panel/customize_chrome/customize_toolbar/customize_toolbar.mojom
- chrome/browser/ui/webui/side_panel/customize_chrome/customize_toolbar/customize_toolbar_handler.cc
- chrome/common/webui_url_constants.h
- chrome/app/chrome_command_ids.h
- chrome/app/generated_resources.grd
- chrome/browser/global_keyboard_shortcuts_mac.mm
- chrome/browser/ui/actions/chrome_action_id.h
- chrome/browser/ui/browser_actions.cc
- chrome/browser/ui/browser_command_controller.cc
- chrome/browser/ui/toolbar/toolbar_pref_names.cc
- chrome/browser/ui/ui_features.cc
- chrome/browser/ui/ui_features.h
- chrome/browser/ui/views/accelerator_table.cc
- chrome/browser/ui/views/side_panel/BUILD.gn
- chrome/browser/ui/views/side_panel/clash_of_gpts/clash_of_gpts_coordinator.cc
- chrome/browser/ui/views/side_panel/clash_of_gpts/clash_of_gpts_coordinator.h
- chrome/browser/ui/views/side_panel/clash_of_gpts/clash_of_gpts_view.cc
- chrome/browser/ui/views/side_panel/clash_of_gpts/clash_of_gpts_view.h
- chrome/browser/ui/views/side_panel/clash_of_gpts/clash_of_gpts_window.cc
- chrome/browser/ui/views/side_panel/clash_of_gpts/clash_of_gpts_window.h
- chrome/browser/ui/views/side_panel/side_panel_entry_id.h
- chrome/browser/ui/views/side_panel/side_panel_prefs.cc
- chrome/browser/ui/views/side_panel/side_panel_util.cc
- chrome/browser/ui/views/toolbar/pinned_action_toolbar_button.cc
- chrome/browser/ui/webui/BUILD.gn
- chrome/browser/ui/webui/chrome_web_ui_configs.cc
- chrome/browser/ui/webui/clash_of_gpts/clash_of_gpts_ui.cc
- chrome/browser/ui/webui/clash_of_gpts/clash_of_gpts_ui.h
- chrome/browser/ui/webui/side_panel/customize_chrome/customize_toolbar/customize_toolbar.mojom
- chrome/browser/ui/webui/side_panel/customize_chrome/customize_toolbar/customize_toolbar_handler.cc
- chrome/common/webui_url_constants.h
llm-settings-page-updates:
description: 'llm settings page: updates'
description: "llm settings page: updates"
files:
- chrome/browser/resources/settings/BUILD.gn
- chrome/browser/resources/settings/nxtscape_page/models_data.html
- chrome/browser/resources/settings/nxtscape_page/models_data.ts
- chrome/browser/resources/settings/nxtscape_page/nxtscape_page.html
- chrome/browser/resources/settings/nxtscape_page/nxtscape_page.ts
- chrome/browser/resources/settings/BUILD.gn
- chrome/browser/resources/settings/nxtscape_page/models_data.html
- chrome/browser/resources/settings/nxtscape_page/models_data.ts
- chrome/browser/resources/settings/nxtscape_page/nxtscape_page.html
- chrome/browser/resources/settings/nxtscape_page/nxtscape_page.ts
mac-sparkle-updater:
description: "patch: nxtscape-updater-sparkle"
files:
- chrome/BUILD.gn
- chrome/browser/BUILD.gn
- chrome/browser/mac/chrome_browser_main_extra_parts_mac.h
- chrome/browser/mac/chrome_browser_main_extra_parts_mac.mm
- chrome/browser/mac/sparkle_glue.h
- chrome/browser/mac/sparkle_glue.mm
- chrome/browser/mac/su_updater.h
- chrome/browser/sparkle_buildflags.gni
- chrome/browser/ui/BUILD.gn
- chrome/browser/ui/webui/help/sparkle_version_updater_mac.h
- chrome/browser/ui/webui/help/sparkle_version_updater_mac.mm
- chrome/browser/ui/webui/help/version_updater_mac.mm
- third_party/sparkle/
- chrome/BUILD.gn
- chrome/browser/BUILD.gn
- chrome/browser/mac/chrome_browser_main_extra_parts_mac.h
- chrome/browser/mac/chrome_browser_main_extra_parts_mac.mm
- chrome/browser/mac/sparkle_glue.h
- chrome/browser/mac/sparkle_glue.mm
- chrome/browser/mac/su_updater.h
- chrome/browser/sparkle_buildflags.gni
- chrome/browser/ui/BUILD.gn
- chrome/browser/ui/webui/help/sparkle_version_updater_mac.h
- chrome/browser/ui/webui/help/sparkle_version_updater_mac.mm
- chrome/browser/ui/webui/help/version_updater_mac.mm
- third_party/sparkle/
pin-chat-and-hub:
description: pin browseros native panels
files:
- chrome/browser/extensions/browseros_extension_constants.h
- chrome/browser/sync/prefs/chrome_syncable_prefs_database.cc
- chrome/browser/ui/actions/browseros_actions_config.h
- chrome/browser/ui/toolbar/pinned_toolbar/pinned_toolbar_actions_model.cc
- chrome/browser/ui/toolbar/pinned_toolbar/pinned_toolbar_actions_model.h
- chrome/browser/ui/toolbar/toolbar_pref_names.cc
- chrome/browser/ui/toolbar/toolbar_pref_names.h
- chrome/browser/ui/views/toolbar/pinned_action_toolbar_button.cc
- chrome/browser/ui/views/toolbar/pinned_toolbar_actions_container.cc
- chrome/browser/extensions/browseros_extension_constants.h
- chrome/browser/sync/prefs/chrome_syncable_prefs_database.cc
- chrome/browser/ui/actions/browseros_actions_config.h
- chrome/browser/ui/toolbar/pinned_toolbar/pinned_toolbar_actions_model.cc
- chrome/browser/ui/toolbar/pinned_toolbar/pinned_toolbar_actions_model.h
- chrome/browser/ui/toolbar/toolbar_pref_names.cc
- chrome/browser/ui/toolbar/toolbar_pref_names.h
- chrome/browser/ui/views/toolbar/pinned_action_toolbar_button.cc
- chrome/browser/ui/views/toolbar/pinned_toolbar_actions_container.cc
pin-extensions-toolbar:
description: pin browseros extensions to extension toolbar
files:
- chrome/browser/extensions/browseros_extension_constants.h
- chrome/browser/extensions/extension_context_menu_model.cc
- chrome/browser/extensions/extension_management.cc
- chrome/browser/ui/actions/browseros_actions_config.h
- chrome/browser/ui/toolbar/toolbar_actions_model.cc
- chrome/browser/ui/views/side_panel/extensions/extension_side_panel_manager.cc
- chrome/browser/extensions/browseros_extension_constants.h
- chrome/browser/extensions/extension_context_menu_model.cc
- chrome/browser/extensions/extension_management.cc
- chrome/browser/ui/actions/browseros_actions_config.h
- chrome/browser/ui/toolbar/toolbar_actions_model.cc
- chrome/browser/ui/views/side_panel/extensions/extension_side_panel_manager.cc
preferences-settings-page:
description: "patch: settings prefs page"
files:
- chrome/browser/extensions/api/settings_private/prefs_util.cc
- chrome/browser/prefs/browser_prefs.cc
- chrome/browser/resources/settings/BUILD.gn
- chrome/browser/resources/settings/browseros_prefs_page/browseros_prefs_page.html
- chrome/browser/resources/settings/browseros_prefs_page/browseros_prefs_page.ts
- chrome/browser/resources/settings/route.ts
- chrome/browser/resources/settings/router.ts
- chrome/browser/resources/settings/settings.ts
- chrome/browser/resources/settings/settings_main/settings_main.html
- chrome/browser/resources/settings/settings_main/settings_main.ts
- chrome/browser/resources/settings/settings_menu/settings_menu.html
- chrome/browser/ui/views/toolbar/pinned_action_toolbar_button.cc
- chrome/browser/ui/views/toolbar/pinned_action_toolbar_button.h
- chrome/common/pref_names.h
- chrome/browser/extensions/api/settings_private/prefs_util.cc
- chrome/browser/prefs/browser_prefs.cc
- chrome/browser/resources/settings/BUILD.gn
- chrome/browser/resources/settings/browseros_prefs_page/browseros_prefs_page.html
- chrome/browser/resources/settings/browseros_prefs_page/browseros_prefs_page.ts
- chrome/browser/resources/settings/route.ts
- chrome/browser/resources/settings/router.ts
- chrome/browser/resources/settings/settings.ts
- chrome/browser/resources/settings/settings_main/settings_main.html
- chrome/browser/resources/settings/settings_main/settings_main.ts
- chrome/browser/resources/settings/settings_menu/settings_menu.html
- chrome/browser/ui/views/toolbar/pinned_action_toolbar_button.cc
- chrome/browser/ui/views/toolbar/pinned_action_toolbar_button.h
- chrome/common/pref_names.h
ui fixes:
description: "patch: chromium ui fixes"
files:
- chrome/browser/chrome_content_browser_client.cc
- chrome/browser/net/profile_network_context_service.cc
- chrome/browser/resources/settings/about_page/about_page.html
- chrome/browser/resources/settings/about_page/about_page.ts
- chrome/browser/resources/settings/reset_page/reset_profile_dialog.html
- chrome/browser/ui/browser_ui_prefs.cc
- chrome/browser/ui/views/chrome_layout_provider.cc
- chrome/browser/ui/views/infobars/infobar_container_view.cc
- components/bookmarks/browser/bookmark_utils.cc
- components/content_settings/core/browser/cookie_settings.cc
- components/payments/core/payment_prefs.cc
- components/performance_manager/user_tuning/prefs.cc
browseros-version:
description: "patch: browseros version"
files:
- base/version_info/BUILD.gn
- base/version_info/version_info.h
- base/version_info/version_info_values.h.version
- chrome/BROWSEROS_VERSION
- chrome/browser/resources/settings/about_page/about_page.html
- chrome/browser/ui/webui/settings/settings_localized_strings_provider.cc
misc:
description: miscellaneous patches
files:
- chrome/browser/ui/omnibox/chrome_omnibox_client.cc
- chrome/browser/ui/profiles/profile_error_dialog.cc
- chrome/browser/ui/startup/infobar_utils.cc
- chrome/installer/mini_installer/chrome.release
- chrome/updater/branding.gni
- extensions/browser/process_manager.cc
- extensions/browser/process_manager.h
- third_party/blink/renderer/core/frame/navigator.cc
- chrome/browser/ui/omnibox/chrome_omnibox_client.cc
- chrome/browser/ui/profiles/profile_error_dialog.cc
- chrome/browser/ui/startup/infobar_utils.cc
- chrome/installer/mini_installer/chrome.release
- chrome/updater/branding.gni
- extensions/browser/process_manager.cc
- extensions/browser/process_manager.h
- third_party/blink/renderer/core/frame/navigator.cc

View File

@@ -1,2 +0,0 @@
# Build system modules
from .string_replaces import apply_string_replacements

View File

@@ -0,0 +1,20 @@
"""
Apply module - Apply patches to Chromium source.
Provides commands for applying patches:
- apply_all: Apply all patches from patches directory
- apply_feature: Apply patches for a specific feature
- apply_patch: Apply patch for a single file
"""
from .apply_all import apply_all_patches, ApplyAllModule
from .apply_feature import apply_feature_patches, ApplyFeatureModule
from .apply_patch import apply_single_file_patch
__all__ = [
"apply_all_patches",
"ApplyAllModule",
"apply_feature_patches",
"ApplyFeatureModule",
"apply_single_file_patch",
]

View File

@@ -0,0 +1,114 @@
"""
Apply All - Apply all patches from patches directory.
"""
from typing import List, Tuple, Optional
from ...common.context import Context
from ...common.module import CommandModule, ValidationError
from ...common.utils import log_info, log_error, log_warning
from .common import find_patch_files, process_patch_list
def apply_all_patches(
    build_ctx: Context,
    commit_each: bool = False,
    dry_run: bool = False,
    interactive: bool = False,
    reset_to: Optional[str] = None,
) -> Tuple[int, List[str]]:
    """Apply every patch found under the patches directory.

    Args:
        build_ctx: Build context providing paths into the checkout.
        commit_each: When True, create a git commit after each patch.
        dry_run: When True, only check whether patches would apply.
        interactive: When True, ask for confirmation before each patch.
        reset_to: Optional commit to reset files to before applying.

    Returns:
        Tuple of (number of patches applied, list of failed patches).
    """
    patches_dir = build_ctx.get_patches_dir()
    if not patches_dir.exists():
        log_warning(f"Patches directory does not exist: {patches_dir}")
        return 0, []

    candidates = find_patch_files(patches_dir)
    if not candidates:
        log_warning("No patch files found")
        return 0, []

    log_info(f"Found {len(candidates)} patches")
    if dry_run:
        log_info("DRY RUN - No changes will be made")

    # Pair each patch with its path relative to the patches root for display.
    work_items = [(patch, patch.relative_to(patches_dir)) for patch in candidates]

    applied, failed = process_patch_list(
        work_items,
        build_ctx.chromium_src,
        patches_dir,
        commit_each,
        dry_run,
        interactive,
        reset_to=reset_to,
    )

    log_info(f"\nSummary: {applied} applied, {len(failed)} failed")
    if failed:
        log_error("Failed patches:")
        for name in failed:
            log_error(f" - {name}")
    return applied, failed
class ApplyAllModule(CommandModule):
    """Command module that applies every patch under chromium_patches/."""

    produces = []
    requires = []
    description = "Apply all patches from chromium_patches/"

    def validate(self, ctx: Context) -> None:
        """Ensure git and the Chromium checkout are present before running."""
        import shutil

        if shutil.which("git") is None:
            raise ValidationError("Git is not available in PATH")
        if not ctx.chromium_src.exists():
            raise ValidationError(f"Chromium source not found: {ctx.chromium_src}")

    def execute(
        self,
        ctx: Context,
        interactive: bool = True,
        commit: bool = False,
        reset_to: Optional[str] = None,
        **kwargs,
    ) -> None:
        """Run the apply-all operation.

        Args:
            interactive: Ask before applying each patch.
            commit: Create a git commit after each applied patch.
            reset_to: Optional commit to reset files to before applying.

        Raises:
            RuntimeError: If any patch fails to apply.
        """
        applied, failures = apply_all_patches(
            ctx,
            commit_each=commit,
            dry_run=False,
            interactive=interactive,
            reset_to=reset_to,
        )
        if failures:
            raise RuntimeError(f"Failed to apply {len(failures)} patches")

View File

@@ -0,0 +1,135 @@
"""
Apply Feature - Apply patches for a specific feature.
"""
import yaml
from typing import List, Tuple, Optional
from ...common.context import Context
from ...common.module import CommandModule, ValidationError
from ...common.utils import log_info, log_error, log_warning
from .common import process_patch_list
def apply_feature_patches(
    build_ctx: Context,
    feature_name: str,
    commit_each: bool = False,
    dry_run: bool = False,
    reset_to: Optional[str] = None,
) -> Tuple[int, List[str]]:
    """Apply the patches belonging to one named feature from features.yaml.

    Args:
        build_ctx: Build context providing paths into the checkout.
        feature_name: Name of the feature whose files should be patched.
        commit_each: When True, create a git commit after each patch.
        dry_run: When True, only check whether patches would apply.
        reset_to: Optional commit to reset files to before applying.

    Returns:
        Tuple of (number of patches applied, list of failed patches).
    """
    features_path = build_ctx.get_features_yaml_path()
    if not features_path.exists():
        log_error("No features.yaml found")
        return 0, []

    with open(features_path) as fh:
        config = yaml.safe_load(fh)
    feature_map = config.get("features", {})

    if feature_name not in feature_map:
        log_error(f"Feature '{feature_name}' not found")
        log_info("Available features:")
        for known in feature_map:
            log_info(f" - {known}")
        return 0, []

    feature_files = feature_map[feature_name].get("files", [])
    if not feature_files:
        log_warning(f"Feature '{feature_name}' has no files")
        return 0, []

    log_info(f"Applying patches for feature '{feature_name}' ({len(feature_files)} files)")
    if dry_run:
        log_info("DRY RUN - No changes will be made")

    # Map each chromium file path to its stored patch file.
    patches_dir = build_ctx.get_patches_dir()
    work_items = [
        (build_ctx.get_patch_path_for_file(rel_path), rel_path)
        for rel_path in feature_files
    ]

    applied, failed = process_patch_list(
        work_items,
        build_ctx.chromium_src,
        patches_dir,
        commit_each,
        dry_run,
        interactive=False,  # Feature patches don't support interactive mode
        feature_name=feature_name,
        reset_to=reset_to,
    )

    log_info(f"\nSummary: {applied} applied, {len(failed)} failed")
    if failed:
        log_error("Failed patches:")
        for name in failed:
            log_error(f" - {name}")
    return applied, failed
class ApplyFeatureModule(CommandModule):
    """Command module that applies the patches of one named feature."""

    produces = []
    requires = []
    description = "Apply patches for a specific feature"

    def validate(self, ctx: Context) -> None:
        """Ensure git and the Chromium checkout are present before running."""
        import shutil

        if shutil.which("git") is None:
            raise ValidationError("Git is not available in PATH")
        if not ctx.chromium_src.exists():
            raise ValidationError(f"Chromium source not found: {ctx.chromium_src}")

    def execute(
        self,
        ctx: Context,
        feature_name: str,
        interactive: bool = True,
        commit: bool = False,
        reset_to: Optional[str] = None,
        **kwargs,
    ) -> None:
        """Apply all patches belonging to *feature_name*.

        Args:
            feature_name: Name of the feature to apply.
            interactive: Accepted for interface symmetry with ApplyAllModule;
                feature application itself always runs non-interactively.
            commit: Create a git commit after each applied patch.
            reset_to: Optional commit to reset files to before applying.

        Raises:
            RuntimeError: If any patch fails to apply.
        """
        applied, failures = apply_feature_patches(
            ctx,
            feature_name,
            commit_each=commit,
            dry_run=False,
            reset_to=reset_to,
        )
        if failures:
            raise RuntimeError(
                f"Failed to apply {len(failures)} patches for feature '{feature_name}'"
            )

View File

@@ -0,0 +1,44 @@
"""
Apply Patch - Apply patch for a single chromium file.
"""
from typing import Tuple, Optional
from ...common.context import Context
from ...common.utils import log_info
from .common import apply_single_patch
def apply_single_file_patch(
    build_ctx: Context,
    chromium_path: str,
    reset_to: Optional[str] = None,
    dry_run: bool = False,
) -> Tuple[bool, Optional[str]]:
    """Apply the stored patch for one chromium source file.

    Args:
        build_ctx: Build context providing paths into the checkout.
        chromium_path: Path to the file in chromium (e.g., chrome/common/foo.h).
        reset_to: Optional commit to reset the file to before applying.
        dry_run: When True, only check whether the patch would apply.

    Returns:
        Tuple of (success: bool, error_message: Optional[str])
    """
    patch_file = build_ctx.get_patch_path_for_file(chromium_path)
    if not patch_file.exists():
        return False, f"No patch found for: {chromium_path}"

    log_info(f"Applying patch for: {chromium_path}")
    if dry_run:
        log_info("DRY RUN - No changes will be made")

    patches_root = build_ctx.get_patches_dir()
    return apply_single_patch(
        patch_file,
        build_ctx.chromium_src,
        dry_run=dry_run,
        relative_to=patches_root,
        reset_to=reset_to,
    )

View File

@@ -1,19 +1,16 @@
"""
Apply module - Apply patches to Chromium source
Common functions shared across apply module commands.
Simple and straightforward patch application with minimal error handling.
Contains core patch application logic used by apply_all, apply_feature, and apply_patch.
"""
import click
import yaml
from pathlib import Path
from typing import List, Tuple, Optional
from context import BuildContext
from modules.dev_cli.utils import run_git_command, GitError
from utils import log_info, log_error, log_success, log_warning
from .utils import run_git_command, file_exists_in_commit, reset_file_to_commit
from ...common.utils import log_info, log_error, log_success, log_warning
# Core Functions - Can be called programmatically or from CLI
def find_patch_files(patches_dir: Path) -> List[Path]:
"""Find all valid patch files in a directory.
@@ -44,6 +41,7 @@ def apply_single_patch(
chromium_src: Path,
dry_run: bool = False,
relative_to: Optional[Path] = None,
reset_to: Optional[str] = None,
) -> Tuple[bool, Optional[str]]:
"""Apply a single patch file.
@@ -52,12 +50,20 @@ def apply_single_patch(
chromium_src: Chromium source directory
dry_run: If True, only check if patch would apply
relative_to: Base path for displaying relative paths (optional)
reset_to: Commit to reset file to before applying (optional)
Returns:
Tuple of (success: bool, error_message: Optional[str])
"""
display_path = patch_path.relative_to(relative_to) if relative_to else patch_path
# Reset file to base commit if requested
if reset_to and not dry_run:
file_path = str(display_path)
if file_exists_in_commit(file_path, reset_to, chromium_src):
log_info(f" Resetting to {reset_to[:8]}: {file_path}")
reset_file_to_commit(file_path, reset_to, chromium_src)
if dry_run:
# Just check if patch would apply
result = run_git_command(
@@ -151,6 +157,7 @@ def process_patch_list(
dry_run: bool = False,
interactive: bool = False,
feature_name: Optional[str] = None,
reset_to: Optional[str] = None,
) -> Tuple[int, List[str]]:
"""Process a list of patches.
@@ -162,6 +169,7 @@ def process_patch_list(
dry_run: Only check if patches would apply
interactive: Ask for confirmation before each patch
feature_name: Optional feature name for commit messages
reset_to: Commit to reset files to before applying (optional)
Returns:
Tuple of (applied_count, failed_list)
@@ -205,7 +213,7 @@ def process_patch_list(
# Apply the patch
success, error = apply_single_patch(
patch_path, chromium_src, dry_run, patches_dir
patch_path, chromium_src, dry_run, patches_dir, reset_to
)
if success:
@@ -238,207 +246,3 @@ def process_patch_list(
log_error("Invalid choice.")
return applied, failed
# ============================================================================
# Main Functions - Entry points for programmatic use
# ============================================================================
# NOTE(review): legacy pre-refactor implementation (removed in this diff);
# superseded by modules/apply/apply_all.py, which adds reset_to support.
def apply_all_patches(
    build_ctx: BuildContext,
    commit_each: bool = False,
    dry_run: bool = False,
    interactive: bool = False,
) -> Tuple[int, List[str]]:
    """Apply all patches from patches directory.

    Args:
        build_ctx: Build context
        commit_each: Create a commit after each patch
        dry_run: Only check if patches would apply
        interactive: Ask for confirmation before each patch

    Returns:
        Tuple of (applied_count, failed_list)
    """
    patches_dir = build_ctx.get_dev_patches_dir()
    if not patches_dir.exists():
        log_warning(f"Patches directory does not exist: {patches_dir}")
        return 0, []
    # Find all patch files
    patch_files = find_patch_files(patches_dir)
    if not patch_files:
        log_warning("No patch files found")
        return 0, []
    log_info(f"Found {len(patch_files)} patches")
    if dry_run:
        log_info("DRY RUN - No changes will be made")
    # Create patch list with display names
    patch_list = [(p, p.relative_to(patches_dir)) for p in patch_files]
    # Process patches
    applied, failed = process_patch_list(
        patch_list,
        build_ctx.chromium_src,
        patches_dir,
        commit_each,
        dry_run,
        interactive,
    )
    # Summary
    log_info(f"\nSummary: {applied} applied, {len(failed)} failed")
    if failed:
        log_error("Failed patches:")
        for p in failed:
            log_error(f" - {p}")
    return applied, failed
# NOTE(review): legacy pre-refactor implementation (removed in this diff);
# superseded by modules/apply/apply_feature.py, which adds reset_to support.
def apply_feature_patches(
    build_ctx: BuildContext,
    feature_name: str,
    commit_each: bool = False,
    dry_run: bool = False,
) -> Tuple[int, List[str]]:
    """Apply patches for a specific feature.

    Args:
        build_ctx: Build context
        feature_name: Name of the feature
        commit_each: Create a commit after each patch
        dry_run: Only check if patches would apply

    Returns:
        Tuple of (applied_count, failed_list)
    """
    # Load features.yaml
    features_path = build_ctx.get_features_yaml_path()
    if not features_path.exists():
        log_error("No features.yaml found")
        return 0, []
    with open(features_path) as f:
        data = yaml.safe_load(f)
    features = data.get("features", {})
    if feature_name not in features:
        log_error(f"Feature '{feature_name}' not found")
        log_info("Available features:")
        for name in features:
            log_info(f" - {name}")
        return 0, []
    file_list = features[feature_name].get("files", [])
    if not file_list:
        log_warning(f"Feature '{feature_name}' has no files")
        return 0, []
    log_info(f"Applying patches for feature '{feature_name}' ({len(file_list)} files)")
    if dry_run:
        log_info("DRY RUN - No changes will be made")
    # Create patch list
    patches_dir = build_ctx.get_dev_patches_dir()
    patch_list = []
    for file_path in file_list:
        patch_path = build_ctx.get_patch_path_for_file(file_path)
        patch_list.append((patch_path, file_path))
    # Process patches
    applied, failed = process_patch_list(
        patch_list,
        build_ctx.chromium_src,
        patches_dir,
        commit_each,
        dry_run,
        interactive=False,  # Feature patches don't support interactive mode
        feature_name=feature_name,
    )
    # Summary
    log_info(f"\nSummary: {applied} applied, {len(failed)} failed")
    if failed:
        log_error("Failed patches:")
        for p in failed:
            log_error(f" - {p}")
    return applied, failed
# CLI Commands - Thin wrappers around core functions
# NOTE(review): legacy click-based CLI, removed by the typer migration.
@click.group(name="apply")
def apply_group():
    """Apply patches to Chromium source"""
    pass
@apply_group.command(name="all")
@click.option("--commit-each", is_flag=True, help="Create git commit after each patch")
@click.option("--dry-run", is_flag=True, help="Test patches without applying")
@click.pass_context
def apply_all(ctx, commit_each, dry_run):
    """Apply all patches from chromium_src/

    \b
    Examples:
        dev apply all
        dev apply all --commit-each
        dev apply all --dry-run
    """
    chromium_src = ctx.parent.obj.get("chromium_src")
    # NOTE(review): imported lazily — presumably to avoid a circular import
    # with dev.py; confirm before moving to module level.
    from dev import create_build_context

    build_ctx = create_build_context(chromium_src)
    if not build_ctx:
        return
    applied, failed = apply_all_patches(build_ctx, commit_each, dry_run)
    # Exit with error code if any patches failed
    if failed:
        ctx.exit(1)
@apply_group.command(name="feature")
@click.argument("feature_name")
@click.option("--commit-each", is_flag=True, help="Create git commit after each patch")
@click.option("--dry-run", is_flag=True, help="Test patches without applying")
@click.pass_context
def apply_feature(ctx, feature_name, commit_each, dry_run):
    """Apply patches for a specific feature

    \b
    Examples:
        dev apply feature llm-chat
        dev apply feature my-feature --commit-each
    """
    chromium_src = ctx.parent.obj.get("chromium_src")
    # NOTE(review): imported lazily — presumably to avoid a circular import
    # with dev.py; confirm before moving to module level.
    from dev import create_build_context

    build_ctx = create_build_context(chromium_src)
    if not build_ctx:
        return
    applied, failed = apply_feature_patches(
        build_ctx, feature_name, commit_each, dry_run
    )
    # Exit with error code if any patches failed
    if failed:
        ctx.exit(1)

View File

@@ -0,0 +1,667 @@
"""
Shared utilities for Dev CLI operations
This module provides robust utilities for git operations, diff parsing,
and patch management with comprehensive error handling.
"""
import subprocess
import click
import re
from pathlib import Path
from typing import Optional, List, Dict, Tuple
from enum import Enum
from dataclasses import dataclass
from ...common.context import Context
from ...common.utils import log_error, log_success, log_warning
class FileOperation(Enum):
    """Kinds of per-file change a git diff can describe."""

    ADD = "add"  # new file introduced by the diff
    MODIFY = "modify"  # existing file changed in place
    DELETE = "delete"  # file removed
    RENAME = "rename"  # file moved (old name in FilePatch.old_path)
    COPY = "copy"  # file duplicated (source in FilePatch.old_path)
    BINARY = "binary"  # binary content changed; no textual patch available
@dataclass
class FilePatch:
    """Represents a single file's patch information parsed from a git diff."""

    file_path: str  # path of the file after the change
    operation: FileOperation  # what kind of change the diff describes
    old_path: Optional[str] = None  # For renames/copies
    patch_content: Optional[str] = None  # unified diff text; None for binary files
    is_binary: bool = False  # True when git reported "Binary files ..."
    similarity: Optional[int] = None  # For renames (percentage)
class GitError(Exception):
    """Raised when a git operation fails, times out, or cannot be run."""

    pass
def run_git_command(
    cmd: List[str],
    cwd: Path,
    capture: bool = True,
    check: bool = False,
    timeout: Optional[int] = None,
    binary_output: bool = False,
) -> subprocess.CompletedProcess:
    """Run a git command and return the result.

    Args:
        cmd: Command to run
        cwd: Working directory
        capture: Whether to capture output
        check: Whether to raise GitError on non-zero return
        timeout: Command timeout in seconds (defaults to 60)
        binary_output: If True, handle binary output (don't decode as text)

    Returns:
        CompletedProcess result (stdout/stderr decoded to text)

    Raises:
        GitError: If the command times out, cannot be executed, or — when
            check=True — exits non-zero.
    """
    effective_timeout = timeout or 60
    try:
        # Commands that may emit binary data (e.g. `git diff` without
        # --binary) are decoded with errors="replace" so invalid UTF-8
        # cannot crash the caller.
        if binary_output or ("diff" in cmd and "--binary" not in cmd):
            try:
                result = subprocess.run(
                    cmd,
                    cwd=cwd,
                    capture_output=capture,
                    text=True,
                    check=False,
                    timeout=effective_timeout,
                    errors="replace",  # Replace invalid UTF-8 sequences
                )
            except UnicodeDecodeError:
                # Fall back to raw bytes, then decode leniently ourselves.
                result = subprocess.run(
                    cmd,
                    cwd=cwd,
                    capture_output=capture,
                    text=False,
                    check=False,
                    timeout=effective_timeout,
                )
                if result.stdout:
                    result.stdout = result.stdout.decode("utf-8", errors="replace")
                if result.stderr:
                    result.stderr = result.stderr.decode("utf-8", errors="replace")
        else:
            result = subprocess.run(
                cmd,
                cwd=cwd,
                capture_output=capture,
                text=True,
                check=False,
                timeout=effective_timeout,
            )
    except subprocess.TimeoutExpired:
        log_error(
            f"Git command timed out after {effective_timeout} seconds: {' '.join(cmd)}"
        )
        raise GitError(f"Command timed out: {' '.join(cmd)}")
    except Exception as e:
        log_error(f"Failed to run git command: {' '.join(cmd)}")
        raise GitError(f"Command failed: {e}")

    # BUGFIX: the check-failure GitError used to be raised inside the try
    # block, where the generic `except Exception` caught it, logged a
    # misleading "Failed to run git command", and double-wrapped the message
    # as "Command failed: Git command failed: ...". Raising it here keeps
    # the error clean and accurate.
    if check and result.returncode != 0:
        error_msg = result.stderr or result.stdout or "Unknown error"
        raise GitError(f"Git command failed: {' '.join(cmd)}\nError: {error_msg}")
    return result
def validate_git_repository(path: Path) -> bool:
    """Return True when *path* lies inside a git repository."""
    try:
        probe = run_git_command(
            ["git", "rev-parse", "--git-dir"], cwd=path, check=False
        )
    except GitError:
        return False
    return probe.returncode == 0
def validate_commit_exists(commit_hash: str, chromium_src: Path) -> bool:
    """Return True when *commit_hash* resolves to a commit in the repository."""
    try:
        probe = run_git_command(
            ["git", "rev-parse", "--verify", f"{commit_hash}^{{commit}}"],
            cwd=chromium_src,
        )
    except GitError as exc:
        log_error(f"Failed to validate commit: {exc}")
        return False
    if probe.returncode == 0:
        return True
    log_error(f"Commit '{commit_hash}' not found in repository")
    return False
def file_exists_in_commit(file_path: str, commit: str, chromium_src: Path) -> bool:
    """Return True when *file_path* exists in *commit* (git cat-file -e)."""
    probe = run_git_command(
        ["git", "cat-file", "-e", f"{commit}:{file_path}"],
        cwd=chromium_src,
    )
    return probe.returncode == 0
def reset_file_to_commit(file_path: str, commit: str, chromium_src: Path) -> bool:
    """Restore one file to its state at *commit* (git checkout <commit> -- file)."""
    checkout = run_git_command(
        ["git", "checkout", commit, "--", file_path],
        cwd=chromium_src,
    )
    return checkout.returncode == 0
def get_commit_changed_files(commit_hash: str, chromium_src: Path) -> List[str]:
    """Return the paths touched by *commit_hash*, or an empty list on failure."""
    try:
        diff_tree = run_git_command(
            ["git", "diff-tree", "--no-commit-id", "--name-only", "-r", commit_hash],
            cwd=chromium_src,
        )
    except GitError as exc:
        log_error(f"Error getting changed files: {exc}")
        return []
    if diff_tree.returncode != 0:
        log_error(f"Failed to get changed files for commit {commit_hash}")
        return []
    return [entry.strip() for entry in diff_tree.stdout.strip().split("\n") if entry.strip()]
def parse_diff_output(diff_output: str) -> Dict[str, FilePatch]:
    """
    Parse git diff output into per-file patches with full metadata.

    Handles regular modifications, new files, deleted files, binary files,
    renames, copies, and mode changes.

    Returns:
        Dict mapping file path to FilePatch objects
    """
    patches: Dict[str, FilePatch] = {}
    current_file: Optional[str] = None
    pending_lines: List[str] = []
    operation = FileOperation.MODIFY
    binary = False
    old_path: Optional[str] = None
    similarity: Optional[int] = None

    def _record() -> None:
        # Flush the lines accumulated for the current file, if any.
        if current_file and pending_lines:
            patches[current_file] = FilePatch(
                file_path=current_file,
                operation=operation,
                old_path=old_path,
                patch_content=None if binary else "\n".join(pending_lines),
                is_binary=binary,
                similarity=similarity,
            )

    for line in diff_output.splitlines():
        if line.startswith("diff --git"):
            # A new per-file section begins: record the previous one first.
            _record()
            header = re.match(r"diff --git a/(.*) b/(.*)", line)
            if header:
                current_file = header.group(2)  # track the post-change path
                pending_lines = [line]
            else:
                log_warning(f"Could not parse diff line: {line}")
                current_file = None
                pending_lines = []
            operation = FileOperation.MODIFY
            binary = False
            old_path = None
            similarity = None
            continue

        if current_file is None:
            continue  # no file section open; skip until the next header

        # Metadata lines reclassify the pending patch; every line (metadata
        # or hunk content) is kept verbatim in the patch body.
        if line.startswith("deleted file"):
            operation = FileOperation.DELETE
        elif line.startswith("new file"):
            operation = FileOperation.ADD
        elif line.startswith("similarity index"):
            pct = re.match(r"similarity index (\d+)%", line)
            if pct:
                similarity = int(pct.group(1))
        elif line.startswith("rename from"):
            operation = FileOperation.RENAME
            old_path = line[12:].strip()  # Remove 'rename from '
        elif line.startswith("copy from"):
            operation = FileOperation.COPY
            old_path = line[10:].strip()  # Remove 'copy from '
        elif line.startswith("Binary files"):
            binary = True
            # Keep a more specific operation (add/delete/rename) if one was
            # already detected; only plain modifications become BINARY.
            if operation == FileOperation.MODIFY:
                operation = FileOperation.BINARY
        pending_lines.append(line)

    # Record the final file section.
    _record()
    return patches
def write_patch_file(ctx: Context, file_path: str, patch_content: str) -> bool:
    """
    Write a patch file into the chromium_src patch directory structure.

    Args:
        ctx: Build context
        file_path: Path of the file being patched
        patch_content: The patch content to write

    Returns:
        True if successful, False otherwise
    """
    destination = ctx.get_patch_path_for_file(file_path)
    destination.parent.mkdir(parents=True, exist_ok=True)
    try:
        body = patch_content
        # git expects patch files to end with a trailing newline.
        if body and not body.endswith("\n"):
            body += "\n"
        destination.write_text(body, encoding="utf-8")
        log_success(f" Written: {destination.relative_to(ctx.root_dir)}")
        return True
    except Exception as exc:
        log_error(f" Failed to write {destination}: {exc}")
        return False
def create_deletion_marker(ctx: Context, file_path: str) -> bool:
    """
    Create a `.deleted` marker recording that *file_path* was removed.

    Args:
        ctx: Build context
        file_path: Path of the deleted file

    Returns:
        True if successful, False otherwise
    """
    marker = ctx.get_patches_dir() / file_path
    marker = marker.with_suffix(marker.suffix + ".deleted")
    marker.parent.mkdir(parents=True, exist_ok=True)
    try:
        marker.write_text(
            f"File deleted in patch\nOriginal path: {file_path}\n",
            encoding="utf-8",
        )
        log_warning(f" Marked deleted: {marker.relative_to(ctx.root_dir)}")
        return True
    except Exception as exc:
        log_error(f" Failed to create deletion marker: {exc}")
        return False
def create_binary_marker(
    ctx: Context, file_path: str, operation: FileOperation
) -> bool:
    """
    Create a `.binary` marker for a binary file that cannot be diffed as text.

    Args:
        ctx: Build context
        file_path: Path of the binary file
        operation: The operation type

    Returns:
        True if successful, False otherwise
    """
    marker = ctx.get_patches_dir() / file_path
    marker = marker.with_suffix(marker.suffix + ".binary")
    marker.parent.mkdir(parents=True, exist_ok=True)
    try:
        marker.write_text(
            f"Binary file\nOperation: {operation.value}\nOriginal path: {file_path}\n",
            encoding="utf-8",
        )
        log_warning(f" Binary file marked: {marker.relative_to(ctx.root_dir)}")
        return True
    except Exception as exc:
        log_error(f" Failed to create binary marker: {exc}")
        return False
def apply_single_patch(
    patch_path: Path, chromium_src: Path, interactive: bool = True
) -> Tuple[bool, str]:
    """
    Apply a single patch file to chromium source with multiple strategies.

    Deletion markers (*.deleted) and binary markers (*.binary) are handled
    specially; regular patches are tried in order:
      1. Standard git apply
      2. Three-way merge
      3. Whitespace-fixing apply
      4. Interactive conflict resolution (if interactive=True)

    Args:
        patch_path: Path to the patch (or marker) file
        chromium_src: Root of the chromium checkout to apply against
        interactive: If True, prompt the user on conflicts

    Returns:
        Tuple of (success, message)
    """
    if not patch_path.exists():
        return False, f"Patch file not found: {patch_path}"
    # Check if it's a deletion marker
    if patch_path.suffix == ".deleted":
        # Recover the original repo-relative path from the marker body.
        # BUG FIX: the old code used patch_path.stem, which drops any
        # directory components (markers mirror the repo layout under the
        # patches dir), so nested files were never actually deleted.
        # create_deletion_marker() writes an "Original path:" line for this.
        file_path = None
        try:
            for line in patch_path.read_text(encoding="utf-8").splitlines():
                if line.startswith("Original path: "):
                    file_path = line[len("Original path: "):].strip()
                    break
        except OSError:
            pass
        if not file_path:
            # Fallback for legacy markers without an "Original path:" line
            file_path = patch_path.stem
        target_file = chromium_src / file_path
        if target_file.exists():
            try:
                target_file.unlink()
                return True, f"Deleted: {file_path}"
            except Exception as e:
                return False, f"Failed to delete {file_path}: {e}"
        else:
            return True, f"Already deleted: {file_path}"
    # Check if it's a binary marker
    if patch_path.suffix == ".binary":
        return False, f"Binary file patch not supported: {patch_path.name}"
    # Try standard apply
    result = run_git_command(["git", "apply", "-p1", str(patch_path)], cwd=chromium_src)
    if result.returncode == 0:
        return True, f"Applied: {patch_path.name}"
    # Try 3-way merge
    result = run_git_command(
        ["git", "apply", "-p1", "--3way", str(patch_path)], cwd=chromium_src
    )
    if result.returncode == 0:
        return True, f"Applied (3-way): {patch_path.name}"
    # Try with whitespace options
    result = run_git_command(
        ["git", "apply", "-p1", "--whitespace=fix", str(patch_path)], cwd=chromium_src
    )
    if result.returncode == 0:
        return True, f"Applied (whitespace fixed): {patch_path.name}"
    # Handle conflict
    if interactive:
        return handle_patch_conflict(patch_path, chromium_src, result.stderr)
    else:
        return False, f"Failed: {patch_path.name} - {result.stderr}"
def handle_patch_conflict(
    patch_path: Path, chromium_src: Path, error_msg: str = ""
) -> Tuple[bool, str]:
    """Interactively resolve a failed `git apply` for patch_path.

    Shows the first lines of the git error, then loops on a 5-option menu
    until the user picks a terminal choice.

    Returns:
        (success, message). NOTE: "skip" (option 2) returns True so the
        overall run continues; only "abort" (option 5) returns False.
    """
    click.echo(f"\n{click.style('CONFLICT:', fg='red', bold=True)} {patch_path}")
    if error_msg:
        # Parse error message for more context
        lines = error_msg.strip().split("\n")
        for line in lines[:5]:  # Show first 5 lines of error
            click.echo(f" {line}")
    click.echo("\nOptions:")
    click.echo(" 1) Fix manually and continue")
    click.echo(" 2) Skip this patch")
    click.echo(" 3) Try with reduced context (--unidiff-zero)")
    click.echo(" 4) Show patch content")
    click.echo(" 5) Abort all remaining patches")
    while True:
        choice = click.prompt("Enter choice (1-5)", type=str)
        if choice == "1":
            # User edits the tree out-of-band; we trust them and move on.
            click.prompt("Fix the conflicts manually and press Enter to continue")
            return True, f"Manually fixed: {patch_path.name}"
        elif choice == "2":
            return True, f"Skipped: {patch_path.name}"
        elif choice == "3":
            # Try with reduced context
            result = run_git_command(
                ["git", "apply", "-p1", "--unidiff-zero", str(patch_path)],
                cwd=chromium_src,
            )
            if result.returncode == 0:
                return True, f"Applied (reduced context): {patch_path.name}"
            else:
                click.echo("Failed with reduced context too")
                continue  # back to the menu
        elif choice == "4":
            # Show patch content
            try:
                content = patch_path.read_text()
                lines = content.split("\n")
                # Show first 50 lines
                click.echo("\n--- Patch Content (first 50 lines) ---")
                for line in lines[:50]:
                    click.echo(line)
                if len(lines) > 50:
                    click.echo(f"... and {len(lines) - 50} more lines")
                click.echo("--- End of Preview ---\n")
            except Exception as e:
                click.echo(f"Failed to read patch: {e}")
            continue  # preview is not a resolution; re-prompt
        elif choice == "5":
            return False, "Aborted by user"
        else:
            click.echo("Invalid choice. Please enter 1-5.")
def create_git_commit(chromium_src: Path, message: str) -> bool:
    """Stage all changes in chromium_src and commit them with `message`.

    A clean working tree is treated as success (nothing to do). Fix over the
    old version: when `git commit` itself reports "nothing to commit" (e.g.
    the tree became clean between the status check and the commit), we now
    return True immediately instead of risking a misleading
    "Created commit" success log for a commit that was never made.

    Args:
        chromium_src: Repository to commit in
        message: Commit message

    Returns:
        True on success or benign no-op, False on staging/commit failure.
    """
    # Check if there are changes to commit
    result = run_git_command(["git", "status", "--porcelain"], cwd=chromium_src)
    if not result.stdout.strip():
        log_warning("Nothing to commit, working tree clean")
        return True
    # Stage all changes
    result = run_git_command(["git", "add", "-A"], cwd=chromium_src)
    if result.returncode != 0:
        log_error("Failed to stage changes")
        return False
    # Create commit
    result = run_git_command(["git", "commit", "-m", message], cwd=chromium_src)
    if result.returncode != 0:
        if "nothing to commit" in result.stdout:
            # Benign race: nothing actually needed committing.
            log_warning("Nothing to commit")
            return True
        log_error(f"Failed to create commit: {result.stderr}")
        return False
    log_success(f"Created commit: {message}")
    return True
def get_commit_info(commit_hash: str, chromium_src: Path) -> Optional[Dict[str, str]]:
    """Return hash, author, timestamp, subject, and body for a commit.

    Uses `git show --no-patch` with a newline-separated format string and
    splits the output positionally.

    Returns:
        Dict with keys hash/author_name/author_email/timestamp/subject/body,
        or None if the commit cannot be shown or the output is malformed.
    """
    # %H hash, %an author name, %ae author email, %at unix time, %s subject, %b body
    fmt = "--format=%H%n%an%n%ae%n%at%n%s%n%b"
    try:
        result = run_git_command(
            ["git", "show", fmt, "--no-patch", commit_hash],
            cwd=chromium_src,
        )
    except GitError:
        return None
    if result.returncode != 0:
        return None
    fields = result.stdout.strip().split("\n")
    if len(fields) < 5:
        return None
    # Everything after the subject line is the (possibly multi-line) body.
    return {
        "hash": fields[0],
        "author_name": fields[1],
        "author_email": fields[2],
        "timestamp": fields[3],
        "subject": fields[4],
        "body": "\n".join(fields[5:]) if len(fields) > 5 else "",
    }
def prompt_yes_no(question: str, default: bool = False) -> bool:
    """Ask a yes/no question on the terminal; return True for an affirmative.

    The default answer (used on bare Enter) is controlled by `default` and
    reflected in the [Y/n] / [y/N] hint.
    """
    hint = "Y/n" if default else "y/N"
    fallback = "y" if default else "n"
    reply = click.prompt(f"{question} [{hint}]", type=str, default=fallback)
    return reply.lower() in ("y", "yes")
def log_extraction_summary(file_patches: Dict[str, FilePatch]):
    """Print a summary of extracted patches, broken down by operation type."""
    # Tally operations and binary files in one pass
    operations = {op: 0 for op in FileOperation}
    binary_count = 0
    for patch in file_patches.values():
        operations[patch.operation] += 1
        if patch.is_binary:
            binary_count += 1
    click.echo("\n" + click.style("Extraction Summary", fg="green", bold=True))
    click.echo("=" * 60)
    click.echo(f"Total files: {len(file_patches)}")
    click.echo("-" * 40)
    # Only print categories that actually occurred
    for op, label in (
        (FileOperation.ADD, "New files"),
        (FileOperation.MODIFY, "Modified"),
        (FileOperation.DELETE, "Deleted"),
        (FileOperation.RENAME, "Renamed"),
        (FileOperation.COPY, "Copied"),
    ):
        if operations[op] > 0:
            click.echo(f"{label}: {operations[op]}")
    if binary_count > 0:
        click.echo(f"Binary files: {binary_count}")
    click.echo("=" * 60)
def log_apply_summary(results: List[Tuple[str, bool, str]]):
    """Print totals for applied patches, then list any failures.

    Args:
        results: (file_path, success, message) tuples, one per patch.
    """
    successful = sum(1 for _, ok, _ in results if ok)
    failed = len(results) - successful
    # Header is green only when everything applied cleanly
    header_color = "green" if failed == 0 else "yellow"
    click.echo("\n" + click.style("Apply Summary", fg=header_color, bold=True))
    click.echo("=" * 60)
    click.echo(f"Total patches: {len(results)}")
    click.echo(f"Successful: {successful}")
    click.echo(f"Failed: {failed}")
    click.echo("=" * 60)
    if failed > 0:
        click.echo("\n" + click.style("Failed patches:", fg="red", bold=True))
        for file_path, ok, message in results:
            if not ok:
                click.echo(f"{file_path}: {message}")

View File

@@ -1,66 +0,0 @@
#!/usr/bin/env python3
"""
Clean module for Nxtscape build system
"""
import os
import shutil
from pathlib import Path
from context import BuildContext
from utils import run_command, log_info, log_success, safe_rmtree
def clean(ctx: BuildContext) -> bool:
    """Remove build output, hard-reset the git tree, and clean Sparkle."""
    log_info("🧹 Cleaning build artifacts...")
    build_out = ctx.chromium_src / ctx.out_dir
    if build_out.exists():
        safe_rmtree(build_out)
        log_success("Cleaned build directory")
    log_info("\n🔀 Resetting git branch and removing all tracked files...")
    git_reset(ctx)
    log_info("\n🧹 Cleaning Sparkle build artifacts...")
    clean_sparkle(ctx)
    return True
def clean_sparkle(ctx: BuildContext) -> bool:
    """Delete the Sparkle build directory, if it exists."""
    log_info("\n🧹 Cleaning Sparkle build artifacts...")
    sparkle_path = ctx.get_sparkle_dir()
    if sparkle_path.exists():
        safe_rmtree(sparkle_path)
        log_success("Cleaned Sparkle build directory")
    return True
def git_reset(ctx: BuildContext) -> bool:
    """Hard-reset the chromium checkout, then git-clean selected paths.

    Runs each git command from inside ctx.chromium_src and restores the cwd
    to ctx.root_dir afterwards. The clean is limited to chrome/ and
    components/ with excludes protecting toolchain/staging directories.
    """
    # Hard reset to HEAD
    os.chdir(ctx.chromium_src)
    run_command(["git", "reset", "--hard", "HEAD"])
    os.chdir(ctx.root_dir)
    log_info("\n🧹 Running git clean with exclusions for important directories...")
    clean_cmd = [
        "git",
        "clean",
        "-fdx",
        "chrome/",
        "components/",
        "--exclude=third_party/",
        "--exclude=build_tools/",
        "--exclude=uc_staging/",
        "--exclude=buildtools/",
        "--exclude=tools/",
        "--exclude=build/",
    ]
    os.chdir(ctx.chromium_src)
    run_command(clean_cmd)
    os.chdir(ctx.root_dir)
    log_success("Git reset and clean complete")
    return True

View File

@@ -1,69 +0,0 @@
#!/usr/bin/env python3
"""
Build execution module for Nxtscape build system
"""
import os
import tempfile
import shutil
import multiprocessing
from pathlib import Path
from context import BuildContext
from utils import (
run_command,
log_info,
log_success,
log_warning,
join_paths,
IS_WINDOWS,
IS_MACOS,
)
def build(ctx: BuildContext) -> bool:
    """Write chrome/VERSION and run the autoninja build for this context.

    Fixes over the old version:
    - When nxtscape_chromium_version was unset it logged "Not building" but
      then built anyway; now it actually refuses and returns False.
    - A malformed version string silently built without a VERSION file; now
      it warns and returns False.
    - The VERSION file is written directly instead of via a temp file that
      leaked if the copy failed.

    Returns:
        True when the build (and any app rename) completed, False otherwise.
    """
    log_info("\n🔨 Building Nxtscape (this will take a while)...")
    if not ctx.nxtscape_chromium_version:
        log_warning("No nxtscape_chromium_version set. Not building")
        return False
    # Parse the nxtscape_chromium_version back into components
    parts = ctx.nxtscape_chromium_version.split(".")
    if len(parts) != 4:
        log_warning(
            f"Invalid nxtscape_chromium_version: {ctx.nxtscape_chromium_version}. Not building"
        )
        return False
    version_content = (
        f"MAJOR={parts[0]}\nMINOR={parts[1]}\nBUILD={parts[2]}\nPATCH={parts[3]}"
    )
    chrome_version_path = join_paths(ctx.chromium_src, "chrome", "VERSION")
    Path(chrome_version_path).write_text(version_content)
    log_info(
        f"Created VERSION file with nxtscape_chromium_version: {ctx.nxtscape_chromium_version}"
    )
    os.chdir(ctx.chromium_src)
    # Use default autoninja parallelism (it handles this automatically)
    autoninja_cmd = "autoninja.bat" if IS_WINDOWS else "autoninja"
    log_info("Using default autoninja parallelism")
    # Build chrome and chromedriver
    run_command([autoninja_cmd, "-C", ctx.out_dir, "chrome", "chromedriver"])
    # Rename Chromium.app to Nxtscape.app (no-op if already renamed)
    app_path = ctx.get_chromium_app_path()
    new_path = ctx.get_app_path()
    if app_path.exists() and not new_path.exists():
        shutil.move(str(app_path), str(new_path))
    log_success("Build complete!")
    return True

View File

@@ -0,0 +1,17 @@
#!/usr/bin/env python3
"""
Compilation modules for BrowserOS build system
This package contains different build strategies:
- standard: Single-architecture compilation
- universal: Multi-architecture compilation (macOS universal binaries)
"""
from .standard import CompileModule, build_target
from .universal import UniversalBuildModule
__all__ = [
'CompileModule',
'UniversalBuildModule',
'build_target',
]

View File

@@ -0,0 +1,82 @@
#!/usr/bin/env python3
"""Standard single-architecture build module for BrowserOS"""
import tempfile
import shutil
from pathlib import Path
from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.utils import (
run_command,
log_info,
log_success,
log_warning,
join_paths,
IS_WINDOWS,
)
class CompileModule(CommandModule):
    """Standard single-architecture compile step driven by autoninja."""

    produces = ["built_app"]
    requires = []
    description = "Build BrowserOS using autoninja"

    def validate(self, ctx: Context) -> None:
        """Fail fast if the source tree, version, or GN args are missing."""
        if not ctx.chromium_src.exists():
            raise ValidationError(f"Chromium source not found: {ctx.chromium_src}")
        if not ctx.browseros_chromium_version:
            raise ValidationError("BrowserOS chromium version not set")
        args_file = ctx.get_gn_args_file()
        if not args_file.exists():
            raise ValidationError(f"Build not configured - args.gn not found: {args_file}")

    def execute(self, ctx: Context) -> None:
        """Write chrome/VERSION, run autoninja, rename and register the app."""
        log_info("\n🔨 Building BrowserOS (this will take a while)...")
        self._create_version_file(ctx)
        autoninja_cmd = "autoninja.bat" if IS_WINDOWS() else "autoninja"
        log_info("Using default autoninja parallelism")
        run_command([autoninja_cmd, "-C", ctx.out_dir, "chrome", "chromedriver"], cwd=ctx.chromium_src)
        # Rename the stock Chromium app bundle to the BrowserOS app name
        app_path = ctx.get_chromium_app_path()
        new_path = ctx.get_app_path()
        if app_path.exists() and not new_path.exists():
            shutil.move(str(app_path), str(new_path))
        ctx.artifact_registry.add("built_app", new_path)
        log_success("Build complete!")

    def _create_version_file(self, ctx: Context) -> None:
        """Write chrome/VERSION from the 4-part browseros_chromium_version.

        Fix: the old implementation wrote to a NamedTemporaryFile, copied it
        over chrome/VERSION with shutil.copy2, then unlinked the temp file —
        leaking the temp file if the copy raised, for no benefit. Write the
        content directly instead.
        """
        parts = ctx.browseros_chromium_version.split(".")
        if len(parts) != 4:
            log_warning(f"Invalid version format: {ctx.browseros_chromium_version}")
            return
        version_content = f"MAJOR={parts[0]}\nMINOR={parts[1]}\nBUILD={parts[2]}\nPATCH={parts[3]}"
        chrome_version_path = join_paths(ctx.chromium_src, "chrome", "VERSION")
        Path(chrome_version_path).write_text(version_content)
        log_info(f"Created VERSION file: {ctx.browseros_chromium_version}")
def build_target(ctx: Context, target: str) -> bool:
    """Build one specific ninja target (e.g. mini_installer) in ctx.out_dir."""
    log_info(f"\n🔨 Building target: {target}")
    ninja = "autoninja.bat" if IS_WINDOWS() else "autoninja"
    run_command([ninja, "-C", ctx.out_dir, target], cwd=ctx.chromium_src)
    log_success(f"Target {target} built successfully")
    return True

View File

@@ -0,0 +1,328 @@
#!/usr/bin/env python3
"""
Universal Build Module - Build, sign, package, and upload universal binary for macOS
This module orchestrates building both architectures (arm64 + x64), signing each,
packaging each into DMGs, uploading each, then merging into a universal binary
and signing/packaging/uploading that as well.
Design:
For each arch (arm64, x64):
1. resources -> configure -> compile
2. sign -> package -> upload
Then:
3. Merge arm64 + x64 into universal
4. sign universal -> package -> upload
Output: 3 DMGs uploaded to GCS:
- BrowserOS_{version}_arm64_signed.dmg
- BrowserOS_{version}_x64_signed.dmg
- BrowserOS_{version}_universal_signed.dmg
Prerequisites (must run BEFORE this module):
- clean (optional)
- git_setup
- sparkle_setup (macOS)
- chromium_replace
- string_replaces
- patches
This module internally runs (for EACH architecture):
- resources (arch-specific binaries)
- configure (GN configuration)
- compile (ninja build)
- sign_macos (code signing + notarization)
- package_macos (DMG creation)
- upload_gcs (GCS upload)
Then merges and processes the universal binary.
"""
from pathlib import Path
from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.utils import log_info, log_success, log_warning, IS_MACOS
# Architectures to build for universal binary
UNIVERSAL_ARCHITECTURES = ["arm64", "x64"]
class UniversalBuildModule(CommandModule):
    """Build, sign, package, and upload universal binary (arm64 + x64) for macOS

    This module handles the complete multi-architecture build, sign, package,
    and upload workflow. It internally creates separate contexts for arm64 and x64,
    builds each, signs each, packages each into DMGs, uploads each, then merges
    them into a universal binary and processes that as well.

    The base context passed to this module can have any architecture value -
    it will be ignored and arm64/x64 will be built explicitly.

    Output artifacts (all uploaded to GCS):
    - BrowserOS_{version}_arm64_signed.dmg
    - BrowserOS_{version}_x64_signed.dmg
    - BrowserOS_{version}_universal_signed.dmg
    """

    produces = ["dmg_arm64", "dmg_x64", "dmg_universal"]
    requires = []
    description = "Build, sign, package, and upload universal binary (arm64 + x64) for macOS"

    def validate(self, ctx: Context) -> None:
        """Validate universal build can run"""
        if not IS_MACOS():
            raise ValidationError("Universal builds only supported on macOS")
        # Check universalizer script exists
        universalizer = ctx.root_dir / "build/modules/package/universalizer_patched.py"
        if not universalizer.exists():
            raise ValidationError(f"Universalizer script not found: {universalizer}")
        # Fail fast: check signing environment is configured
        # (imported lazily to avoid a hard dependency when not signing)
        from ..sign.macos import check_signing_environment
        if not check_signing_environment():
            raise ValidationError(
                "Signing environment not configured. "
                "Required: MACOS_CERTIFICATE_NAME, notarization credentials"
            )

    def execute(self, ctx: Context) -> None:
        """Build arm64 + x64, sign/package/upload each, then merge and process universal"""
        log_info("\n" + "=" * 70)
        log_info("🔄 Universal Build Mode (Full Pipeline)")
        log_info("Building arm64 + x64, signing, packaging, uploading each...")
        log_info("Then merging into universal and processing that too.")
        log_info("=" * 70)
        # Import build modules (lazy: avoids import cycles at module load)
        from ..resources.resources import ResourcesModule
        from ..setup.configure import ConfigureModule
        from .standard import CompileModule
        # Import sign/package/upload modules
        from ..sign.macos import MacOSSignModule
        from ..package.macos import MacOSPackageModule
        from ..upload import GCSUploadModule
        # Clean all build directories before starting
        self._clean_build_directories(ctx)
        built_apps = []
        # Build + Sign + Package + Upload each architecture
        for arch in UNIVERSAL_ARCHITECTURES:
            log_info("\n" + "=" * 70)
            log_info(f"🏗️ Processing architecture: {arch}")
            log_info("=" * 70)
            # Create architecture-specific context with fixed app path
            arch_ctx = self._create_arch_context(ctx, arch)
            log_info(f"📍 Chromium: {arch_ctx.chromium_version}")
            log_info(f"📍 BrowserOS: {arch_ctx.browseros_version}")
            log_info(f"📍 Output directory: {arch_ctx.out_dir}")
            # === BUILD PHASE ===
            # Copy resources (arch-specific binaries like browseros_server, codex)
            log_info(f"\n📦 Copying resources for {arch}...")
            ResourcesModule().execute(arch_ctx)
            # Configure build (GN gen)
            log_info(f"\n🔧 Configuring {arch}...")
            ConfigureModule().execute(arch_ctx)
            # Compile (ninja)
            log_info(f"\n🏗️ Compiling {arch}...")
            CompileModule().execute(arch_ctx)
            # Get app path for this architecture
            app_path = arch_ctx.get_app_path()
            if not app_path.exists():
                raise RuntimeError(f"Build failed - app not found: {app_path}")
            log_success(f"{arch} build complete: {app_path}")
            built_apps.append(app_path)
            # === SIGN PHASE ===
            log_info(f"\n🔏 Signing {arch} build...")
            MacOSSignModule().execute(arch_ctx)
            log_success(f"{arch} signing complete")
            # === PACKAGE PHASE ===
            log_info(f"\n📦 Packaging {arch} build...")
            MacOSPackageModule().execute(arch_ctx)
            log_success(f"{arch} packaging complete")
            # === UPLOAD PHASE ===
            # Upload failures are deliberately non-fatal: the DMGs remain on
            # disk and can be uploaded manually.
            log_info(f"\n☁️ Uploading {arch} artifacts...")
            try:
                GCSUploadModule().execute(arch_ctx)
                log_success(f"{arch} upload complete")
            except Exception as e:
                log_warning(f"⚠️ {arch} upload failed (non-fatal): {e}")
        # === MERGE INTO UNIVERSAL ===
        log_info("\n" + "=" * 70)
        log_info("🔄 Merging into universal binary...")
        log_info("=" * 70)
        # built_apps[0] is arm64 and [1] is x64, per UNIVERSAL_ARCHITECTURES order
        self._merge_universal(ctx, built_apps[0], built_apps[1])
        # Verify universal binary was created
        universal_app = ctx.chromium_src / "out/Default_universal/BrowserOS.app"
        if not universal_app.exists():
            raise RuntimeError(f"Universal binary not found: {universal_app}")
        log_success(f"✅ Universal binary created: {universal_app}")
        # === SIGN + PACKAGE + UPLOAD UNIVERSAL ===
        log_info("\n" + "=" * 70)
        log_info("🔏 Processing universal binary...")
        log_info("=" * 70)
        universal_ctx = self._create_universal_context(ctx)
        # Sign universal
        log_info("\n🔏 Signing universal build...")
        MacOSSignModule().execute(universal_ctx)
        log_success("✅ Universal signing complete")
        # Package universal
        log_info("\n📦 Packaging universal build...")
        MacOSPackageModule().execute(universal_ctx)
        log_success("✅ Universal packaging complete")
        # Upload universal
        log_info("\n☁️ Uploading universal artifacts...")
        try:
            GCSUploadModule().execute(universal_ctx)
            log_success("✅ Universal upload complete")
        except Exception as e:
            log_warning(f"⚠️ Universal upload failed (non-fatal): {e}")
        log_info("\n" + "=" * 70)
        log_success("✅ Universal build pipeline complete!")
        log_info("Artifacts created:")
        # NOTE(review): these two lines assume the base ctx's DMG name
        # contains "universal" so .replace() can substitute the arch; if the
        # base ctx has a different architecture the printed names will be
        # wrong — confirm against Context.get_dmg_name().
        log_info(f" - arm64 DMG: {ctx.get_dist_dir() / ctx.get_dmg_name(signed=True).replace('universal', 'arm64')}")
        log_info(f" - x64 DMG: {ctx.get_dist_dir() / ctx.get_dmg_name(signed=True).replace('universal', 'x64')}")
        log_info(f" - universal DMG: {ctx.get_dist_dir() / universal_ctx.get_dmg_name(signed=True)}")
        log_info("=" * 70)

    def _clean_build_directories(self, ctx: Context) -> None:
        """Clean architecture-specific and universal build directories

        Args:
            ctx: Base context
        """
        from ...common.utils import safe_rmtree
        log_info("\n🧹 Cleaning build directories...")
        # Clean architecture-specific directories
        for arch in UNIVERSAL_ARCHITECTURES:
            arch_dir = ctx.chromium_src / f"out/Default_{arch}"
            if arch_dir.exists():
                log_info(f" Removing {arch_dir}")
                safe_rmtree(arch_dir)
        # Clean universal directory
        universal_dir = ctx.chromium_src / "out/Default_universal"
        if universal_dir.exists():
            log_info(f" Removing {universal_dir}")
            safe_rmtree(universal_dir)
        log_success("✅ Build directories cleaned")

    def _create_arch_context(self, base_ctx: Context, arch: str) -> Context:
        """Create a new context for a specific architecture

        Args:
            base_ctx: Base context with common settings
            arch: Architecture to build (arm64 or x64)

        Returns:
            New Context object with architecture set and fixed app path
            to prevent universal auto-detection
        """
        ctx = Context(
            root_dir=base_ctx.root_dir,
            chromium_src=base_ctx.chromium_src,
            architecture=arch,
            build_type=base_ctx.build_type,
        )
        # Set fixed app path to prevent universal auto-detection in get_app_path()
        # This is critical: after arm64 is built, get_app_path() would otherwise
        # try to detect the universal dir for x64 context
        ctx._fixed_app_path = ctx.chromium_src / f"out/Default_{arch}" / ctx.BROWSEROS_APP_NAME
        return ctx

    def _create_universal_context(self, base_ctx: Context) -> Context:
        """Create a new context for the universal binary

        Args:
            base_ctx: Base context with common settings

        Returns:
            New Context object configured for universal binary
        """
        ctx = Context(
            root_dir=base_ctx.root_dir,
            chromium_src=base_ctx.chromium_src,
            architecture="universal",
            build_type=base_ctx.build_type,
        )
        # Set fixed app path to the universal binary
        ctx._fixed_app_path = ctx.chromium_src / "out/Default_universal" / ctx.BROWSEROS_APP_NAME
        # Override out_dir for universal
        ctx.out_dir = "out/Default_universal"
        return ctx

    def _merge_universal(
        self,
        ctx: Context,
        arm64_app: Path,
        x64_app: Path,
    ) -> None:
        """Merge arm64 + x64 into universal binary

        Args:
            ctx: Base context
            arm64_app: Path to arm64 .app bundle
            x64_app: Path to x64 .app bundle

        Raises:
            RuntimeError: If merge fails
        """
        # Use existing merge helper
        from ..package.merge import merge_architectures
        # Prepare output path
        universal_dir = ctx.chromium_src / "out/Default_universal"
        # Create universal directory (already cleaned in _clean_build_directories)
        universal_dir.mkdir(parents=True, exist_ok=True)
        universal_app = universal_dir / "BrowserOS.app"
        # Find universalizer script
        universalizer_script = ctx.root_dir / "build/modules/package/universalizer_patched.py"
        log_info(f"📱 Input 1 (arm64): {arm64_app}")
        log_info(f"📱 Input 2 (x64): {x64_app}")
        log_info(f"🎯 Output (universal): {universal_app}")
        log_info(f"🔧 Universalizer: {universalizer_script}")
        # Merge the architectures
        success = merge_architectures(
            arch1_path=arm64_app,
            arch2_path=x64_app,
            output_path=universal_app,
            universalizer_script=universalizer_script,
        )
        if not success:
            raise RuntimeError("Failed to merge architectures into universal binary")

View File

@@ -1,45 +0,0 @@
#!/usr/bin/env python3
"""
Build configuration module for Nxtscape build system
"""
import os
import sys
from pathlib import Path
from typing import Optional
from context import BuildContext
from utils import run_command, log_info, log_error, log_success, join_paths, IS_WINDOWS
def configure(ctx: BuildContext, gn_flags_file: Optional[Path] = None) -> bool:
    """Generate GN build files for the context's build type and architecture.

    Copies the GN flags file into args.gn (appending target_cpu) and runs
    `gn gen` in the chromium checkout.

    Raises:
        FileNotFoundError: if the GN flags file does not exist.
    """
    log_info(f"\n⚙️ Configuring {ctx.build_type} build for {ctx.architecture}...")
    # Create output directory
    out_path = join_paths(ctx.chromium_src, ctx.out_dir)
    out_path.mkdir(parents=True, exist_ok=True)
    # Pick the flags file: explicit override (relative to root) or the default
    flags_file = (
        ctx.get_gn_flags_file()
        if gn_flags_file is None
        else join_paths(ctx.root_dir, gn_flags_file)
    )
    if not flags_file.exists():
        log_error(f"GN flags file not found: {flags_file}")
        raise FileNotFoundError(f"GN flags file not found: {flags_file}")
    # Write args.gn = flags + target_cpu for this architecture
    args_file = ctx.get_gn_args_file()
    args_file.write_text(
        flags_file.read_text() + f'\ntarget_cpu = "{ctx.architecture}"\n'
    )
    # Run gn gen from inside the checkout
    os.chdir(ctx.chromium_src)
    gn_cmd = "gn.bat" if IS_WINDOWS else "gn"
    run_command([gn_cmd, "gen", ctx.out_dir, "--fail-on-unused-args"])
    log_success("Build configured")
    return True

View File

@@ -1,6 +0,0 @@
"""
Dev CLI modules for Chromium patch management
"""
# This will be populated as modules are created
__all__ = ["extract", "apply", "feature", "utils"]

View File

@@ -1,678 +0,0 @@
"""
Extract module - Extract patches from git commits
This module provides commands to extract patches from git commits in a Chromium
repository, storing them as individual file diffs that can be re-applied.
"""
import click
import sys
from pathlib import Path
from typing import Optional, List, Dict
from context import BuildContext
from modules.dev_cli.utils import (
FilePatch,
FileOperation,
GitError,
run_git_command,
validate_git_repository,
validate_commit_exists,
parse_diff_output,
write_patch_file,
create_deletion_marker,
create_binary_marker,
log_extraction_summary,
get_commit_info,
get_commit_changed_files,
)
from utils import log_info, log_error, log_success, log_warning
@click.group(name="extract")
def extract_group():
    """Extract patches from git commits"""
    # Click group shell: the `commit` and `range` subcommands below attach here.
    pass
@extract_group.command(name="commit")
@click.argument("commit")
@click.option("--verbose", "-v", is_flag=True, help="Show detailed output")
@click.option("--force", "-f", is_flag=True, help="Overwrite existing patches")
@click.option("--include-binary", is_flag=True, help="Include binary files")
@click.option("--base", help="Extract full diff from base commit for files in COMMIT")
@click.pass_context
def extract_commit(ctx, commit, verbose, force, include_binary, base):
    """Extract patches from a single commit

    \b
    Examples:
      dev extract commit HEAD
      dev extract commit abc123
      dev extract commit HEAD~1 --verbose
      dev extract commit HEAD --base chromium/main

    With --base, extracts files changed in COMMIT but shows
    the full diff from base..COMMIT for those files.
    """
    # Get chromium source from parent context (set by the parent dev group)
    chromium_src = ctx.parent.obj.get("chromium_src")
    # Create build context (lazy import to avoid a circular import with dev)
    from dev import create_build_context
    build_ctx = create_build_context(chromium_src)
    if not build_ctx:
        # create_build_context already reported the problem; nothing to do
        return
    # Validate it's a git repository
    if not validate_git_repository(build_ctx.chromium_src):
        log_error(f"Not a git repository: {build_ctx.chromium_src}")
        ctx.exit(1)
    if base:
        log_info(f"Extracting patches from commit: {commit} (base: {base})")
        # Validate base commit exists
        if not validate_commit_exists(base, build_ctx.chromium_src):
            log_error(f"Base commit not found: {base}")
            ctx.exit(1)
    else:
        log_info(f"Extracting patches from commit: {commit}")
    try:
        extracted = extract_single_commit(
            build_ctx, commit, verbose, force, include_binary, base
        )
        if extracted > 0:
            log_success(f"Successfully extracted {extracted} patches from {commit}")
        else:
            log_warning(f"No patches extracted from {commit}")
    except GitError as e:
        log_error(f"Git error: {e}")
        ctx.exit(1)
    except Exception as e:
        # Catch-all boundary for the CLI: report, optionally show traceback
        log_error(f"Unexpected error: {e}")
        if verbose:
            import traceback
            traceback.print_exc()
        ctx.exit(1)
@extract_group.command(name="range")
@click.argument("base_commit")
@click.argument("head_commit")
@click.option("--verbose", "-v", is_flag=True, help="Show detailed output")
@click.option("--force", "-f", is_flag=True, help="Overwrite existing patches")
@click.option("--include-binary", is_flag=True, help="Include binary files")
@click.option("--squash", is_flag=True, help="Squash all commits into single patches")
@click.option(
    "--base",
    help="Use different base for diff (gets full diff from base for files in range)",
)
@click.pass_context
def extract_range(
    ctx, base_commit, head_commit, verbose, force, include_binary, squash, base
):
    """Extract patches from a range of commits

    \b
    Examples:
      dev extract range main HEAD
      dev extract range HEAD~5 HEAD
      dev extract range chromium-base HEAD --squash
      dev extract range HEAD~5 HEAD --base upstream/main
    """
    # Get chromium source from parent context (set by the parent dev group)
    chromium_src = ctx.parent.obj.get("chromium_src")
    # Create build context (lazy import to avoid a circular import with dev)
    from dev import create_build_context
    build_ctx = create_build_context(chromium_src)
    if not build_ctx:
        # create_build_context already reported the problem; nothing to do
        return
    # Validate it's a git repository
    if not validate_git_repository(build_ctx.chromium_src):
        log_error(f"Not a git repository: {build_ctx.chromium_src}")
        ctx.exit(1)
    if base:
        log_info(
            f"Extracting patches from range: {base_commit}..{head_commit} (with base: {base})"
        )
    else:
        log_info(f"Extracting patches from range: {base_commit}..{head_commit}")
    try:
        if squash:
            # Extract as single cumulative diff
            extracted = extract_commit_range(
                build_ctx,
                base_commit,
                head_commit,
                verbose,
                force,
                include_binary,
                base,
            )
        else:
            # Extract each commit separately
            extracted = extract_commits_individually(
                build_ctx,
                base_commit,
                head_commit,
                verbose,
                force,
                include_binary,
                base,
            )
        if extracted > 0:
            log_success(f"Successfully extracted {extracted} patches from range")
        else:
            # Fix: was an f-string with no placeholders (lint F541)
            log_warning("No patches extracted from range")
    except GitError as e:
        log_error(f"Git error: {e}")
        ctx.exit(1)
    except Exception as e:
        # Catch-all boundary for the CLI: report, optionally show traceback
        log_error(f"Unexpected error: {e}")
        if verbose:
            import traceback
            traceback.print_exc()
        ctx.exit(1)
def extract_single_commit(
    ctx: BuildContext,
    commit_hash: str,
    verbose: bool = False,
    force: bool = False,
    include_binary: bool = False,
    base: Optional[str] = None,
) -> int:
    """Extract per-file patches from one commit.

    With `base` set, files changed in the commit are diffed from `base`
    instead of against the commit's parent.

    Args:
        ctx: Build context
        commit_hash: Commit to extract
        verbose: Show detailed output
        force: Overwrite existing patches
        include_binary: Include binary files
        base: Optional alternate base for the diff

    Returns:
        Number of patches successfully extracted

    Raises:
        GitError: if the commit does not exist.
    """
    # Guard: the commit must be resolvable in the checkout
    if not validate_commit_exists(commit_hash, ctx.chromium_src):
        raise GitError(f"Commit not found: {commit_hash}")
    # Fetch metadata for the verbose log lines
    commit_info = get_commit_info(commit_hash, ctx.chromium_src)
    if verbose and commit_info:
        log_info(
            f" Author: {commit_info['author_name']} <{commit_info['author_email']}>"
        )
        log_info(f" Subject: {commit_info['subject']}")
    # Dispatch on the diff base
    if base:
        return extract_with_base(ctx, commit_hash, base, verbose, force, include_binary)
    return extract_normal(ctx, commit_hash, verbose, force, include_binary)
def extract_normal(
    ctx: BuildContext,
    commit_hash: str,
    verbose: bool,
    force: bool,
    include_binary: bool,
) -> int:
    """Extract per-file patches by diffing a commit against its parent."""
    cmd = ["git", "diff", f"{commit_hash}^..{commit_hash}"]
    if include_binary:
        cmd.append("--binary")
    result = run_git_command(cmd, cwd=ctx.chromium_src)
    if result.returncode != 0:
        raise GitError(f"Failed to get diff for commit {commit_hash}: {result.stderr}")
    # Split the unified diff into one FilePatch per file
    file_patches = parse_diff_output(result.stdout)
    if not file_patches:
        log_warning("No changes found in commit")
        return 0
    # Respect existing patch files unless the caller forced overwriting
    if not force and not check_overwrite(ctx, file_patches, verbose):
        return 0
    return write_patches(ctx, file_patches, verbose, include_binary)
def extract_with_base(
    ctx: BuildContext,
    commit_hash: str,
    base: str,
    verbose: bool,
    force: bool,
    include_binary: bool,
) -> int:
    """Extract patches with custom base (full diff from base for files in commit)

    Args:
        ctx: Build context (supplies the chromium checkout path).
        commit_hash: Commit whose changed files select what to extract.
        base: Commit to diff against instead of the commit's parent.
        verbose: Log per-file progress.
        force: Skip the overwrite confirmation prompt.
        include_binary: Pass --binary to git diff.

    Returns:
        Number of patches successfully extracted.
    """
    # Step 1: Get list of files changed in the commit
    changed_files = get_commit_changed_files(commit_hash, ctx.chromium_src)
    if not changed_files:
        log_warning(f"No files changed in commit {commit_hash}")
        return 0
    if verbose:
        log_info(f"Files changed in {commit_hash}: {len(changed_files)}")
    # Step 2: For each file, get diff from base to commit
    file_patches = {}
    for file_path in changed_files:
        if verbose:
            log_info(f" Getting diff for: {file_path}")
        # Get diff for this specific file from base to commit
        diff_cmd = ["git", "diff", f"{base}..{commit_hash}", "--", file_path]
        if include_binary:
            diff_cmd.append("--binary")
        result = run_git_command(diff_cmd, cwd=ctx.chromium_src)
        if result.returncode != 0:
            # Best-effort: skip this file but keep extracting the rest.
            log_warning(f"Failed to get diff for {file_path}")
            continue
        if result.stdout.strip():
            # Parse this single file's diff
            patches = parse_diff_output(result.stdout)
            # Should only have one file in the result
            if patches:
                file_patches.update(patches)
        else:
            # Empty diff: the file may be identical between base and commit,
            # or it was added/deleted. Probe existence on both sides with
            # `git cat-file -e` to distinguish the cases.
            base_exists = (
                run_git_command(
                    ["git", "cat-file", "-e", f"{base}:{file_path}"],
                    cwd=ctx.chromium_src,
                ).returncode
                == 0
            )
            commit_exists = (
                run_git_command(
                    ["git", "cat-file", "-e", f"{commit_hash}:{file_path}"],
                    cwd=ctx.chromium_src,
                ).returncode
                == 0
            )
            if not base_exists and commit_exists:
                # File was added - get full content
                diff_cmd = ["git", "diff", f"{base}..{commit_hash}", "--", file_path]
                if include_binary:
                    diff_cmd.append("--binary")
                result = run_git_command(diff_cmd, cwd=ctx.chromium_src)
                if result.stdout.strip():
                    patches = parse_diff_output(result.stdout)
                    if patches:
                        file_patches.update(patches)
            elif base_exists and not commit_exists:
                # File was deleted relative to base; record an explicit
                # DELETE patch with no content.
                file_patches[file_path] = FilePatch(
                    file_path=file_path,
                    operation=FileOperation.DELETE,
                    patch_content=None,
                    is_binary=False,
                )
    if not file_patches:
        log_warning("No patches to extract")
        return 0
    log_info(f"Extracting {len(file_patches)} patches with base {base}")
    # Check for existing patches
    if not force and not check_overwrite(ctx, file_patches, verbose):
        return 0
    # Write patches
    return write_patches(ctx, file_patches, verbose, include_binary)
def check_overwrite(ctx: BuildContext, file_patches: Dict, verbose: bool) -> bool:
    """Check for existing patches and prompt for overwrite"""
    # Gather every target whose patch file is already present on disk.
    existing = [
        fp for fp in file_patches
        if ctx.get_patch_path_for_file(fp).exists()
    ]
    if not existing:
        return True

    log_warning(f"Found {len(existing)} existing patches")
    if verbose:
        for path in existing[:5]:
            log_warning(f" - {path}")
        if len(existing) > 5:
            log_warning(f" ... and {len(existing) - 5} more")

    # Require an explicit confirmation before clobbering anything.
    if not click.confirm("Overwrite existing patches?", default=False):
        log_info("Extraction cancelled")
        return False
    return True
def write_patches(
    ctx: BuildContext,
    file_patches: Dict[str, FilePatch],
    verbose: bool,
    include_binary: bool,
) -> int:
    """Write patches to disk

    Args:
        ctx: Build context (resolves patch output locations).
        file_patches: Mapping of file path -> FilePatch to persist.
        verbose: Log one line per processed file.
        include_binary: Write marker files for binary patches instead of skipping.

    Returns:
        Number of patches written successfully (failures and skips are
        logged but not reflected in the return value).
    """
    success_count = 0
    fail_count = 0
    skip_count = 0
    for file_path, patch in file_patches.items():
        if verbose:
            op_str = patch.operation.value.capitalize()
            log_info(f"Processing ({op_str}): {file_path}")
        # Handle different operations
        if patch.operation == FileOperation.DELETE:
            # Create deletion marker
            if create_deletion_marker(ctx, file_path):
                success_count += 1
            else:
                fail_count += 1
        elif patch.is_binary:
            if include_binary:
                # Create binary marker
                if create_binary_marker(ctx, file_path, patch.operation):
                    success_count += 1
                else:
                    fail_count += 1
            else:
                # Binary content is not representable as a text patch.
                log_warning(f" Skipping binary file: {file_path}")
                skip_count += 1
        elif patch.operation == FileOperation.RENAME:
            # Write patch with rename info
            if patch.patch_content:
                # If there are changes beyond the rename
                if write_patch_file(ctx, file_path, patch.patch_content):
                    success_count += 1
                else:
                    fail_count += 1
            else:
                # Pure rename - create a <name>.rename marker recording the
                # old path and similarity instead of an empty patch.
                marker_path = ctx.get_dev_patches_dir() / file_path
                marker_path = marker_path.with_suffix(marker_path.suffix + ".rename")
                marker_path.parent.mkdir(parents=True, exist_ok=True)
                try:
                    marker_content = f"Renamed from: {patch.old_path}\nSimilarity: {patch.similarity}%\n"
                    marker_path.write_text(marker_content)
                    log_info(f" Rename marked: {file_path}")
                    success_count += 1
                except Exception as e:
                    log_error(f" Failed to mark rename: {e}")
                    fail_count += 1
        else:
            # Normal patch (ADD, MODIFY, COPY)
            if patch.patch_content:
                if write_patch_file(ctx, file_path, patch.patch_content):
                    success_count += 1
                else:
                    fail_count += 1
            else:
                log_warning(f" No patch content for: {file_path}")
                skip_count += 1
    # Log summary
    log_extraction_summary(file_patches)
    if fail_count > 0:
        log_warning(f"Failed to extract {fail_count} patches")
    if skip_count > 0:
        log_info(f"Skipped {skip_count} files")
    return success_count
def extract_commit_range(
    ctx: BuildContext,
    base_commit: str,
    head_commit: str,
    verbose: bool = False,
    force: bool = False,
    include_binary: bool = False,
    custom_base: Optional[str] = None,
) -> int:
    """Extract patches from a commit range as a single cumulative diff

    Args:
        ctx: Build context (supplies the chromium checkout path).
        base_commit: Exclusive lower bound of the range.
        head_commit: Inclusive upper bound of the range.
        verbose: Show detailed output in the overwrite check.
        force: Skip the overwrite confirmation prompt.
        include_binary: Write markers for binary files instead of skipping.
        custom_base: If given, diff from this commit to head for the files
            changed inside base_commit..head_commit.

    Returns:
        Number of patches successfully extracted
    """
    # Step 1: Validate commits
    if not validate_commit_exists(base_commit, ctx.chromium_src):
        raise GitError(f"Base commit not found: {base_commit}")
    if not validate_commit_exists(head_commit, ctx.chromium_src):
        raise GitError(f"Head commit not found: {head_commit}")
    if custom_base and not validate_commit_exists(custom_base, ctx.chromium_src):
        raise GitError(f"Custom base commit not found: {custom_base}")
    # Count commits in range for progress
    result = run_git_command(
        ["git", "rev-list", "--count", f"{base_commit}..{head_commit}"],
        cwd=ctx.chromium_src,
    )
    # A failed rev-list is treated as an empty range (warn and bail below).
    commit_count = int(result.stdout.strip()) if result.returncode == 0 else 0
    if commit_count == 0:
        log_warning(f"No commits between {base_commit} and {head_commit}")
        return 0
    log_info(f"Processing {commit_count} commits")
    # Step 2: Get diff based on whether we have a custom base
    if custom_base:
        # First get list of files changed in the range
        range_files_cmd = [
            "git",
            "diff",
            "--name-only",
            f"{base_commit}..{head_commit}",
        ]
        result = run_git_command(range_files_cmd, cwd=ctx.chromium_src)
        if result.returncode != 0:
            raise GitError(f"Failed to get changed files: {result.stderr}")
        changed_files = (
            result.stdout.strip().split("\n") if result.stdout.strip() else []
        )
        if not changed_files:
            log_warning("No files changed in range")
            return 0
        log_info(f"Found {len(changed_files)} files changed in range")
        # Now get diff from custom base to head for these files
        diff_cmd = ["git", "diff", f"{custom_base}..{head_commit}"]
        if include_binary:
            diff_cmd.append("--binary")
        # Add the specific files to diff command (after the `--` separator
        # so paths are never mistaken for revisions).
        diff_cmd.append("--")
        diff_cmd.extend(changed_files)
    else:
        # Regular diff from base_commit to head_commit
        diff_cmd = ["git", "diff", f"{base_commit}..{head_commit}"]
        if include_binary:
            diff_cmd.append("--binary")
    # Generous timeout: range diffs over a chromium tree can be large.
    result = run_git_command(diff_cmd, cwd=ctx.chromium_src, timeout=120)
    if result.returncode != 0:
        raise GitError(f"Failed to get diff for range: {result.stderr}")
    # Step 3-5: Process diff
    file_patches = parse_diff_output(result.stdout)
    if not file_patches:
        log_warning("No changes found in commit range")
        return 0
    # Check for existing patches
    if not force and not check_overwrite(ctx, file_patches, verbose):
        return 0
    success_count = 0
    fail_count = 0
    skip_count = 0
    # Process with progress indicator
    with click.progressbar(
        file_patches.items(),
        label="Extracting patches",
        show_pos=True,
        show_percent=True,
    ) as patches_bar:
        for file_path, patch in patches_bar:
            # Handle different operations
            if patch.operation == FileOperation.DELETE:
                if create_deletion_marker(ctx, file_path):
                    success_count += 1
                else:
                    fail_count += 1
            elif patch.is_binary:
                if include_binary:
                    if create_binary_marker(ctx, file_path, patch.operation):
                        success_count += 1
                    else:
                        fail_count += 1
                else:
                    skip_count += 1
            elif patch.patch_content:
                if write_patch_file(ctx, file_path, patch.patch_content):
                    success_count += 1
                else:
                    fail_count += 1
            else:
                skip_count += 1
    # Step 6: Log summary
    log_extraction_summary(file_patches)
    if fail_count > 0:
        log_warning(f"Failed to extract {fail_count} patches")
    if skip_count > 0:
        log_info(f"Skipped {skip_count} files")
    return success_count
def extract_commits_individually(
    ctx: BuildContext,
    base_commit: str,
    head_commit: str,
    verbose: bool = False,
    force: bool = False,
    include_binary: bool = False,
    custom_base: Optional[str] = None,
) -> int:
    """Extract patches from each commit in a range individually

    This preserves commit boundaries and can help with conflict resolution.

    Returns:
        Total number of patches successfully extracted
    """
    if custom_base and not validate_commit_exists(custom_base, ctx.chromium_src):
        raise GitError(f"Custom base commit not found: {custom_base}")

    # Oldest-first list of commits strictly after base_commit, up to head.
    listing = run_git_command(
        ["git", "rev-list", "--reverse", f"{base_commit}..{head_commit}"],
        cwd=ctx.chromium_src,
    )
    if listing.returncode != 0:
        raise GitError(f"Failed to list commits: {listing.stderr}")
    commits = [line.strip() for line in listing.stdout.strip().split("\n") if line.strip()]
    if not commits:
        log_warning(f"No commits between {base_commit} and {head_commit}")
        return 0

    log_info(f"Extracting patches from {len(commits)} commits individually")
    if custom_base:
        log_info(f"Using custom base: {custom_base}")

    total = 0
    failures = []
    with click.progressbar(
        commits, label="Processing commits", show_pos=True, show_percent=True
    ) as bar:
        for sha in bar:
            try:
                if custom_base:
                    # Full diff from the custom base for this commit's files.
                    count = extract_with_base(
                        ctx,
                        sha,
                        custom_base,
                        verbose=False,
                        force=force,
                        include_binary=include_binary,
                    )
                else:
                    # Default: diff each commit against its own parent.
                    count = extract_single_commit(
                        ctx,
                        sha,
                        verbose=False,
                        force=force,
                        include_binary=include_binary,
                    )
                total += count
            except GitError as err:
                # Record and continue; one bad commit should not stop the run.
                failures.append((sha, str(err)))
                if verbose:
                    log_error(f"Failed to extract {sha}: {err}")

    if failures:
        log_warning(f"Failed to extract {len(failures)} commits:")
        for sha, message in failures[:5]:
            log_warning(f" - {sha[:8]}: {message}")
        if len(failures) > 5:
            log_warning(f" ... and {len(failures) - 5} more")
    return total

View File

@@ -1,248 +0,0 @@
"""
Feature module - Manage feature-to-file mappings
Simple feature management with YAML persistence.
"""
import click
import yaml
from pathlib import Path
from typing import Dict, List
from context import BuildContext
from modules.dev_cli.utils import get_commit_changed_files, run_git_command
from utils import log_info, log_error, log_success, log_warning
@click.group(name="feature")
def feature_group():
    """Manage feature-to-file mappings"""
    # Container group only; subcommands register themselves below.
    pass
@feature_group.command(name="add")
@click.argument("feature_name")
@click.argument("commit")
@click.option("--description", "-d", help="Description of the feature")
@click.pass_context
def add_feature(ctx, feature_name, commit, description):
    """Add files from a commit to a feature

    \b
    Examples:
        dev feature add llm-chat HEAD
        dev feature add my-feature abc123 -d "My new feature"
    """
    chromium_src = ctx.parent.obj.get("chromium_src")
    from dev import create_build_context

    build_ctx = create_build_context(chromium_src)
    if not build_ctx:
        return

    # The commit's touched files seed (or extend) the feature's file list.
    changed_files = get_commit_changed_files(commit, build_ctx.chromium_src)
    if not changed_files:
        log_error(f"No files changed in commit {commit}")
        ctx.exit(1)

    # Load the existing features.yaml, or start a fresh document.
    features_path = build_ctx.get_features_yaml_path()
    if features_path.exists():
        with open(features_path) as f:
            data = yaml.safe_load(f) or {}
    else:
        data = {"version": "1.0", "features": {}}
    features = data.get("features", {})

    if feature_name in features:
        # Merge: union of previously recorded files with the commit's files.
        merged = set(features[feature_name].get("files", [])) | set(changed_files)
        features[feature_name]["files"] = sorted(merged)
        log_info(f"Updated feature '{feature_name}' ({len(merged)} files total)")
    else:
        features[feature_name] = {
            "description": description or f"Feature from commit {commit[:8]}",
            "files": sorted(changed_files),
        }
        log_info(f"Created feature '{feature_name}' with {len(changed_files)} files")

    # Persist the updated mapping back to YAML.
    data["features"] = features
    with open(features_path, "w") as f:
        yaml.dump(data, f, default_flow_style=False, sort_keys=False)
    log_success(f"Feature '{feature_name}' saved")
@feature_group.command(name="list")
@click.pass_context
def list_features(ctx):
    """List all features"""
    # Features live in ./features.yaml relative to the current directory.
    features_path = Path.cwd() / "features.yaml"
    if not features_path.exists():
        log_warning("No features defined (features.yaml not found)")
        return

    with open(features_path) as f:
        data = yaml.safe_load(f) or {}

    features = data.get("features", {})
    if not features:
        log_warning("No features defined")
        return

    log_info("Features:")
    for name, info in features.items():
        description = info.get("description", "No description")
        log_info(f" {name} ({len(info.get('files', []))} files) - {description}")
@feature_group.command(name="show")
@click.argument("feature_name")
@click.pass_context
def show_feature(ctx, feature_name):
    """Show details of a specific feature

    Prints the feature's description and full file list from features.yaml.
    Exits with status 1 if the file or feature is missing.
    """
    features_path = Path.cwd() / "features.yaml"
    if not features_path.exists():
        log_error("No features.yaml found")
        ctx.exit(1)
    with open(features_path) as f:
        # BUG FIX: safe_load returns None for an empty file; default to {}
        # (matches list_features) so we report "not found" instead of
        # crashing with AttributeError on None.get.
        data = yaml.safe_load(f) or {}
    features = data.get("features", {})
    if feature_name not in features:
        log_error(f"Feature '{feature_name}' not found")
        ctx.exit(1)
    info = features[feature_name]
    files = info.get("files", [])
    log_info(f"Feature: {feature_name}")
    log_info(f"Description: {info.get('description', 'No description')}")
    log_info(f"Files ({len(files)}):")
    for file_path in files:
        log_info(f" - {file_path}")
@feature_group.command(name="generate-patch")
@click.argument("feature_name")
@click.option("--output", "-o", type=click.Path(), help="Output file path")
@click.pass_context
def generate_patch(ctx, feature_name, output):
    """Generate combined patch for a feature

    \b
    Examples:
        dev feature generate-patch llm-chat
        dev feature generate-patch my-feature -o combined.patch
    """
    # Load feature
    features_path = Path.cwd() / "features.yaml"
    if not features_path.exists():
        log_error("No features.yaml found")
        ctx.exit(1)
    with open(features_path) as f:
        # BUG FIX: safe_load returns None for an empty file; default to {}
        # so we report "not found" instead of crashing on None.get.
        data = yaml.safe_load(f) or {}
    features = data.get("features", {})
    if feature_name not in features:
        log_error(f"Feature '{feature_name}' not found")
        ctx.exit(1)
    file_list = features[feature_name].get("files", [])
    if not file_list:
        log_error(f"Feature '{feature_name}' has no files")
        ctx.exit(1)
    # Find patches directory (one .patch file per tracked source file)
    patches_dir = Path.cwd() / "chromium_src"
    if not patches_dir.exists():
        log_error(f"Patches directory not found: {patches_dir}")
        ctx.exit(1)
    # Collect all patches; remember files whose patch is absent.
    combined_patches = []
    missing = []
    for file_path in file_list:
        patch_path = patches_dir / f"{file_path}.patch"
        if patch_path.exists():
            combined_patches.append(patch_path.read_text())
        else:
            missing.append(file_path)
    if missing:
        log_warning(f"Missing patches for {len(missing)} files:")
        for m in missing[:5]:
            log_warning(f" - {m}")
        if len(missing) > 5:
            log_warning(f" ... and {len(missing) - 5} more")
    if not combined_patches:
        log_error("No patches found to combine")
        ctx.exit(1)
    # Create combined patch with headers
    header = f"# Combined patch for feature: {feature_name}\n"
    header += f"# Files: {len(file_list)}\n"
    header += f"# Description: {features[feature_name].get('description', 'No description')}\n\n"
    combined = header + "\n".join(combined_patches)
    # Write output
    if output:
        output_path = Path(output)
        output_path.write_text(combined)
        log_success(f"Generated patch: {output_path}")
    else:
        # Output to stdout
        click.echo(combined)
@feature_group.command(name="remove")
@click.argument("feature_name")
@click.pass_context
def remove_feature(ctx, feature_name):
    """Remove a feature

    Deletes the named feature from features.yaml and rewrites the file.
    Exits with status 1 if the file or feature is missing.
    """
    features_path = Path.cwd() / "features.yaml"
    if not features_path.exists():
        log_error("No features.yaml found")
        ctx.exit(1)
    with open(features_path) as f:
        # BUG FIX: safe_load returns None for an empty file; default to {}
        # so we report "not found" instead of crashing on None.get.
        data = yaml.safe_load(f) or {}
    features = data.get("features", {})
    if feature_name not in features:
        log_error(f"Feature '{feature_name}' not found")
        ctx.exit(1)
    # Remove and save
    del features[feature_name]
    data["features"] = features
    with open(features_path, "w") as f:
        yaml.dump(data, f, default_flow_style=False, sort_keys=False)
    log_success(f"Removed feature '{feature_name}'")

View File

@@ -1,306 +0,0 @@
#!/usr/bin/env python3
"""
Test script for diff parser functionality
This script tests various edge cases for the diff parser to ensure
it handles all types of git diff outputs correctly.
"""
import sys
from pathlib import Path
# Add parent directory to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
from modules.dev_cli.utils import parse_diff_output, FilePatch, FileOperation
def test_regular_modify():
    """Test regular file modification"""
    # Fixture: a standard MODIFY diff — one changed line with context.
    diff = """diff --git a/file.txt b/file.txt
index abc123..def456 100644
--- a/file.txt
+++ b/file.txt
@@ -1,3 +1,3 @@
 line1
-old line2
+new line2
 line3"""
    result = parse_diff_output(diff)
    assert len(result) == 1
    assert "file.txt" in result
    patch = result["file.txt"]
    assert patch.operation == FileOperation.MODIFY
    assert not patch.is_binary
    assert patch.patch_content is not None
    print("✓ Regular modify test passed")
def test_new_file():
    """Test new file addition"""
    # Fixture: "new file mode" header must map to FileOperation.ADD.
    diff = """diff --git a/newfile.txt b/newfile.txt
new file mode 100644
index 0000000..abc123
--- /dev/null
+++ b/newfile.txt
@@ -0,0 +1,3 @@
+line1
+line2
+line3"""
    result = parse_diff_output(diff)
    assert len(result) == 1
    assert "newfile.txt" in result
    patch = result["newfile.txt"]
    assert patch.operation == FileOperation.ADD
    assert patch.patch_content is not None
    print("✓ New file test passed")
def test_deleted_file():
    """Test file deletion"""
    # Fixture: "deleted file mode" header must map to FileOperation.DELETE.
    diff = """diff --git a/deleted.txt b/deleted.txt
deleted file mode 100644
index abc123..0000000
--- a/deleted.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-line1
-line2
-line3"""
    result = parse_diff_output(diff)
    assert len(result) == 1
    assert "deleted.txt" in result
    patch = result["deleted.txt"]
    assert patch.operation == FileOperation.DELETE
    print("✓ Deleted file test passed")
def test_renamed_file():
    """Test file rename"""
    # Fixture: pure rename (100% similarity, no hunks); result must be keyed
    # by the NEW path with old_path/similarity captured.
    diff = """diff --git a/old_name.txt b/new_name.txt
similarity index 100%
rename from old_name.txt
rename to new_name.txt"""
    result = parse_diff_output(diff)
    assert len(result) == 1
    assert "new_name.txt" in result
    patch = result["new_name.txt"]
    assert patch.operation == FileOperation.RENAME
    assert patch.old_path == "old_name.txt"
    assert patch.similarity == 100
    print("✓ Renamed file test passed")
def test_renamed_with_changes():
    """Test file rename with content changes"""
    # Fixture: rename plus content hunks — patch_content must be retained.
    diff = """diff --git a/old_name.txt b/new_name.txt
similarity index 85%
rename from old_name.txt
rename to new_name.txt
index abc123..def456 100644
--- a/old_name.txt
+++ b/new_name.txt
@@ -1,3 +1,4 @@
 line1
 line2
-line3
+modified line3
+new line4"""
    result = parse_diff_output(diff)
    assert len(result) == 1
    assert "new_name.txt" in result
    patch = result["new_name.txt"]
    assert patch.operation == FileOperation.RENAME
    assert patch.old_path == "old_name.txt"
    assert patch.similarity == 85
    assert patch.patch_content is not None
    print("✓ Renamed with changes test passed")
def test_binary_file():
    """Test binary file handling"""
    # Fixture: "Binary files ... differ" line must set is_binary and drop content.
    diff = """diff --git a/image.png b/image.png
index abc123..def456 100644
Binary files a/image.png and b/image.png differ"""
    result = parse_diff_output(diff)
    assert len(result) == 1
    assert "image.png" in result
    patch = result["image.png"]
    assert patch.is_binary
    assert patch.patch_content is None  # Binary content not stored
    print("✓ Binary file test passed")
def test_multiple_files():
    """Test multiple files in one diff"""
    # Fixture: three files in one stream — modify, add, delete — must each
    # be split into its own FilePatch with the right operation.
    diff = """diff --git a/file1.txt b/file1.txt
index abc123..def456 100644
--- a/file1.txt
+++ b/file1.txt
@@ -1 +1 @@
-old content
+new content
diff --git a/file2.txt b/file2.txt
new file mode 100644
index 0000000..xyz789
--- /dev/null
+++ b/file2.txt
@@ -0,0 +1 @@
+new file content
diff --git a/file3.txt b/file3.txt
deleted file mode 100644
index 111111..000000
--- a/file3.txt
+++ /dev/null
@@ -1 +0,0 @@
-deleted content"""
    result = parse_diff_output(diff)
    assert len(result) == 3
    assert "file1.txt" in result
    assert "file2.txt" in result
    assert "file3.txt" in result
    assert result["file1.txt"].operation == FileOperation.MODIFY
    assert result["file2.txt"].operation == FileOperation.ADD
    assert result["file3.txt"].operation == FileOperation.DELETE
    print("✓ Multiple files test passed")
def test_no_newline_marker():
    """Test handling of 'No newline at end of file' marker"""
    # Fixture: the backslash marker lines must survive into patch_content,
    # otherwise `git apply` would corrupt the trailing newline.
    diff = """diff --git a/file.txt b/file.txt
index abc123..def456 100644
--- a/file.txt
+++ b/file.txt
@@ -1 +1 @@
-old content
\\ No newline at end of file
+new content
\\ No newline at end of file"""
    result = parse_diff_output(diff)
    assert len(result) == 1
    assert "file.txt" in result
    patch = result["file.txt"]
    assert patch.operation == FileOperation.MODIFY
    assert "\\ No newline at end of file" in patch.patch_content
    print("✓ No newline marker test passed")
def test_complex_path():
    """Test handling of complex file paths"""
    # Fixture: deeply nested path must be preserved verbatim as the key.
    diff = """diff --git a/src/chrome/browser/ui/views/file.cc b/src/chrome/browser/ui/views/file.cc
index abc123..def456 100644
--- a/src/chrome/browser/ui/views/file.cc
+++ b/src/chrome/browser/ui/views/file.cc
@@ -100,7 +100,7 @@ void Function() {
 int x = 1;
- int y = 2;
+ int y = 3;
 return x + y;
 }"""
    result = parse_diff_output(diff)
    assert len(result) == 1
    assert "src/chrome/browser/ui/views/file.cc" in result
    patch = result["src/chrome/browser/ui/views/file.cc"]
    assert patch.operation == FileOperation.MODIFY
    print("✓ Complex path test passed")
def test_empty_diff():
    """Test empty diff handling"""
    # An empty input must yield an empty mapping, not an error.
    assert len(parse_diff_output("")) == 0
    print("✓ Empty diff test passed")
def test_mode_change():
    """Test file mode change"""
    # Fixture: old/new mode headers must be carried through into the
    # patch content so the mode change can be re-applied.
    diff = """diff --git a/script.sh b/script.sh
old mode 100644
new mode 100755
index abc123..abc123
--- a/script.sh
+++ b/script.sh
@@ -1 +1 @@
 #!/bin/bash"""
    result = parse_diff_output(diff)
    assert len(result) == 1
    assert "script.sh" in result
    patch = result["script.sh"]
    # Mode changes are captured in the patch content
    assert "old mode 100644" in patch.patch_content
    assert "new mode 100755" in patch.patch_content
    print("✓ Mode change test passed")
def test_copied_file():
    """Test file copy"""
    # Fixture: copy headers map to COPY, keyed by the destination path.
    diff = """diff --git a/original.txt b/copy.txt
similarity index 100%
copy from original.txt
copy to copy.txt"""
    result = parse_diff_output(diff)
    assert len(result) == 1
    assert "copy.txt" in result
    patch = result["copy.txt"]
    assert patch.operation == FileOperation.COPY
    assert patch.old_path == "original.txt"
    assert patch.similarity == 100
    print("✓ Copied file test passed")
def run_all_tests():
    """Run all test cases"""
    # Keep this list in sync with the test functions above.
    tests = [
        test_regular_modify,
        test_new_file,
        test_deleted_file,
        test_renamed_file,
        test_renamed_with_changes,
        test_binary_file,
        test_multiple_files,
        test_no_newline_marker,
        test_complex_path,
        test_empty_diff,
        test_mode_change,
        test_copied_file,
    ]
    print("Running diff parser tests...")
    print("=" * 60)
    failures = []
    for case in tests:
        try:
            case()
        except Exception as exc:
            # Record the failure but keep running the remaining cases.
            print(f"{case.__name__} failed: {exc}")
            failures.append((case.__name__, str(exc)))
    print("=" * 60)
    if failures:
        print(f"\n{len(failures)} tests failed:")
        for name, error in failures:
            print(f" - {name}: {error}")
        return False
    print(f"\nAll {len(tests)} tests passed!")
    return True


if __name__ == "__main__":
    sys.exit(0 if run_all_tests() else 1)

View File

@@ -0,0 +1,25 @@
"""
Extract module - Extract patches from git commits.
Provides commands for extracting patches:
- extract_commit: Extract patches from a single commit
- extract_range: Extract patches from a range of commits
- extract_patch: Extract patch for a single file
"""
from .extract_commit import extract_single_commit, ExtractCommitModule
from .extract_range import (
extract_commit_range,
extract_commits_individually,
ExtractRangeModule,
)
from .extract_patch import extract_single_file_patch
__all__ = [
"extract_single_commit",
"ExtractCommitModule",
"extract_commit_range",
"extract_commits_individually",
"ExtractRangeModule",
"extract_single_file_patch",
]

View File

@@ -0,0 +1,255 @@
"""
Common functions shared across extract module commands.
Contains core extraction logic used by extract_commit and extract_range.
"""
import click
from pathlib import Path
from typing import Dict, Optional
from ...common.context import Context
from ...common.utils import log_info, log_error, log_warning
from .utils import (
FilePatch,
FileOperation,
run_git_command,
parse_diff_output,
write_patch_file,
create_deletion_marker,
create_binary_marker,
log_extraction_summary,
get_commit_changed_files,
)
def check_overwrite(ctx: Context, file_patches: Dict, verbose: bool) -> bool:
    """Check for existing patches and prompt for overwrite"""
    # Gather every target whose patch file already exists on disk.
    existing = [
        fp for fp in file_patches
        if ctx.get_patch_path_for_file(fp).exists()
    ]
    if not existing:
        return True

    log_warning(f"Found {len(existing)} existing patches")
    if verbose:
        for path in existing[:5]:
            log_warning(f" - {path}")
        if len(existing) > 5:
            log_warning(f" ... and {len(existing) - 5} more")

    # Require explicit confirmation before clobbering anything.
    if not click.confirm("Overwrite existing patches?", default=False):
        log_info("Extraction cancelled")
        return False
    return True
def write_patches(
    ctx: Context,
    file_patches: Dict[str, FilePatch],
    verbose: bool,
    include_binary: bool,
) -> int:
    """Write patches to disk

    Args:
        ctx: Build context (resolves patch output locations).
        file_patches: Mapping of file path -> FilePatch to persist.
        verbose: Log one line per processed file.
        include_binary: Write marker files for binary patches instead of skipping.

    Returns:
        Number of patches written successfully (failures and skips are
        logged but not reflected in the return value).
    """
    success_count = 0
    fail_count = 0
    skip_count = 0
    for file_path, patch in file_patches.items():
        if verbose:
            op_str = patch.operation.value.capitalize()
            log_info(f"Processing ({op_str}): {file_path}")
        # Handle different operations
        if patch.operation == FileOperation.DELETE:
            # Create deletion marker
            if create_deletion_marker(ctx, file_path):
                success_count += 1
            else:
                fail_count += 1
        elif patch.is_binary:
            if include_binary:
                # Create binary marker
                if create_binary_marker(ctx, file_path, patch.operation):
                    success_count += 1
                else:
                    fail_count += 1
            else:
                # Binary content is not representable as a text patch.
                log_warning(f" Skipping binary file: {file_path}")
                skip_count += 1
        elif patch.operation == FileOperation.RENAME:
            # Write patch with rename info
            if patch.patch_content:
                # If there are changes beyond the rename
                if write_patch_file(ctx, file_path, patch.patch_content):
                    success_count += 1
                else:
                    fail_count += 1
            else:
                # Pure rename - create a <name>.rename marker recording the
                # old path and similarity instead of an empty patch.
                marker_path = ctx.get_patches_dir() / file_path
                marker_path = marker_path.with_suffix(marker_path.suffix + ".rename")
                marker_path.parent.mkdir(parents=True, exist_ok=True)
                try:
                    marker_content = f"Renamed from: {patch.old_path}\nSimilarity: {patch.similarity}%\n"
                    marker_path.write_text(marker_content)
                    log_info(f" Rename marked: {file_path}")
                    success_count += 1
                except Exception as e:
                    log_error(f" Failed to mark rename: {e}")
                    fail_count += 1
        else:
            # Normal patch (ADD, MODIFY, COPY)
            if patch.patch_content:
                if write_patch_file(ctx, file_path, patch.patch_content):
                    success_count += 1
                else:
                    fail_count += 1
            else:
                log_warning(f" No patch content for: {file_path}")
                skip_count += 1
    # Log summary
    log_extraction_summary(file_patches)
    if fail_count > 0:
        log_warning(f"Failed to extract {fail_count} patches")
    if skip_count > 0:
        log_info(f"Skipped {skip_count} files")
    return success_count
def extract_normal(
    ctx: Context,
    commit_hash: str,
    verbose: bool,
    force: bool,
    include_binary: bool,
) -> int:
    """Extract patches normally (diff against parent)"""
    # Imported here to avoid a circular import at module load time.
    from .utils import GitError

    # Diff the commit against its first parent.
    cmd = ["git", "diff", f"{commit_hash}^..{commit_hash}"]
    if include_binary:
        cmd.append("--binary")
    outcome = run_git_command(cmd, cwd=ctx.chromium_src)
    if outcome.returncode != 0:
        raise GitError(f"Failed to get diff for commit {commit_hash}: {outcome.stderr}")

    # Split the unified diff into per-file patches.
    patches = parse_diff_output(outcome.stdout)
    if not patches:
        log_warning("No changes found in commit")
        return 0

    # Respect patches already on disk unless the caller forced overwrite.
    if not force and not check_overwrite(ctx, patches, verbose):
        return 0
    return write_patches(ctx, patches, verbose, include_binary)
def extract_with_base(
    ctx: Context,
    commit_hash: str,
    base: str,
    verbose: bool,
    force: bool,
    include_binary: bool,
) -> int:
    """Extract patches with custom base (full diff from base for files in commit)

    Args:
        ctx: Build context (supplies the chromium checkout path).
        commit_hash: Commit whose changed files select what to extract.
        base: Commit to diff against instead of the commit's parent.
        verbose: Log per-file progress.
        force: Skip the overwrite confirmation prompt.
        include_binary: Pass --binary to git diff.

    Returns:
        Number of patches successfully extracted.
    """
    # Step 1: Get list of files changed in the commit
    changed_files = get_commit_changed_files(commit_hash, ctx.chromium_src)
    if not changed_files:
        log_warning(f"No files changed in commit {commit_hash}")
        return 0
    if verbose:
        log_info(f"Files changed in {commit_hash}: {len(changed_files)}")
    # Step 2: For each file, get diff from base to commit
    file_patches = {}
    for file_path in changed_files:
        if verbose:
            log_info(f" Getting diff for: {file_path}")
        # Get diff for this specific file from base to commit
        diff_cmd = ["git", "diff", f"{base}..{commit_hash}", "--", file_path]
        if include_binary:
            diff_cmd.append("--binary")
        result = run_git_command(diff_cmd, cwd=ctx.chromium_src)
        if result.returncode != 0:
            # Best-effort: skip this file but keep extracting the rest.
            log_warning(f"Failed to get diff for {file_path}")
            continue
        if result.stdout.strip():
            # Parse this single file's diff
            patches = parse_diff_output(result.stdout)
            # Should only have one file in the result
            if patches:
                file_patches.update(patches)
        else:
            # Empty diff: the file may be identical between base and commit,
            # or it was added/deleted. Probe existence on both sides with
            # `git cat-file -e` to distinguish the cases.
            base_exists = (
                run_git_command(
                    ["git", "cat-file", "-e", f"{base}:{file_path}"],
                    cwd=ctx.chromium_src,
                ).returncode
                == 0
            )
            commit_exists = (
                run_git_command(
                    ["git", "cat-file", "-e", f"{commit_hash}:{file_path}"],
                    cwd=ctx.chromium_src,
                ).returncode
                == 0
            )
            if not base_exists and commit_exists:
                # File was added - get full content
                diff_cmd = ["git", "diff", f"{base}..{commit_hash}", "--", file_path]
                if include_binary:
                    diff_cmd.append("--binary")
                result = run_git_command(diff_cmd, cwd=ctx.chromium_src)
                if result.stdout.strip():
                    patches = parse_diff_output(result.stdout)
                    if patches:
                        file_patches.update(patches)
            elif base_exists and not commit_exists:
                # File was deleted relative to base; record an explicit
                # DELETE patch with no content.
                file_patches[file_path] = FilePatch(
                    file_path=file_path,
                    operation=FileOperation.DELETE,
                    patch_content=None,
                    is_binary=False,
                )
    if not file_patches:
        log_warning("No patches to extract")
        return 0
    log_info(f"Extracting {len(file_patches)} patches with base {base}")
    # Check for existing patches
    if not force and not check_overwrite(ctx, file_patches, verbose):
        return 0
    # Write patches
    return write_patches(ctx, file_patches, verbose, include_binary)

View File

@@ -0,0 +1,111 @@
"""
Extract Commit - Extract patches from a single git commit.
"""
from pathlib import Path
from typing import Optional
from ...common.context import Context
from ...common.module import CommandModule, ValidationError
from ...common.utils import log_info, log_success, log_warning
from .utils import (
GitError,
validate_git_repository,
validate_commit_exists,
get_commit_info,
)
from .common import extract_normal, extract_with_base
def extract_single_commit(
    ctx: Context,
    commit_hash: str,
    verbose: bool = False,
    force: bool = False,
    include_binary: bool = False,
    base: Optional[str] = None,
) -> int:
    """Extract patches from a single commit.

    Args:
        ctx: Build context
        commit_hash: Commit to extract
        verbose: Show detailed output
        force: Overwrite existing patches
        include_binary: Include binary files
        base: If provided, extract full diff from base for files in commit

    Returns:
        Number of patches successfully extracted
    """
    # Refuse to proceed unless the commit resolves in the chromium checkout.
    if not validate_commit_exists(commit_hash, ctx.chromium_src):
        raise GitError(f"Commit not found: {commit_hash}")

    # Surface author/subject metadata when the caller asked for detail.
    info = get_commit_info(commit_hash, ctx.chromium_src)
    if verbose and info:
        log_info(
            f" Author: {info['author_name']} <{info['author_email']}>"
        )
        log_info(f" Subject: {info['subject']}")

    # --base switches to "full diff from base" for the commit's files;
    # otherwise diff the commit against its parent.
    if base:
        return extract_with_base(ctx, commit_hash, base, verbose, force, include_binary)
    return extract_normal(ctx, commit_hash, verbose, force, include_binary)
class ExtractCommitModule(CommandModule):
    """Extract patches from a single commit."""

    produces = []
    requires = []
    description = "Extract patches from a single commit"

    def validate(self, ctx: Context) -> None:
        """Validate git availability and that chromium_src is a repository.

        Raises:
            ValidationError: If git is missing from PATH or chromium_src
                is not a git repository.
        """
        import shutil
        if not shutil.which("git"):
            raise ValidationError("Git is not available in PATH")
        if not validate_git_repository(ctx.chromium_src):
            raise ValidationError(f"Not a git repository: {ctx.chromium_src}")

    def execute(
        self,
        ctx: Context,
        commit: str,
        output: Optional[Path] = None,
        interactive: bool = True,
        verbose: bool = False,
        force: bool = False,
        include_binary: bool = False,
        base: Optional[str] = None,
    ) -> None:
        """Execute extract commit.

        Args:
            ctx: Build context
            commit: Git commit reference (e.g., HEAD)
            output: Output directory (unused, kept for compatibility)
            interactive: Interactive mode (unused, kept for compatibility)
            verbose: Show detailed output
            force: Overwrite existing patches
            include_binary: Include binary files
            base: Extract full diff from base commit for files in COMMIT

        Raises:
            RuntimeError: If extraction fails with a git error.
        """
        try:
            count = extract_single_commit(
                ctx,
                commit_hash=commit,
                verbose=verbose,
                force=force,
                include_binary=include_binary,
                base=base,
            )
            if count == 0:
                log_warning(f"No patches extracted from {commit}")
            else:
                log_success(f"Successfully extracted {count} patches from {commit}")
        except GitError as e:
            # Fix: chain the original GitError (PEP 3134) so the underlying
            # git failure remains visible in tracebacks instead of being
            # reported as raised "during handling of another exception".
            raise RuntimeError(f"Git error: {e}") from e

View File

@@ -0,0 +1,120 @@
"""
Extract Patch - Extract patch for a single chromium file.
"""
from typing import Tuple, Optional
from ...common.context import Context
from ...common.utils import log_info, log_warning
from .utils import (
run_git_command,
parse_diff_output,
write_patch_file,
create_deletion_marker,
validate_commit_exists,
FileOperation,
GitError,
)
def extract_single_file_patch(
    build_ctx: Context,
    chromium_path: str,
    base: str,
    force: bool = False,
) -> Tuple[bool, Optional[str]]:
    """Extract patch for a single chromium file.

    Extracts the diff from base commit to current working directory
    (including unstaged changes) for the specified file.

    Args:
        build_ctx: Build context
        chromium_path: Path to file in chromium (e.g., chrome/common/foo.h)
        base: Base commit to diff against
        force: If True, overwrite existing patch without prompting

    Returns:
        Tuple of (success: bool, error_message: Optional[str])
    """
    if not validate_commit_exists(base, build_ctx.chromium_src):
        return False, f"Base commit not found: {base}"
    log_info(f"Extracting patch for: {chromium_path}")
    log_info(f" Base: {base[:12]}")
    # Get diff from base to working directory for this file
    diff_cmd = ["git", "diff", base, "--", chromium_path]
    result = run_git_command(diff_cmd, cwd=build_ctx.chromium_src)
    if result.returncode != 0:
        return False, f"Failed to get diff: {result.stderr}"
    if not result.stdout.strip():
        # No diff - check if file exists in base vs working directory
        # `git cat-file -e` exits 0 only when the blob exists at `base`.
        base_exists = (
            run_git_command(
                ["git", "cat-file", "-e", f"{base}:{chromium_path}"],
                cwd=build_ctx.chromium_src,
            ).returncode
            == 0
        )
        working_file = build_ctx.chromium_src / chromium_path
        working_exists = working_file.exists()
        if not base_exists and not working_exists:
            return False, f"File does not exist in base or working directory: {chromium_path}"
        if base_exists and working_exists:
            # Present on both sides with an empty diff -> genuinely unchanged.
            return False, f"No changes found for: {chromium_path}"
        if not base_exists and working_exists:
            # New file - get full content as diff
            diff_cmd = ["git", "diff", "--no-index", "/dev/null", chromium_path]
            result = run_git_command(diff_cmd, cwd=build_ctx.chromium_src)
            # --no-index returns 1 when files differ, which is expected
            if not result.stdout.strip():
                return False, f"Failed to generate diff for new file: {chromium_path}"
    # Parse the diff
    file_patches = parse_diff_output(result.stdout)
    if not file_patches:
        return False, f"Failed to parse diff for: {chromium_path}"
    if chromium_path not in file_patches:
        # The file might be in the patches under a different key
        # (e.g., the --no-index path prefix); accept a lone entry.
        if len(file_patches) == 1:
            patch = list(file_patches.values())[0]
        else:
            return False, f"Unexpected diff output for: {chromium_path}"
    else:
        patch = file_patches[chromium_path]
    # Check for existing patch
    patch_path = build_ctx.get_patch_path_for_file(chromium_path)
    if patch_path.exists() and not force:
        # Imported lazily so non-interactive callers don't need click loaded.
        import click
        if not click.confirm(f"Patch already exists: {chromium_path}. Overwrite?", default=False):
            log_info("Extraction cancelled")
            return False, "Cancelled by user"
    # Handle different operations
    if patch.operation == FileOperation.DELETE:
        if create_deletion_marker(build_ctx, chromium_path):
            return True, None
        return False, f"Failed to create deletion marker for: {chromium_path}"
    if patch.is_binary:
        return False, f"Binary files not supported: {chromium_path}"
    if not patch.patch_content:
        return False, f"No patch content for: {chromium_path}"
    # Write the patch
    if write_patch_file(build_ctx, chromium_path, patch.patch_content):
        return True, None
    return False, f"Failed to write patch for: {chromium_path}"

View File

@@ -0,0 +1,311 @@
"""
Extract Range - Extract patches from a range of git commits.
"""
import click
from pathlib import Path
from typing import Optional
from ...common.context import Context
from ...common.module import CommandModule, ValidationError
from ...common.utils import log_info, log_error, log_success, log_warning
from .utils import (
FileOperation,
GitError,
run_git_command,
validate_git_repository,
validate_commit_exists,
parse_diff_output,
write_patch_file,
create_deletion_marker,
create_binary_marker,
log_extraction_summary,
)
from .common import check_overwrite, extract_with_base
from .extract_commit import extract_single_commit
def extract_commit_range(
    ctx: Context,
    base_commit: str,
    head_commit: str,
    verbose: bool = False,
    force: bool = False,
    include_binary: bool = False,
    custom_base: Optional[str] = None,
) -> int:
    """Extract patches from a commit range as a single cumulative diff

    Args:
        ctx: Build context
        base_commit: Start of the range (exclusive)
        head_commit: End of the range (inclusive)
        verbose: Show detailed output (passed through to overwrite check)
        force: Overwrite existing patches without prompting
        include_binary: Emit marker files for binary changes instead of skipping
        custom_base: If given, the files changed in the range are diffed from
            this commit instead of from base_commit

    Returns:
        Number of patches successfully extracted
    """
    # Step 1: Validate commits
    if not validate_commit_exists(base_commit, ctx.chromium_src):
        raise GitError(f"Base commit not found: {base_commit}")
    if not validate_commit_exists(head_commit, ctx.chromium_src):
        raise GitError(f"Head commit not found: {head_commit}")
    if custom_base and not validate_commit_exists(custom_base, ctx.chromium_src):
        raise GitError(f"Custom base commit not found: {custom_base}")
    # Count commits in range for progress
    result = run_git_command(
        ["git", "rev-list", "--count", f"{base_commit}..{head_commit}"],
        cwd=ctx.chromium_src,
    )
    commit_count = int(result.stdout.strip()) if result.returncode == 0 else 0
    if commit_count == 0:
        log_warning(f"No commits between {base_commit} and {head_commit}")
        return 0
    log_info(f"Processing {commit_count} commits")
    # Step 2: Get diff based on whether we have a custom base
    if custom_base:
        # First get list of files changed in the range
        range_files_cmd = [
            "git",
            "diff",
            "--name-only",
            f"{base_commit}..{head_commit}",
        ]
        result = run_git_command(range_files_cmd, cwd=ctx.chromium_src)
        if result.returncode != 0:
            raise GitError(f"Failed to get changed files: {result.stderr}")
        changed_files = (
            result.stdout.strip().split("\n") if result.stdout.strip() else []
        )
        if not changed_files:
            log_warning("No files changed in range")
            return 0
        log_info(f"Found {len(changed_files)} files changed in range")
        # Now get diff from custom base to head for these files
        diff_cmd = ["git", "diff", f"{custom_base}..{head_commit}"]
        if include_binary:
            diff_cmd.append("--binary")
        # Add the specific files to diff command
        diff_cmd.append("--")
        diff_cmd.extend(changed_files)
    else:
        # Regular diff from base_commit to head_commit
        diff_cmd = ["git", "diff", f"{base_commit}..{head_commit}"]
        if include_binary:
            diff_cmd.append("--binary")
    # Large ranges can produce very large diffs, hence the extended timeout.
    result = run_git_command(diff_cmd, cwd=ctx.chromium_src, timeout=120)
    if result.returncode != 0:
        raise GitError(f"Failed to get diff for range: {result.stderr}")
    # Step 3-5: Process diff
    file_patches = parse_diff_output(result.stdout)
    if not file_patches:
        log_warning("No changes found in commit range")
        return 0
    # Check for existing patches
    if not force and not check_overwrite(ctx, file_patches, verbose):
        return 0
    success_count = 0
    fail_count = 0
    skip_count = 0
    # Process with progress indicator
    with click.progressbar(
        file_patches.items(),
        label="Extracting patches",
        show_pos=True,
        show_percent=True,
    ) as patches_bar:
        for file_path, patch in patches_bar:
            # Handle different operations
            if patch.operation == FileOperation.DELETE:
                if create_deletion_marker(ctx, file_path):
                    success_count += 1
                else:
                    fail_count += 1
            elif patch.is_binary:
                if include_binary:
                    if create_binary_marker(ctx, file_path, patch.operation):
                        success_count += 1
                    else:
                        fail_count += 1
                else:
                    # Binary change present but --include-binary not requested.
                    skip_count += 1
            elif patch.patch_content:
                if write_patch_file(ctx, file_path, patch.patch_content):
                    success_count += 1
                else:
                    fail_count += 1
            else:
                # Not a delete, not binary, and no textual content: nothing to write.
                skip_count += 1
    # Step 6: Log summary
    log_extraction_summary(file_patches)
    if fail_count > 0:
        log_warning(f"Failed to extract {fail_count} patches")
    if skip_count > 0:
        log_info(f"Skipped {skip_count} files")
    return success_count
def extract_commits_individually(
    ctx: Context,
    base_commit: str,
    head_commit: str,
    verbose: bool = False,
    force: bool = False,
    include_binary: bool = False,
    custom_base: Optional[str] = None,
) -> int:
    """Extract patches from each commit in a range individually

    This preserves commit boundaries and can help with conflict resolution.

    Args:
        ctx: Build context
        base_commit: Start of the range (exclusive)
        head_commit: End of the range (inclusive)
        verbose: Log per-commit extraction failures as they happen
        force: Overwrite existing patches without prompting
        include_binary: Include binary files
        custom_base: If set, each commit's files are diffed from this commit

    Returns:
        Total number of patches successfully extracted
    """
    # Validate custom base if provided
    if custom_base and not validate_commit_exists(custom_base, ctx.chromium_src):
        raise GitError(f"Custom base commit not found: {custom_base}")
    # Get list of commits in range; --reverse yields oldest-first order.
    result = run_git_command(
        ["git", "rev-list", "--reverse", f"{base_commit}..{head_commit}"],
        cwd=ctx.chromium_src,
    )
    if result.returncode != 0:
        raise GitError(f"Failed to list commits: {result.stderr}")
    commits = [c.strip() for c in result.stdout.strip().split("\n") if c.strip()]
    if not commits:
        log_warning(f"No commits between {base_commit} and {head_commit}")
        return 0
    log_info(f"Extracting patches from {len(commits)} commits individually")
    if custom_base:
        log_info(f"Using custom base: {custom_base}")
    total_extracted = 0
    failed_commits = []
    with click.progressbar(
        commits, label="Processing commits", show_pos=True, show_percent=True
    ) as commits_bar:
        for commit in commits_bar:
            try:
                if custom_base:
                    # Use extract_with_base for full diff from custom base
                    extracted = extract_with_base(
                        ctx,
                        commit,
                        custom_base,
                        verbose=False,
                        force=force,
                        include_binary=include_binary,
                    )
                else:
                    # Normal extraction from parent
                    extracted = extract_single_commit(
                        ctx,
                        commit,
                        verbose=False,
                        force=force,
                        include_binary=include_binary,
                    )
                total_extracted += extracted
            except GitError as e:
                # Record the failure and keep going: one bad commit should
                # not abort extraction of the rest of the range.
                failed_commits.append((commit, str(e)))
                if verbose:
                    log_error(f"Failed to extract {commit}: {e}")
    if failed_commits:
        log_warning(f"Failed to extract {len(failed_commits)} commits:")
        # Cap the report at five failures to keep the output readable.
        for commit, error in failed_commits[:5]:
            log_warning(f" - {commit[:8]}: {error}")
        if len(failed_commits) > 5:
            log_warning(f" ... and {len(failed_commits) - 5} more")
    return total_extracted
class ExtractRangeModule(CommandModule):
    """Extract patches from a range of commits."""

    produces = []
    requires = []
    description = "Extract patches from a range of commits"

    def validate(self, ctx: Context) -> None:
        """Validate git availability and that chromium_src is a repository.

        Raises:
            ValidationError: If git is missing from PATH or chromium_src
                is not a git repository.
        """
        import shutil
        if not shutil.which("git"):
            raise ValidationError("Git is not available in PATH")
        if not validate_git_repository(ctx.chromium_src):
            raise ValidationError(f"Not a git repository: {ctx.chromium_src}")

    def execute(
        self,
        ctx: Context,
        start: str,
        end: str,
        output: Optional[Path] = None,
        interactive: bool = True,
        verbose: bool = False,
        force: bool = False,
        include_binary: bool = False,
        squash: bool = False,
        base: Optional[str] = None,
    ) -> None:
        """Execute extract range.

        Args:
            ctx: Build context
            start: Start commit (exclusive)
            end: End commit (inclusive)
            output: Output directory (unused, kept for compatibility)
            interactive: Interactive mode (unused, kept for compatibility)
            verbose: Show detailed output
            force: Overwrite existing patches
            include_binary: Include binary files
            squash: Squash all commits into single patches
            base: Use different base for diff (full diff from base for files in range)

        Raises:
            RuntimeError: If extraction fails with a git error.
        """
        # Squash collapses the range into one cumulative diff; otherwise
        # each commit in the range is extracted on its own. Both helpers
        # share the same keyword signature, so dispatch once here.
        extractor = extract_commit_range if squash else extract_commits_individually
        try:
            count = extractor(
                ctx,
                base_commit=start,
                head_commit=end,
                verbose=verbose,
                force=force,
                include_binary=include_binary,
                custom_base=base,
            )
            if count == 0:
                log_warning(f"No patches extracted from range {start}..{end}")
            else:
                log_success(f"Successfully extracted {count} patches from {start}..{end}")
        except GitError as e:
            # Fix: chain the original GitError (PEP 3134) so the underlying
            # git failure stays visible in the traceback.
            raise RuntimeError(f"Git error: {e}") from e

View File

@@ -6,16 +6,14 @@ and patch management with comprehensive error handling.
"""
import subprocess
import sys
import time
import click
import re
from pathlib import Path
from typing import Optional, List, Dict, Tuple, NamedTuple
from typing import Optional, List, Dict, Tuple
from enum import Enum
from dataclasses import dataclass
from context import BuildContext
from utils import log_info, log_error, log_success, log_warning
from ...common.context import Context
from ...common.utils import log_error, log_success, log_warning
class FileOperation(Enum):
@@ -220,7 +218,7 @@ def parse_diff_output(diff_output: str) -> Dict[str, FilePatch]:
# Parse file paths from diff line
match = re.match(r"diff --git a/(.*) b/(.*)", line)
if match:
old_file = match.group(1)
_old_file = match.group(1)
new_file = match.group(2)
current_file = new_file
current_patch_lines = [line]
@@ -308,7 +306,7 @@ def parse_diff_output(diff_output: str) -> Dict[str, FilePatch]:
return patches
def write_patch_file(ctx: BuildContext, file_path: str, patch_content: str) -> bool:
def write_patch_file(ctx: Context, file_path: str, patch_content: str) -> bool:
"""
Write a patch file to chromium_src directory structure.
@@ -339,7 +337,7 @@ def write_patch_file(ctx: BuildContext, file_path: str, patch_content: str) -> b
return False
def create_deletion_marker(ctx: BuildContext, file_path: str) -> bool:
def create_deletion_marker(ctx: Context, file_path: str) -> bool:
"""
Create a marker file for deleted files.
@@ -350,7 +348,7 @@ def create_deletion_marker(ctx: BuildContext, file_path: str) -> bool:
Returns:
True if successful, False otherwise
"""
marker_path = ctx.get_dev_patches_dir() / file_path
marker_path = ctx.get_patches_dir() / file_path
marker_path = marker_path.with_suffix(marker_path.suffix + ".deleted")
marker_path.parent.mkdir(parents=True, exist_ok=True)
@@ -366,7 +364,7 @@ def create_deletion_marker(ctx: BuildContext, file_path: str) -> bool:
def create_binary_marker(
ctx: BuildContext, file_path: str, operation: FileOperation
ctx: Context, file_path: str, operation: FileOperation
) -> bool:
"""
Create a marker file for binary files.
@@ -379,7 +377,7 @@ def create_binary_marker(
Returns:
True if successful, False otherwise
"""
marker_path = ctx.get_dev_patches_dir() / file_path
marker_path = ctx.get_patches_dir() / file_path
marker_path = marker_path.with_suffix(marker_path.suffix + ".binary")
marker_path.parent.mkdir(parents=True, exist_ok=True)

View File

@@ -0,0 +1,26 @@
"""
Feature module - Manage feature-to-file mappings.
Provides commands for managing features:
- add_feature: Add files from a commit to a feature
- list_features: List all defined features
- show_feature: Show details of a specific feature
"""
from .feature import (
add_feature,
AddFeatureModule,
list_features,
ListFeaturesModule,
show_feature,
ShowFeatureModule,
)
__all__ = [
"add_feature",
"AddFeatureModule",
"list_features",
"ListFeaturesModule",
"show_feature",
"ShowFeatureModule",
]

View File

@@ -0,0 +1,154 @@
"""
Feature module - Manage feature-to-file mappings
Simple feature management with YAML persistence.
"""
import yaml
from typing import Dict, Optional
from ...common.context import Context
from ...common.module import CommandModule, ValidationError
from ..extract.utils import get_commit_changed_files
from ...common.utils import log_info, log_error, log_success, log_warning
def add_feature(ctx: Context, feature_name: str, commit: str, description: Optional[str] = None) -> bool:
    """Record the files touched by *commit* under *feature_name* in features.yaml.

    Examples:
        dev feature add my-feature HEAD
        dev feature add llm-chat HEAD~3 --description "LLM chat integration"
    """
    yaml_path = ctx.get_features_yaml_path()

    # Resolve the commit to its changed-file list first; bail if empty.
    touched = get_commit_changed_files(ctx, commit)
    if not touched:
        log_error(f"No changed files found in commit {commit}")
        return False

    # Start from an empty mapping and overlay any valid existing content.
    data: Dict = {"features": {}}
    if yaml_path.exists():
        with open(yaml_path, "r") as handle:
            loaded = yaml.safe_load(handle)
        if loaded and "features" in loaded:
            data = loaded

    # Insert or replace the feature entry.
    data["features"][feature_name] = {
        "description": description or f"Feature: {feature_name}",
        "files": sorted(touched),
        "commit": commit,
    }

    # Persist back in stable, human-readable YAML.
    with open(yaml_path, "w") as handle:
        yaml.safe_dump(data, handle, sort_keys=False, default_flow_style=False)
    log_success(f"✓ Added feature '{feature_name}' with {len(touched)} files")
    return True
def list_features(ctx: Context):
    """Print every feature defined in features.yaml, one line each."""
    yaml_path = ctx.get_features_yaml_path()
    if not yaml_path.exists():
        log_warning("No features.yaml found")
        return

    with open(yaml_path, "r") as handle:
        parsed = yaml.safe_load(handle)
    if not parsed or "features" not in parsed:
        log_warning("No features defined")
        return

    catalog = parsed["features"]
    log_info(f"Features ({len(catalog)}):")
    log_info("-" * 60)
    for name, cfg in catalog.items():
        file_count = len(cfg.get("files", []))
        description = cfg.get("description", "")
        log_info(f" {name}: {file_count} files - {description}")
def show_feature(ctx: Context, feature_name: str):
    """Print the description, source commit, and file list of one feature."""
    yaml_path = ctx.get_features_yaml_path()
    if not yaml_path.exists():
        log_error("No features.yaml found")
        return

    with open(yaml_path, "r") as handle:
        parsed = yaml.safe_load(handle)
    if not parsed or "features" not in parsed:
        log_error("No features defined")
        return

    catalog = parsed["features"]
    if feature_name not in catalog:
        # Unknown feature: help the caller by listing what does exist.
        log_error(f"Feature '{feature_name}' not found")
        log_info("Available features:")
        for name in catalog.keys():
            log_info(f" - {name}")
        return

    entry = catalog[feature_name]
    log_info(f"Feature: {feature_name}")
    log_info("-" * 60)
    log_info(f"Description: {entry.get('description', '')}")
    log_info(f"Commit: {entry.get('commit', 'Unknown')}")
    log_info(f"Files ({len(entry.get('files', []))}):")
    for file_path in entry.get("files", []):
        log_info(f" - {file_path}")
# CommandModule wrappers for dev CLI
class ListFeaturesModule(CommandModule):
    """CommandModule wrapper that lists every defined feature."""

    produces = []
    requires = []
    description = "List all defined features"

    def validate(self, ctx: Context) -> None:
        # Nothing to pre-check: list_features itself warns when no
        # features.yaml (or no features) exist.
        return None

    def execute(self, ctx: Context, **kwargs) -> None:
        list_features(ctx)
class ShowFeatureModule(CommandModule):
    """CommandModule wrapper that displays a single feature's details."""

    produces = []
    requires = []
    description = "Show details of a specific feature"

    def validate(self, ctx: Context) -> None:
        # Existence of the named feature is checked inside show_feature.
        return None

    def execute(self, ctx: Context, feature_name: str, **kwargs) -> None:
        show_feature(ctx, feature_name)
class AddFeatureModule(CommandModule):
    """CommandModule wrapper that records a commit's files under a feature."""

    produces = []
    requires = []
    description = "Add files from a commit to a feature"

    def validate(self, ctx: Context) -> None:
        """Ensure git is installed and the chromium checkout exists."""
        import shutil
        if shutil.which("git") is None:
            raise ValidationError("Git is not available in PATH")
        if not ctx.chromium_src.exists():
            raise ValidationError(f"Chromium source not found: {ctx.chromium_src}")

    def execute(self, ctx: Context, feature_name: str, commit: str, description: Optional[str] = None, **kwargs) -> None:
        if not add_feature(ctx, feature_name, commit, description):
            raise RuntimeError(f"Failed to add feature '{feature_name}'")

View File

@@ -1,92 +0,0 @@
#!/usr/bin/env python3
"""
Git operations module for Nxtscape build system
"""
import os
import sys
import subprocess
import shutil
import tarfile
import urllib.request
from pathlib import Path
from context import BuildContext
from utils import run_command, log_info, log_error, log_success, IS_WINDOWS, safe_rmtree
def setup_git(ctx: BuildContext) -> bool:
    """Setup git and checkout Chromium.

    Fetches all tags, verifies and checks out the configured Chromium
    version tag, then runs gclient sync.

    Raises:
        ValueError: If the configured Chromium version tag does not exist.
    """
    log_info(f"\n🔀 Setting up Chromium {ctx.chromium_version}...")
    os.chdir(ctx.chromium_src)
    # Fetch all tags and checkout
    log_info("📥 Fetching all tags from remote...")
    run_command(["git", "fetch", "--tags", "--force"])
    # Verify tag exists before checkout
    result = subprocess.run(
        ["git", "tag", "-l", ctx.chromium_version],
        text=True,
        capture_output=True,
        cwd=ctx.chromium_src,
    )
    if not result.stdout or ctx.chromium_version not in result.stdout:
        log_error(f"Tag {ctx.chromium_version} not found!")
        log_info("Available tags (last 10):")
        list_result = subprocess.run(
            ["git", "tag", "-l", "--sort=-version:refname"],
            text=True,
            capture_output=True,
            cwd=ctx.chromium_src,
        )
        if list_result.stdout:
            for tag in list_result.stdout.strip().split("\n")[:10]:
                log_info(f" {tag}")
        raise ValueError(f"Git tag {ctx.chromium_version} not found")
    log_info(f"🔀 Checking out tag: {ctx.chromium_version}")
    run_command(["git", "checkout", f"tags/{ctx.chromium_version}"])
    # Sync dependencies
    log_info("📥 Syncing dependencies (this may take a while)...")
    # Fix: the comment stated Windows gclient doesn't support --shallow,
    # yet the flag was still passed on Windows; omit it there.
    if IS_WINDOWS:
        run_command(["gclient.bat", "sync", "-D", "--no-history"])
    else:
        run_command(["gclient", "sync", "-D", "--no-history", "--shallow"])
    log_success("Git setup complete")
    return True
def setup_sparkle(ctx: BuildContext) -> bool:
    """Download and setup Sparkle framework"""
    log_info("\n✨ Setting up Sparkle framework...")
    sparkle_dir = ctx.get_sparkle_dir()

    # Start from a clean directory every time.
    if sparkle_dir.exists():
        safe_rmtree(sparkle_dir)
    sparkle_dir.mkdir(parents=True)

    # Fetch the archive with urllib (works on every platform).
    sparkle_url = ctx.get_sparkle_url()
    archive = sparkle_dir / "sparkle.tar.xz"
    log_info(f"Downloading Sparkle from {sparkle_url}...")
    urllib.request.urlretrieve(sparkle_url, archive)

    # Unpack with the tarfile module (also cross-platform), then discard
    # the downloaded archive.
    log_info("Extracting Sparkle...")
    with tarfile.open(archive, "r:xz") as tar:
        tar.extractall(sparkle_dir)
    archive.unlink()

    log_success("Sparkle setup complete")
    return True

View File

@@ -1,178 +0,0 @@
#!/usr/bin/env python3
"""
Version injection module for manifest.json files
Injects nxtscape browser version into extension manifests
"""
import json
from pathlib import Path
from typing import List, Dict, Any
from context import BuildContext
from utils import log_info, log_error, log_success, join_paths
def inject_version(ctx: BuildContext) -> bool:
    """Inject browser version into manifest.json files"""
    log_info("\n💉 Injecting browser version into extension manifests...")
    # Hardcoded paths to manifest files
    targets = [
        join_paths(
            ctx.root_dir, "resources", "files", "ai_side_panel", "manifest.json"
        ),
        join_paths(ctx.root_dir, "resources", "files", "bug_reporter", "manifest.json"),
    ]
    # Process every manifest (no short-circuit) so each failure is reported.
    results = [
        inject_version_to_manifest(
            target,
            ctx.get_nxtscape_chromium_version(),
            ctx.get_nxtscape_version(),
        )
        for target in targets
    ]
    success = all(results)
    if success:
        log_success("Browser version injected into all manifests")
    else:
        log_error("Failed to inject version into some manifests")
    return success
def inject_version_to_manifest(
    manifest_path: Path, browser_version: str, nxtscape_version: str
) -> bool:
    """Inject browser version and set the version in a single manifest.json file.

    Args:
        manifest_path: Path to the manifest.json to update
        browser_version: Browser version stored under "browser_version"
        nxtscape_version: Nxtscape version used to build the "X.0.0.0" version

    Returns:
        True on success, False on any failure (missing file, bad JSON, ...)
    """
    try:
        if not manifest_path.exists():
            log_error(f"Manifest not found: {manifest_path}")
            return False
        # Read existing manifest
        with open(manifest_path, "r", encoding="utf-8") as f:
            manifest_data = json.load(f)
        # Set version to NXTSCAPE_VERSION formatted as X.0.0.0
        if "version" in manifest_data:
            current_version = manifest_data["version"]
            # Format version as X.0.0.0
            formatted_version = f"{nxtscape_version}.0.0.0"
            manifest_data["version"] = formatted_version
            # Fix: the old message concatenated the two versions with no
            # separator (an apparently mis-encoded arrow); make the
            # old -> new transition explicit.
            log_info(
                f" Manifest version updated: {current_version} -> {formatted_version}"
            )
        # Add browser_version field
        manifest_data["browser_version"] = browser_version
        # Write back with proper formatting
        with open(manifest_path, "w", encoding="utf-8") as f:
            json.dump(manifest_data, f, indent=2, ensure_ascii=False)
            f.write("\n")  # Add trailing newline
        # Validate the written JSON
        if validate_json_file(manifest_path):
            log_success(f"✓ Updated: {manifest_path.name}")
            return True
        else:
            log_error(f"✗ Invalid JSON after injection: {manifest_path.name}")
            return False
    except json.JSONDecodeError as e:
        log_error(f"Failed to parse JSON in {manifest_path}: {e}")
        return False
    except Exception as e:
        log_error(f"Failed to inject version into {manifest_path}: {e}")
        return False
def increment_version(version: str) -> str:
    """Return *version* with its right-most numeric component incremented.

    Non-numeric components are skipped; if no component is numeric,
    ``.1`` is appended to the original string.
    """
    parts = version.split(".")
    if not parts:
        return "0.0.1"
    # Walk the components right-to-left looking for the first integer.
    for idx in reversed(range(len(parts))):
        try:
            parts[idx] = str(int(parts[idx]) + 1)
        except ValueError:
            # Not numeric; keep scanning toward the front.
            continue
        return ".".join(parts)
    # Nothing numeric at all: append a new trailing component.
    return version + ".1"
def validate_json_file(file_path: Path) -> bool:
    """Return True iff *file_path* exists and contains parseable JSON."""
    try:
        with open(file_path, "r", encoding="utf-8") as handle:
            json.load(handle)
        return True
    except Exception:
        # Any failure (missing file, bad encoding, invalid JSON) means invalid.
        return False
def remove_browser_version(ctx: BuildContext) -> bool:
    """Remove browser version from manifest.json files (for cleanup)"""
    log_info("\n🧹 Removing browser version from extension manifests...")
    # Hardcoded paths to manifest files
    targets = [
        join_paths(
            ctx.root_dir, "resources", "files", "ai_side_panel", "manifest.json"
        ),
        join_paths(ctx.root_dir, "resources", "files", "bug_reporter", "manifest.json"),
    ]
    # Evaluate every manifest (list, not generator) so none are skipped.
    results = [remove_version_from_manifest(target) for target in targets]
    return all(results)
def remove_version_from_manifest(manifest_path: Path) -> bool:
    """Strip the injected browser_version field from one manifest.json."""
    try:
        if not manifest_path.exists():
            # Nothing to remove.
            return True
        with open(manifest_path, "r", encoding="utf-8") as handle:
            data = json.load(handle)
        if "browser_version" not in data:
            # Field absent: the file is left untouched.
            return True
        del data["browser_version"]
        # Rewrite preserving the 2-space indent and trailing newline.
        with open(manifest_path, "w", encoding="utf-8") as handle:
            json.dump(data, handle, indent=2, ensure_ascii=False)
            handle.write("\n")
        log_info(f"Removed browser_version from: {manifest_path.name}")
        return True
    except Exception as e:
        log_error(f"Failed to remove version from {manifest_path}: {e}")
        return False
def get_manifest_version(manifest_path: Path) -> str:
    """Return the "version" field of a manifest.json, or "unknown" on any failure."""
    try:
        with open(manifest_path, "r", encoding="utf-8") as handle:
            # .get stays inside the try: a non-object JSON document (e.g. a
            # list) raises AttributeError and also yields "unknown".
            return json.load(handle).get("version", "unknown")
    except Exception:
        return "unknown"

View File

@@ -0,0 +1,556 @@
#!/usr/bin/env python3
"""Linux packaging module for BrowserOS (AppImage and .deb)"""
import os
import shutil
import subprocess
from pathlib import Path
from typing import List, Optional
from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.utils import (
log_info,
log_error,
log_warning,
log_success,
run_command,
safe_rmtree,
join_paths,
IS_LINUX,
)
class LinuxPackageModule(CommandModule):
    """Build the Linux distribution artifacts (AppImage and .deb)."""

    # Artifact names registered with ctx.artifact_registry on success.
    produces = ["appimage", "deb"]
    requires = []
    description = "Create AppImage and .deb packages for Linux"

    def validate(self, ctx: Context) -> None:
        """Fail fast unless running on Linux with a built browser binary."""
        if not IS_LINUX():
            raise ValidationError("Linux packaging requires Linux")
        out_dir = join_paths(ctx.chromium_src, ctx.out_dir)
        chrome_binary = join_paths(out_dir, ctx.BROWSEROS_APP_NAME)
        if not chrome_binary.exists():
            raise ValidationError(f"Chrome binary not found: {chrome_binary}")

    def execute(self, ctx: Context) -> None:
        """Create both package formats; raises only when BOTH fail.

        Raises:
            RuntimeError: If neither an AppImage nor a .deb was produced.
        """
        log_info(
            f"\n📦 Packaging {ctx.BROWSEROS_APP_BASE_NAME} {ctx.get_browseros_chromium_version()} for Linux ({ctx.architecture})"
        )
        package_dir = ctx.get_dist_dir()
        package_dir.mkdir(parents=True, exist_ok=True)
        appimage_path = self._package_appimage(ctx, package_dir)
        deb_path = self._package_deb(ctx, package_dir)
        # Register whichever artifacts succeeded so later modules can find them.
        if appimage_path:
            ctx.artifact_registry.add("appimage", appimage_path)
        if deb_path:
            ctx.artifact_registry.add("deb", deb_path)
        if not (appimage_path or deb_path):
            raise RuntimeError("Both AppImage and .deb packaging failed")
        log_success("✅ Linux packaging complete!")
        # Partial success is reported as a warning, not an error.
        if appimage_path and deb_path:
            log_info(" Both AppImage and .deb created successfully")
        elif appimage_path:
            log_warning(" Only AppImage created (.deb failed)")
        elif deb_path:
            log_warning(" Only .deb created (AppImage failed)")

    def _package_appimage(self, ctx: Context, package_dir: Path) -> Optional[Path]:
        # Thin indirection over the module-level helper (presumably kept
        # as an override/stub point — confirm before removing).
        return package_appimage(ctx, package_dir)

    def _package_deb(self, ctx: Context, package_dir: Path) -> Optional[Path]:
        return package_deb(ctx, package_dir)
# =============================================================================
# Shared Helper Functions (used by both AppImage and .deb)
# =============================================================================
def copy_browser_files(
    ctx: Context, target_dir: Path, set_sandbox_suid: bool = True
) -> bool:
    """Copy browser binaries, libraries, and resources to target directory.

    Missing individual files only produce warnings; the copy continues and
    the function still returns True.

    Args:
        ctx: Build context
        target_dir: Destination directory for browser files
        set_sandbox_suid: If True, set SUID bit on chrome_sandbox (AppImage only)

    Returns:
        True if successful, False otherwise
    """
    target_dir.mkdir(parents=True, exist_ok=True)
    out_dir = join_paths(ctx.chromium_src, ctx.out_dir)
    # Flat files expected next to the browser binary in the build output dir.
    files_to_copy = [
        ctx.BROWSEROS_APP_NAME,
        "chrome_crashpad_handler",
        "chrome_sandbox",
        "chromedriver",
        "libEGL.so",
        "libGLESv2.so",
        "libvk_swiftshader.so",
        "libvulkan.so.1",
        "vk_swiftshader_icd.json",
        "icudtl.dat",
        "snapshot_blob.bin",
        "v8_context_snapshot.bin",
        "chrome_100_percent.pak",
        "chrome_200_percent.pak",
        "resources.pak",
    ]
    for file in files_to_copy:
        src = join_paths(out_dir, file)
        if Path(src).exists():
            shutil.copy2(src, join_paths(target_dir, file))
            log_info(f" ✓ Copied {file}")
        else:
            # Best-effort: warn but keep packaging the rest.
            log_warning(f" ⚠ File not found: {file}")
    # Whole directories copied recursively (merged if already present).
    dirs_to_copy = ["locales", "MEIPreload", "BrowserOSServer"]
    for dir_name in dirs_to_copy:
        src = join_paths(out_dir, dir_name)
        if Path(src).exists():
            shutil.copytree(src, join_paths(target_dir, dir_name), dirs_exist_ok=True)
            log_info(f" ✓ Copied {dir_name}/")
    # Ensure executables carry their expected permission bits.
    browseros_path = Path(join_paths(target_dir, ctx.BROWSEROS_APP_NAME))
    if browseros_path.exists():
        browseros_path.chmod(0o755)
    sandbox_path = Path(join_paths(target_dir, "chrome_sandbox"))
    if sandbox_path.exists():
        if set_sandbox_suid:
            # 0o4755 = SUID bit on the sandbox helper (AppImage path).
            sandbox_path.chmod(0o4755)
        else:
            sandbox_path.chmod(0o755)
    crashpad_path = Path(join_paths(target_dir, "chrome_crashpad_handler"))
    if crashpad_path.exists():
        crashpad_path.chmod(0o755)
    return True
def create_desktop_file(apps_dir: Path, exec_path: str) -> Path:
    """Write the browseros.desktop launcher with the given Exec path.

    Args:
        apps_dir: Directory where .desktop file should be created
        exec_path: Full path for the Exec= line in the desktop file

    Returns:
        Path to created .desktop file
    """
    apps_dir.mkdir(parents=True, exist_ok=True)
    # Standard freedesktop.org desktop entry; %U lets the browser receive URLs.
    content = f"""[Desktop Entry]
Version=1.0
Name=BrowserOS
GenericName=Web Browser
Comment=Browse the World Wide Web
Exec={exec_path} %U
Terminal=false
Type=Application
Categories=Network;WebBrowser;
MimeType=text/html;text/xml;application/xhtml+xml;application/xml;application/vnd.mozilla.xul+xml;application/rss+xml;application/rdf+xml;image/gif;image/jpeg;image/png;x-scheme-handler/http;x-scheme-handler/https;x-scheme-handler/ftp;x-scheme-handler/chrome;video/webm;application/x-xpinstall;
Icon=browseros
"""
    destination = Path(join_paths(apps_dir, "browseros.desktop"))
    destination.write_text(content)
    log_info(" ✓ Created desktop file")
    return destination
def copy_icon(ctx: Context, icons_dir: Path) -> bool:
    """Install the product icon into the hicolor theme tree.

    Args:
        ctx: Build context (supplies root_dir)
        icons_dir: Base icons directory (usr/share/icons/hicolor)

    Returns:
        True if the icon was copied, False when the source icon is missing
    """
    source = Path(join_paths(ctx.root_dir, "resources", "icons", "product_logo.png"))
    if not source.exists():
        log_warning(" ⚠ Icon not found at resources/icons/product_logo.png")
        return False
    destination = Path(join_paths(icons_dir, "256x256", "apps", "browseros.png"))
    destination.parent.mkdir(parents=True, exist_ok=True)
    shutil.copy2(source, destination)
    log_info(" ✓ Copied icon")
    return True
# =============================================================================
# AppImage Packaging Functions
# =============================================================================
def prepare_appdir(ctx: Context, appdir: Path) -> bool:
    """Assemble the AppDir layout consumed by appimagetool.

    Browser files go to /opt/browseros, the desktop entry and icon go under
    /usr/share, and the AppImage-specific root-level files (.desktop, icon,
    AppRun) are created on top of that.

    Returns:
        True on success, False if copying browser files failed.
    """
    log_info("📁 Preparing AppDir structure...")
    opt_root = join_paths(appdir, "opt", "browseros")
    share_root = join_paths(appdir, "usr", "share")
    hicolor_dir = join_paths(share_root, "icons", "hicolor")
    applications_dir = join_paths(share_root, "applications")
    # AppImages run unprivileged, so chrome_sandbox needs its SUID bit here.
    if not copy_browser_files(ctx, opt_root, set_sandbox_suid=True):
        return False
    desktop_file = create_desktop_file(
        applications_dir, f"/opt/browseros/{ctx.BROWSEROS_APP_NAME}"
    )
    source_icon = Path(join_paths(ctx.root_dir, "resources", "icons", "product_logo.png"))
    copy_icon(ctx, hicolor_dir)
    # appimagetool wants a desktop entry at the AppDir root whose Exec line
    # points at AppRun instead of the installed path.
    root_desktop = Path(join_paths(appdir, "browseros.desktop"))
    shutil.copy2(desktop_file, root_desktop)
    patched = root_desktop.read_text().replace(
        f"Exec=/opt/browseros/{ctx.BROWSEROS_APP_NAME} %U", "Exec=AppRun %U"
    )
    root_desktop.write_text(patched)
    # A root-level icon is also required by appimagetool.
    if source_icon.exists():
        shutil.copy2(source_icon, Path(join_paths(appdir, "browseros.png")))
    # AppRun is the AppImage entry point: set up library path and launch.
    apprun_script = f"""#!/bin/sh
THIS="$(readlink -f "${{0}}")"
HERE="$(dirname "${{THIS}}")"
export LD_LIBRARY_PATH="${{HERE}}"/opt/browseros:$LD_LIBRARY_PATH
export CHROME_WRAPPER="${{THIS}}"
"${{HERE}}"/opt/browseros/{ctx.BROWSEROS_APP_NAME} "$@"
"""
    apprun = Path(join_paths(appdir, "AppRun"))
    apprun.write_text(apprun_script)
    apprun.chmod(0o755)
    log_info(" ✓ Created AppRun script")
    return True
def download_appimagetool(ctx: Context) -> Optional[Path]:
    """Return a usable appimagetool binary, downloading it on first use.

    The tool is cached under <root>/build/tools so repeated packaging runs
    do not re-download it.

    Returns:
        Path to the executable tool, or None if the download failed.
    """
    tool_dir = Path(join_paths(ctx.root_dir, "build", "tools"))
    # parents=True: on a fresh checkout build/ may not exist yet; the old
    # mkdir(exist_ok=True) alone raised FileNotFoundError in that case.
    tool_dir.mkdir(parents=True, exist_ok=True)
    # NOTE(review): the x86_64 tool is downloaded unconditionally; an arm64
    # host would need the aarch64 appimagetool build — confirm.
    tool_path = Path(join_paths(tool_dir, "appimagetool-x86_64.AppImage"))
    if tool_path.exists():
        log_info("✓ appimagetool already available")
        return tool_path
    log_info("📥 Downloading appimagetool...")
    url = "https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage"
    result = run_command(["wget", "-O", str(tool_path), url], check=False)
    if result.returncode != 0:
        log_error("Failed to download appimagetool")
        return None
    tool_path.chmod(0o755)
    log_success("✓ Downloaded appimagetool")
    return tool_path
def create_appimage(ctx: Context, appdir: Path, output_path: Path) -> bool:
    """Run appimagetool over *appdir* to produce *output_path*.

    Returns:
        True on success, False if the tool is unavailable or fails.
    """
    log_info("📦 Creating AppImage...")
    tool = download_appimagetool(ctx)
    if tool is None:
        return False
    # appimagetool requires ARCH in its environment; scope it to this call.
    child_env = os.environ.copy()
    child_env["ARCH"] = "x86_64" if ctx.architecture == "x64" else "aarch64"
    command = [
        str(tool),
        "--comp",
        "gzip",  # Use gzip compression
        str(appdir),
        str(output_path),
    ]
    result = subprocess.run(
        command,
        capture_output=True,
        text=True,
        env=child_env,
        check=False,
    )
    if result.returncode != 0:
        log_error("Failed to create AppImage")
        if result.stderr:
            log_error(result.stderr)
        return False
    log_success(f"✓ Created AppImage: {output_path}")
    output_path.chmod(0o755)  # AppImages must be executable
    return True
# =============================================================================
# Debian Package (.deb) Functions
# =============================================================================
def create_launcher_script(ctx: Context, bin_dir: Path) -> None:
    """Write the /usr/bin/browseros wrapper that launches the real binary."""
    bin_dir.mkdir(parents=True, exist_ok=True)
    script = f"""#!/bin/sh
# BrowserOS launcher script
export LD_LIBRARY_PATH=/usr/lib/browseros:$LD_LIBRARY_PATH
exec /usr/lib/browseros/{ctx.BROWSEROS_APP_NAME} "$@"
"""
    launcher = Path(join_paths(bin_dir, "browseros"))
    launcher.write_text(script)
    launcher.chmod(0o755)
    log_info(" ✓ Created launcher script")
def create_control_file(ctx: Context, debian_dir: Path) -> None:
    """Write DEBIAN/control with the package metadata for dpkg-deb."""
    debian_dir.mkdir(parents=True, exist_ok=True)
    # dpkg versions must be numeric-ish: drop the 'v' prefix and spaces,
    # and turn underscores into dots.
    pkg_version = (
        ctx.get_browseros_chromium_version()
        .lstrip("v")
        .replace(" ", "")
        .replace("_", ".")
    )
    # Map build architecture to Debian's naming.
    pkg_arch = "amd64" if ctx.architecture == "x64" else "arm64"
    control_text = f"""Package: browseros
Version: {pkg_version}
Section: web
Priority: optional
Architecture: {pkg_arch}
Depends: libc6 (>= 2.31), libglib2.0-0, libnss3, libnspr4, libx11-6, libatk1.0-0, libatk-bridge2.0-0, libcups2, libasound2, libdrm2, libgbm1, libpango-1.0-0, libcairo2, libudev1, libxcomposite1, libxdamage1, libxrandr2, libxkbcommon0, libgtk-3-0
Maintainer: BrowserOS Team <support@browseros.com>
Homepage: https://www.browseros.com/
Description: BrowserOS - The open source agentic browser
 BrowserOS is a privacy-focused web browser built on Chromium,
 designed for modern web browsing with AI capabilities.
"""
    Path(join_paths(debian_dir, "control")).write_text(control_text)
    log_info(" ✓ Created DEBIAN/control")
def create_postinst_script(debian_dir: Path) -> None:
    """Write DEBIAN/postinst to apply the chrome_sandbox SUID bit.

    Debian policy prohibits SUID bits inside the package payload, so the
    bit is applied by this post-installation hook instead.
    """
    script = """#!/bin/sh
# Post-installation script for BrowserOS
set -e
# Set SUID bit on chrome_sandbox for sandboxing support
if [ -f /usr/lib/browseros/chrome_sandbox ]; then
    chmod 4755 /usr/lib/browseros/chrome_sandbox
fi
exit 0
"""
    postinst = Path(join_paths(debian_dir, "postinst"))
    postinst.write_text(script)
    postinst.chmod(0o755)  # maintainer scripts must be executable
    log_info(" ✓ Created DEBIAN/postinst")
def prepare_debdir(ctx: Context, debdir: Path) -> bool:
    """Lay out the staging tree consumed by dpkg-deb.

    Structure:
        debdir/
        ├── DEBIAN/                control + postinst metadata
        └── usr/
            ├── bin/browseros      launcher script
            ├── lib/browseros/     all browser files
            └── share/             .desktop entry and hicolor icon

    Returns:
        True on success, False if copying browser files failed.
    """
    log_info("📁 Preparing .deb directory structure...")
    usr_lib = join_paths(debdir, "usr", "lib", "browseros")
    usr_bin = join_paths(debdir, "usr", "bin")
    usr_share = join_paths(debdir, "usr", "share")
    applications = join_paths(usr_share, "applications")
    hicolor = join_paths(usr_share, "icons", "hicolor")
    debian_meta = join_paths(debdir, "DEBIAN")
    # No SUID in the payload; postinst applies it after installation.
    if not copy_browser_files(ctx, usr_lib, set_sandbox_suid=False):
        return False
    create_launcher_script(ctx, usr_bin)
    create_desktop_file(applications, "/usr/bin/browseros")
    copy_icon(ctx, hicolor)
    create_control_file(ctx, debian_meta)
    create_postinst_script(debian_meta)
    log_success("✓ .deb directory prepared")
    return True
def create_deb(ctx: Context, debdir: Path, output_path: Path) -> bool:
    """Build the .deb from a prepared staging tree via dpkg-deb."""
    log_info("📦 Creating .deb package...")
    if shutil.which("dpkg-deb") is None:
        log_error("dpkg-deb not found. Install with: sudo apt install dpkg")
        return False
    build_cmd = [
        "dpkg-deb",
        "--build",
        "--root-owner-group",  # Ensure files owned by root:root
        str(debdir),
        str(output_path),
    ]
    if run_command(build_cmd, check=False).returncode != 0:
        log_error("Failed to create .deb package")
        return False
    log_success(f"✓ Created .deb package: {output_path}")
    output_path.chmod(0o644)  # Standard package permissions
    return True
# =============================================================================
# Main Packaging Entry Points
# =============================================================================
def package_appimage(ctx: Context, package_dir: Path) -> Optional[Path]:
    """Create the AppImage package in *package_dir*.

    Returns:
        Path to the created AppImage, or None if any step failed.
    """
    log_info("🖼️ Building AppImage...")
    staging = Path(join_paths(package_dir, f"{ctx.BROWSEROS_APP_BASE_NAME}.AppDir"))
    if staging.exists():
        safe_rmtree(staging)
    if not prepare_appdir(ctx, staging):
        safe_rmtree(staging)
        return None
    version_tag = ctx.get_browseros_chromium_version().replace(" ", "_")
    arch_tag = "x86_64" if ctx.architecture == "x64" else "arm64"
    out_file = Path(
        join_paths(
            package_dir,
            f"{ctx.BROWSEROS_APP_BASE_NAME}-{version_tag}-{arch_tag}.AppImage",
        )
    )
    built = create_appimage(ctx, staging, out_file)
    safe_rmtree(staging)  # the staging tree is always discarded
    if not built:
        return None
    log_success(f"✅ AppImage created: {out_file.name}")
    log_info(f" Size: {out_file.stat().st_size / 1024 / 1024:.1f} MB")
    return out_file
def package_deb(ctx: Context, package_dir: Path) -> Optional[Path]:
    """Create the .deb package in *package_dir*.

    Returns:
        Path to the created .deb, or None if any step failed.
    """
    log_info("📦 Building .deb package...")
    staging = Path(join_paths(package_dir, f"{ctx.BROWSEROS_APP_BASE_NAME}_deb"))
    if staging.exists():
        safe_rmtree(staging)
    if not prepare_debdir(ctx, staging):
        safe_rmtree(staging)
        return None
    # Debian version string: no 'v' prefix, no spaces, dots not underscores.
    pkg_version = (
        ctx.get_browseros_chromium_version()
        .lstrip("v")
        .replace(" ", "")
        .replace("_", ".")
    )
    pkg_arch = "amd64" if ctx.architecture == "x64" else "arm64"
    out_file = Path(
        join_paths(package_dir, f"browseros_{pkg_version}_{pkg_arch}.deb")
    )
    built = create_deb(ctx, staging, out_file)
    safe_rmtree(staging)  # the staging tree is always discarded
    if not built:
        return None
    log_success(f"✅ .deb package created: {out_file.name}")
    log_info(f" Size: {out_file.stat().st_size / 1024 / 1024:.1f} MB")
    return out_file
def package_universal(contexts: List[Context]) -> bool:
    """Universal binaries are a macOS concept; always fails on Linux."""
    log_warning("Universal binaries are not supported on Linux")
    return False
# Sign functions moved to sign/linux.py
# - sign_binaries()
# These are now in modules/sign/linux.py

View File

@@ -9,8 +9,8 @@ import subprocess
from pathlib import Path
from typing import List, Tuple, Optional
from context import BuildContext
from utils import (
from ...common.context import BuildContext
from ...common.utils import (
log_info,
log_error,
log_warning,
@@ -43,7 +43,7 @@ def copy_browser_files(
out_dir = join_paths(ctx.chromium_src, ctx.out_dir)
files_to_copy = [
ctx.NXTSCAPE_APP_NAME,
ctx.BROWSEROS_APP_NAME,
"chrome_crashpad_handler",
"chrome_sandbox",
"chromedriver",
@@ -75,7 +75,7 @@ def copy_browser_files(
shutil.copytree(src, join_paths(target_dir, dir_name), dirs_exist_ok=True)
log_info(f" ✓ Copied {dir_name}/")
browseros_path = Path(join_paths(target_dir, ctx.NXTSCAPE_APP_NAME))
browseros_path = Path(join_paths(target_dir, ctx.BROWSEROS_APP_NAME))
if browseros_path.exists():
browseros_path.chmod(0o755)
@@ -166,7 +166,7 @@ def prepare_appdir(ctx: BuildContext, appdir: Path) -> bool:
# Create desktop file
desktop_file = create_desktop_file(
apps_dir, f"/opt/browseros/{ctx.NXTSCAPE_APP_NAME}"
apps_dir, f"/opt/browseros/{ctx.BROWSEROS_APP_NAME}"
)
# Copy icon
@@ -178,7 +178,7 @@ def prepare_appdir(ctx: BuildContext, appdir: Path) -> bool:
shutil.copy2(desktop_file, appdir_desktop)
desktop_content = appdir_desktop.read_text()
desktop_content = desktop_content.replace(
f"Exec=/opt/browseros/{ctx.NXTSCAPE_APP_NAME} %U", "Exec=AppRun %U"
f"Exec=/opt/browseros/{ctx.BROWSEROS_APP_NAME} %U", "Exec=AppRun %U"
)
appdir_desktop.write_text(desktop_content)
@@ -193,7 +193,7 @@ THIS="$(readlink -f "${{0}}")"
HERE="$(dirname "${{THIS}}")"
export LD_LIBRARY_PATH="${{HERE}}"/opt/browseros:$LD_LIBRARY_PATH
export CHROME_WRAPPER="${{THIS}}"
"${{HERE}}"/opt/browseros/{ctx.NXTSCAPE_APP_NAME} "$@"
"${{HERE}}"/opt/browseros/{ctx.BROWSEROS_APP_NAME} "$@"
"""
apprun_file = Path(join_paths(appdir, "AppRun"))
@@ -276,7 +276,7 @@ def create_launcher_script(ctx: BuildContext, bin_dir: Path) -> None:
launcher_content = f"""#!/bin/sh
# BrowserOS launcher script
export LD_LIBRARY_PATH=/usr/lib/browseros:$LD_LIBRARY_PATH
exec /usr/lib/browseros/{ctx.NXTSCAPE_APP_NAME} "$@"
exec /usr/lib/browseros/{ctx.BROWSEROS_APP_NAME} "$@"
"""
launcher_path = Path(join_paths(bin_dir, "browseros"))
@@ -290,7 +290,7 @@ def create_control_file(ctx: BuildContext, debian_dir: Path) -> None:
debian_dir.mkdir(parents=True, exist_ok=True)
# Version formatting: strip 'v' prefix and spaces, ensure numeric
version = ctx.get_nxtscape_chromium_version()
version = ctx.get_browseros_chromium_version()
version = version.lstrip("v").replace(" ", "").replace("_", ".")
# Architecture mapping
@@ -426,7 +426,7 @@ def package_appimage(ctx: BuildContext, package_dir: Path) -> Optional[Path]:
"""
log_info("🖼️ Building AppImage...")
appdir = Path(join_paths(package_dir, f"{ctx.NXTSCAPE_APP_BASE_NAME}.AppDir"))
appdir = Path(join_paths(package_dir, f"{ctx.BROWSEROS_APP_BASE_NAME}.AppDir"))
if appdir.exists():
safe_rmtree(appdir)
@@ -434,9 +434,9 @@ def package_appimage(ctx: BuildContext, package_dir: Path) -> Optional[Path]:
safe_rmtree(appdir)
return None
version = ctx.get_nxtscape_chromium_version().replace(" ", "_")
version = ctx.get_browseros_chromium_version().replace(" ", "_")
arch_suffix = "x86_64" if ctx.architecture == "x64" else "arm64"
filename = f"{ctx.NXTSCAPE_APP_BASE_NAME}-{version}-{arch_suffix}.AppImage"
filename = f"{ctx.BROWSEROS_APP_BASE_NAME}-{version}-{arch_suffix}.AppImage"
output_path = Path(join_paths(package_dir, filename))
success = create_appimage(ctx, appdir, output_path)
@@ -458,7 +458,7 @@ def package_deb(ctx: BuildContext, package_dir: Path) -> Optional[Path]:
"""
log_info("📦 Building .deb package...")
debdir = Path(join_paths(package_dir, f"{ctx.NXTSCAPE_APP_BASE_NAME}_deb"))
debdir = Path(join_paths(package_dir, f"{ctx.BROWSEROS_APP_BASE_NAME}_deb"))
if debdir.exists():
safe_rmtree(debdir)
@@ -467,7 +467,7 @@ def package_deb(ctx: BuildContext, package_dir: Path) -> Optional[Path]:
return None
version = (
ctx.get_nxtscape_chromium_version()
ctx.get_browseros_chromium_version()
.lstrip("v")
.replace(" ", "")
.replace("_", ".")
@@ -490,7 +490,7 @@ def package_deb(ctx: BuildContext, package_dir: Path) -> Optional[Path]:
def package(ctx: BuildContext) -> bool:
"""Package BrowserOS for Linux as both AppImage and .deb"""
log_info(
f"📦 Packaging {ctx.NXTSCAPE_APP_BASE_NAME} {ctx.get_nxtscape_chromium_version()} for Linux ({ctx.architecture})"
f"📦 Packaging {ctx.BROWSEROS_APP_BASE_NAME} {ctx.get_browseros_chromium_version()} for Linux ({ctx.architecture})"
)
# Create packaging directory
@@ -530,7 +530,6 @@ def package_universal(contexts: List[BuildContext]) -> bool:
return False
def sign_binaries(ctx: BuildContext) -> bool:
"""Linux doesn't require code signing like macOS/Windows"""
log_info("Code signing is not required for Linux packages")
return True
# Sign functions moved to sign/linux.py
# - sign_binaries()
# These are now in modules/sign/linux.py

View File

@@ -0,0 +1,332 @@
#!/usr/bin/env python3
"""DMG creation and packaging module for BrowserOS"""
import shutil
from pathlib import Path
from typing import Optional, List
from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.utils import run_command, log_info, log_error, log_success, IS_MACOS
class MacOSPackageModule(CommandModule):
    """Package the built .app into a DMG, signing/notarizing when the app
    itself was signed earlier in the pipeline."""

    produces = ["dmg"]
    requires = []
    description = "Create DMG package for macOS"

    def validate(self, ctx: Context) -> None:
        """Fail fast unless running on macOS with an existing app bundle."""
        if not IS_MACOS():
            raise ValidationError("DMG creation requires macOS")
        bundle = ctx.get_app_path()
        if not bundle.exists():
            raise ValidationError(f"App not found: {bundle}")

    def execute(self, ctx: Context) -> None:
        """Create the DMG and register it as the "dmg" artifact."""
        log_info("\n📀 Creating DMG package...")
        bundle = ctx.get_app_path()
        name = ctx.get_dmg_name()
        target = ctx.get_dist_dir() / name
        pkg_dmg = ctx.get_pkg_dmg_path()
        # A "signed_app" artifact means MacOSSignModule already ran, so the
        # DMG should be signed and notarized too.
        wants_signing = ctx.artifact_registry.has("signed_app")
        if wants_signing:
            self._create_signed_notarized_dmg(bundle, target, pkg_dmg, ctx)
        else:
            self._create_dmg(bundle, target, pkg_dmg)
        ctx.artifact_registry.add("dmg", target)
        log_success(f"DMG created: {name}")

    def _create_dmg(self, app_path: Path, dmg_path: Path, pkg_dmg_path: Path) -> None:
        # Plain unsigned DMG.
        if not create_dmg(app_path, dmg_path, "BrowserOS", pkg_dmg_path):
            raise RuntimeError("Failed to create DMG")

    def _create_signed_notarized_dmg(
        self, app_path: Path, dmg_path: Path, pkg_dmg_path: Path, ctx: Context
    ) -> None:
        # Signed + notarized DMG; requires signing env vars to be configured.
        from ..sign.macos import check_environment

        env_ok, env_vars = check_environment()
        if not env_ok:
            raise ValidationError("Signing environment not configured")
        certificate_name = env_vars["certificate_name"]
        keychain_profile = env_vars.get("keychain_profile", "notarytool-profile")
        if not create_signed_notarized_dmg(
            app_path, dmg_path, certificate_name, "BrowserOS", pkg_dmg_path, keychain_profile
        ):
            raise RuntimeError("Failed to create signed and notarized DMG")
def create_dmg(
    app_path: Path,
    dmg_path: Path,
    volume_name: str = "BrowserOS",
    pkg_dmg_path: Optional[Path] = None,
) -> bool:
    """Build a compressed DMG from an app bundle using pkg-dmg.

    Chromium's bundled pkg-dmg (when supplied and present) takes priority
    over a pkg-dmg found on PATH.

    Returns:
        True on success, False when no tool is found or the tool fails.
    """
    log_info(f"\n📀 Creating DMG package: {dmg_path.name}")
    if not app_path.exists():
        log_error(f"App not found at: {app_path}")
        return False
    dmg_path.parent.mkdir(parents=True, exist_ok=True)
    # Stale output would make pkg-dmg fail or produce confusing results.
    if dmg_path.exists():
        log_info(f" Removing existing DMG: {dmg_path.name}")
        dmg_path.unlink()
    # Select a pkg-dmg implementation.
    if pkg_dmg_path and pkg_dmg_path.exists():
        command = [str(pkg_dmg_path)]
    else:
        system_tool = shutil.which("pkg-dmg")
        if system_tool is None:
            log_error("No pkg-dmg tool found")
            return False
        command = [system_tool]
    command += [
        "--sourcefile",
        "--source",
        str(app_path),
        "--target",
        str(dmg_path),
        "--volname",
        volume_name,
        "--symlink",
        "/Applications:/Applications",
        "--format",
        "UDBZ",
    ]
    # Chromium's pkg-dmg supports a verbosity flag.
    if pkg_dmg_path:
        command += ["--verbosity", "2"]
    try:
        run_command(command)
    except Exception as e:
        log_error(f"Failed to create DMG: {e}")
        return False
    log_success(f"DMG created: {dmg_path}")
    return True
def sign_dmg(dmg_path: Path, certificate_name: str) -> bool:
    """Codesign a DMG with the given identity and verify the signature."""
    log_info(f"\n🔏 Signing DMG: {dmg_path.name}")
    if not dmg_path.exists():
        log_error(f"DMG not found at: {dmg_path}")
        return False
    sign_cmd = [
        "codesign",
        "--sign",
        certificate_name,
        "--force",
        "--timestamp",
        str(dmg_path),
    ]
    try:
        run_command(sign_cmd)
        # Verify what we just signed.
        log_info("🔍 Verifying DMG signature...")
        run_command(["codesign", "-vvv", str(dmg_path)])
    except Exception as e:
        log_error(f"Failed to sign DMG: {e}")
        return False
    log_success("DMG signed successfully")
    return True
def notarize_dmg(dmg_path: Path, keychain_profile: str = "notarytool-profile") -> bool:
    """Notarize a DMG file.

    Submits the DMG to Apple's notary service via `xcrun notarytool`, waits
    for the verdict, staples the ticket, validates the staple, and finishes
    with a Gatekeeper (`spctl`) assessment.

    Args:
        dmg_path: DMG to notarize.
        keychain_profile: notarytool keychain profile holding the credentials.

    Returns:
        True only if every step (submit, staple, validate, assess) succeeded.
    """
    log_info(f"\n📤 Notarizing DMG: {dmg_path.name}")
    if not dmg_path.exists():
        log_error(f"DMG not found at: {dmg_path}")
        return False
    try:
        # Submit for notarization. --wait blocks until Apple returns a verdict,
        # so this can take several minutes.
        log_info("📤 Submitting DMG for notarization (this may take a while)...")
        result = run_command(
            [
                "xcrun",
                "notarytool",
                "submit",
                str(dmg_path),
                "--keychain-profile",
                keychain_profile,
                "--wait",
            ],
            check=False,
        )
        log_info(result.stdout)
        if result.stderr:
            log_error(result.stderr)
        if result.returncode != 0:
            log_error("DMG notarization submission failed")
            return False
        # notarytool can exit 0 even when the verdict is not Accepted, so the
        # status is checked by scanning the textual output.
        if "status: Accepted" not in result.stdout:
            log_error("DMG notarization failed - status was not 'Accepted'")
            # Best-effort: surface the submission ID so the caller can pull
            # the detailed notarization log.
            for line in result.stdout.split("\n"):
                if "id:" in line:
                    submission_id = line.split("id:")[1].strip().split()[0]
                    log_info(
                        f'Get detailed logs with: xcrun notarytool log {submission_id} --keychain-profile "{keychain_profile}"'
                    )
                    break
            return False
        log_success("DMG notarization successful - status: Accepted")
        # Staple the notarization ticket so the DMG validates offline.
        log_info("📎 Stapling notarization ticket to DMG...")
        result = run_command(["xcrun", "stapler", "staple", str(dmg_path)], check=False)
        if result.returncode != 0:
            log_error("Failed to staple notarization ticket to DMG")
            return False
        log_success("DMG notarization ticket stapled successfully")
        # Confirm the staple took.
        log_info("🔍 Verifying DMG stapling...")
        result = run_command(
            ["xcrun", "stapler", "validate", str(dmg_path)], check=False
        )
        if result.returncode != 0:
            log_error("DMG stapling verification failed")
            return False
        log_success("DMG stapling verification successful")
        # Final Gatekeeper assessment of the disk image's primary signature.
        log_info("🔍 Performing final security assessment...")
        result = run_command(
            [
                "spctl",
                "-a",
                "-vvv",
                "-t",
                "open",
                "--context",
                "context:primary-signature",
                str(dmg_path),
            ],
            check=False,
        )
        if result.returncode != 0:
            log_error("Final security assessment failed")
            return False
        log_success("Final security assessment passed")
        return True
    except Exception as e:
        log_error(f"Unexpected error during DMG notarization: {e}")
        return False
def create_signed_notarized_dmg(
    app_path: Path,
    dmg_path: Path,
    certificate_name: str,
    volume_name: str = "BrowserOS",
    pkg_dmg_path: Optional[Path] = None,
    keychain_profile: str = "notarytool-profile",
) -> bool:
    """Create, sign, and notarize a DMG in a single pipeline.

    Stops at the first failing stage and returns False; returns True only
    when all three stages succeed.
    """
    log_info("=" * 70)
    log_info("📦 Creating signed and notarized DMG package")
    log_info("=" * 70)
    stages = (
        lambda: create_dmg(app_path, dmg_path, volume_name, pkg_dmg_path),
        lambda: sign_dmg(dmg_path, certificate_name),
        lambda: notarize_dmg(dmg_path, keychain_profile),
    )
    for stage in stages:
        if not stage():
            return False
    log_info("=" * 70)
    log_success(f"DMG package ready: {dmg_path}")
    log_info("=" * 70)
    return True
def package_universal(contexts: List[Context]) -> bool:
    """Create the DMG for a universal (multi-arch) macOS build.

    Expects the merged universal app at out/Default_universal; uses a
    synthetic "universal" context only for dist-dir and DMG naming.
    """
    log_info("=" * 70)
    log_info("📦 Creating universal DMG package...")
    log_info("=" * 70)
    if len(contexts) < 2:
        log_error("Universal packaging requires at least 2 architectures")
        return False
    primary = contexts[0]
    app_path = (
        primary.chromium_src / "out/Default_universal" / primary.BROWSEROS_APP_NAME
    )
    if not app_path.exists():
        log_error(f"Universal app not found: {app_path}")
        return False
    # Temporary context so naming reflects the universal architecture.
    uni_ctx = Context(
        root_dir=primary.root_dir,
        chromium_src=primary.chromium_src,
        architecture="universal",
        build_type=primary.build_type,
    )
    out_dir = uni_ctx.get_dist_dir()
    out_dir.mkdir(parents=True, exist_ok=True)
    dmg_name = uni_ctx.get_dmg_name()
    if not create_dmg(
        app_path, out_dir / dmg_name, "BrowserOS", primary.get_pkg_dmg_path()
    ):
        log_error("Failed to create universal DMG")
        return False
    log_success(f"Universal DMG created: {dmg_name}")
    return True

View File

@@ -8,8 +8,8 @@ import sys
import shutil
from pathlib import Path
from typing import Optional, List
from context import BuildContext
from utils import run_command, log_info, log_error, log_success
from ...common.context import BuildContext
from ...common.utils import run_command, log_info, log_error, log_success
def package(ctx: BuildContext) -> bool:
@@ -273,7 +273,7 @@ def package_universal(contexts: List[BuildContext]) -> bool:
# Use the universal app path
universal_dir = contexts[0].chromium_src / "out/Default_universal"
universal_app_path = universal_dir / contexts[0].NXTSCAPE_APP_NAME
universal_app_path = universal_dir / contexts[0].BROWSEROS_APP_NAME
if not universal_app_path.exists():
log_error(f"Universal app not found: {universal_app_path}")

View File

@@ -4,13 +4,11 @@ Universal binary merge module for Nxtscape Browser
Provides functions to merge two architecture builds into a universal binary
"""
import os
import sys
import shutil
from pathlib import Path
from typing import List
from context import BuildContext
from utils import run_command, log_info, log_error, log_success, log_warning
from ...common.context import Context
from ...common.utils import run_command, log_info, log_error, log_success
def merge_architectures(
@@ -48,8 +46,8 @@ def merge_architectures(
# Find universalizer script
if universalizer_script is None:
# Try to find it relative to this module
current_dir = Path(__file__).parent.parent
# Try to find it in the same package module directory
current_dir = Path(__file__).parent
universalizer_script = current_dir / "universalizer_patched.py"
if not universalizer_script.exists():
@@ -74,7 +72,7 @@ def merge_architectures(
str(output_path),
]
log_info(f"Running universalizer...")
log_info("Running universalizer...")
log_info(f"Command: {' '.join(cmd)}")
run_command(cmd)
@@ -92,7 +90,7 @@ def merge_architectures(
def create_minimal_context(
app_path: Path, chromium_src: Path, root_dir: Path, architecture: str = "universal"
) -> BuildContext:
) -> Context:
"""Create a minimal BuildContext for signing/packaging operations"""
out_dir_path = app_path.parent # out/Default_universal
@@ -102,15 +100,11 @@ def create_minimal_context(
log_info(f" Chromium src: {chromium_src}")
log_info(f" Root dir: {root_dir}")
ctx = BuildContext(
ctx = Context(
root_dir=root_dir,
chromium_src=chromium_src,
architecture=architecture,
build_type="release", # Assume release for universal builds
apply_patches=False,
sign_package=True,
package=True,
build=False,
)
# Override out_dir to match the actual location
@@ -172,7 +166,7 @@ def merge_sign_package(
log_info("=" * 70)
try:
from modules.sign import sign_app
from ..sign import sign_app
ctx = create_minimal_context(output_path, chromium_src, root_dir)
if not sign_app(ctx, create_dmg=False):
@@ -195,7 +189,7 @@ def merge_sign_package(
log_info("=" * 70)
try:
from modules.package import create_dmg
from . import create_dmg
ctx = create_minimal_context(output_path, chromium_src, root_dir)
@@ -271,21 +265,21 @@ def handle_merge_command(
return False
# Get root_dir from where this module is located
root_dir = Path(__file__).parent.parent.parent
root_dir = Path(__file__).parent.parent.parent.parent
log_info(f"📂 Using root directory: {root_dir}")
# Auto-generate output path in chromium source
# Get the app name from BuildContext
from context import BuildContext
from ...common.context import Context
temp_ctx = BuildContext(
temp_ctx = Context(
root_dir=root_dir,
chromium_src=chromium_src,
architecture="universal",
build_type="release",
)
output_path = (
chromium_src / "out" / "Default_universal" / temp_ctx.NXTSCAPE_APP_NAME
chromium_src / "out" / "Default_universal" / temp_ctx.BROWSEROS_APP_NAME
)
log_info(f" Output: {output_path} (auto-generated)")

View File

@@ -0,0 +1,274 @@
#!/usr/bin/env python3
"""Windows packaging module for BrowserOS"""
import shutil
import zipfile
from pathlib import Path
from typing import List
from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.utils import (
run_command,
log_info,
log_error,
log_success,
log_warning,
join_paths,
IS_WINDOWS,
)
class WindowsPackageModule(CommandModule):
    """Create the Windows installer plus an installer-only ZIP.

    Both artifacts use the basename
    <app>_<chromium_version>_<arch>_installer and are registered as the
    "installer" and "installer_zip" artifacts.
    """

    produces = ["installer", "installer_zip"]
    requires = []
    description = "Create Windows installer and portable ZIP"

    def validate(self, ctx: Context) -> None:
        """Require Windows and an already-built mini_installer.exe."""
        if not IS_WINDOWS():
            raise ValidationError("Windows packaging requires Windows")
        build_output_dir = join_paths(ctx.chromium_src, ctx.out_dir)
        mini_installer_path = build_output_dir / "mini_installer.exe"
        if not mini_installer_path.exists():
            raise ValidationError(f"mini_installer.exe not found: {mini_installer_path}")

    def execute(self, ctx: Context) -> None:
        """Produce both packages and register them in the artifact registry."""
        log_info("\n📦 Creating Windows packages...")
        installer_path = self._create_installer(ctx)
        zip_path = self._create_portable_zip(ctx)
        ctx.artifact_registry.add("installer", installer_path)
        ctx.artifact_registry.add("installer_zip", zip_path)
        log_success("Windows packages created successfully")

    def _installer_name(self, ctx: Context) -> str:
        # Single source of truth for the installer basename so the .exe on
        # disk and the entry inside the ZIP always agree.
        return (
            f"{ctx.get_app_base_name()}_{ctx.get_browseros_chromium_version()}"
            f"_{ctx.architecture}_installer.exe"
        )

    def _create_installer(self, ctx: Context) -> Path:
        # Copy mini_installer.exe to the dist dir under its release name.
        build_output_dir = join_paths(ctx.chromium_src, ctx.out_dir)
        mini_installer_path = build_output_dir / "mini_installer.exe"
        output_dir = ctx.get_dist_dir()
        output_dir.mkdir(parents=True, exist_ok=True)
        installer_name = self._installer_name(ctx)
        installer_path = output_dir / installer_name
        try:
            shutil.copy2(mini_installer_path, installer_path)
            log_success(f"Installer created: {installer_name}")
            return installer_path
        except Exception as e:
            # Chain the original exception for easier debugging.
            raise RuntimeError(f"Failed to create installer: {e}") from e

    def _create_portable_zip(self, ctx: Context) -> Path:
        # Wrap the installer in a ZIP for simpler distribution.
        build_output_dir = join_paths(ctx.chromium_src, ctx.out_dir)
        mini_installer_path = build_output_dir / "mini_installer.exe"
        output_dir = ctx.get_dist_dir()
        output_dir.mkdir(parents=True, exist_ok=True)
        zip_name = f"{ctx.get_app_base_name()}_{ctx.get_browseros_chromium_version()}_{ctx.architecture}_installer.zip"
        zip_path = output_dir / zip_name
        try:
            with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
                # Fix: the ZIP entry previously used ctx.get_browseros_version(),
                # inconsistent with the chromium-version naming used for every
                # other artifact name in this module.
                zipf.write(mini_installer_path, self._installer_name(ctx))
                file_size = mini_installer_path.stat().st_size
                log_info(f"Added installer to ZIP ({file_size // (1024*1024)} MB)")
            log_success(f"Installer ZIP created: {zip_name}")
            return zip_path
        except Exception as e:
            raise RuntimeError(f"Failed to create installer ZIP: {e}") from e
def build_mini_installer(ctx: Context) -> bool:
    """Build the `setup` and `mini_installer` ninja targets.

    Always invokes autoninja (incremental, so cheap when artifacts are
    fresh) and then verifies that both setup.exe and mini_installer.exe
    exist in the build output directory.

    Returns:
        True when both artifacts exist after the build, False otherwise.
    """
    log_info("\n🔨 Checking mini_installer build...")
    build_output_dir = join_paths(ctx.chromium_src, ctx.out_dir)
    mini_installer_path = build_output_dir / "mini_installer.exe"
    setup_exe_path = build_output_dir / "setup.exe"
    # Report the pre-build state; we rebuild in every case.
    if mini_installer_path.exists() and setup_exe_path.exists():
        log_info(
            "mini_installer.exe and setup.exe already exist; rebuilding to ensure freshness"
        )
    elif setup_exe_path.exists() and not mini_installer_path.exists():
        log_info("setup.exe exists but mini_installer.exe missing")
    elif mini_installer_path.exists() and not setup_exe_path.exists():
        log_info("mini_installer.exe exists but setup.exe missing")
    log_info("Building setup and mini_installer targets...")
    try:
        # Fix: IS_WINDOWS is a function (called as IS_WINDOWS() elsewhere in
        # this file); the previous bare reference was always truthy, so
        # "autoninja.bat" was selected on every platform.
        autoninja_cmd = "autoninja.bat" if IS_WINDOWS() else "autoninja"
        cmd = [
            autoninja_cmd,
            "-C",
            ctx.out_dir,  # relative path, same convention as compile.py
            "setup",
            "mini_installer",
        ]
        # autoninja resolves -C relative to the source tree, so run from
        # chromium_src and restore the working directory afterwards.
        import os

        old_cwd = os.getcwd()
        os.chdir(ctx.chromium_src)
        try:
            run_command(cmd)
        finally:
            os.chdir(old_cwd)
        # Verify both expected artifacts were produced.
        missing_artifacts = []
        if not setup_exe_path.exists():
            missing_artifacts.append("setup.exe")
        if not mini_installer_path.exists():
            missing_artifacts.append("mini_installer.exe")
        if not missing_artifacts:
            log_success("mini_installer and setup built successfully")
            return True
        log_error(
            "Build completed but missing artifacts: "
            + ", ".join(missing_artifacts)
        )
        return False
    except Exception as e:
        log_error(f"Failed to build setup/mini_installer: {e}")
        return False
def create_installer(ctx: Context) -> bool:
    """Copy the built mini_installer.exe into the dist directory under a
    versioned, architecture-tagged filename.

    Returns:
        True on success, False if the installer is missing or the copy fails.
    """
    log_info("\n🔧 Creating Windows installer...")
    mini_installer_path = (
        join_paths(ctx.chromium_src, ctx.out_dir) / "mini_installer.exe"
    )
    if not mini_installer_path.exists():
        log_warning(f"mini_installer.exe not found at: {mini_installer_path}")
        log_info(
            "To build the installer, run: autoninja -C out\\Default_x64 mini_installer"
        )
        return False
    # Ensure the dist directory exists before copying into it.
    dist_dir = ctx.get_dist_dir()
    dist_dir.mkdir(parents=True, exist_ok=True)
    # e.g. BrowserOS_140.0.0.1_x64_installer.exe
    installer_name = f"{ctx.get_app_base_name()}_{ctx.get_browseros_chromium_version()}_{ctx.architecture}_installer.exe"
    try:
        shutil.copy2(mini_installer_path, dist_dir / installer_name)
    except Exception as e:
        log_error(f"Failed to create installer: {e}")
        return False
    log_success(f"Installer created: {installer_name}")
    return True
def create_portable_zip(ctx: Context) -> bool:
    """Create ZIP of just the installer for easier distribution.

    Returns:
        True on success, False if mini_installer.exe is missing or zipping fails.
    """
    log_info("\n📦 Creating installer ZIP package...")
    # Get paths
    build_output_dir = join_paths(ctx.chromium_src, ctx.out_dir)
    mini_installer_path = build_output_dir / "mini_installer.exe"
    if not mini_installer_path.exists():
        log_warning(f"mini_installer.exe not found at: {mini_installer_path}")
        log_info(
            "To build the installer, run: autoninja -C out\\Default_x64 mini_installer"
        )
        return False
    # Create output directory
    output_dir = ctx.get_dist_dir()
    output_dir.mkdir(parents=True, exist_ok=True)
    # Generate ZIP filename with version and architecture
    zip_name = f"{ctx.get_app_base_name()}_{ctx.get_browseros_chromium_version()}_{ctx.architecture}_installer.zip"
    zip_path = output_dir / zip_name
    # Create ZIP file containing just the installer
    try:
        with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
            # Add mini_installer.exe to the zip
            # NOTE(review): the ZIP entry uses get_browseros_version() while the
            # ZIP filename above uses get_browseros_chromium_version() — confirm
            # this asymmetry is intentional.
            installer_name = f"{ctx.get_app_base_name()}_{ctx.get_browseros_version()}_{ctx.architecture}_installer.exe"
            zipf.write(mini_installer_path, installer_name)
            # Get file size for logging
            file_size = mini_installer_path.stat().st_size
            log_info(f"Added installer to ZIP ({file_size // (1024*1024)} MB)")
        log_success(f"Installer ZIP created: {zip_name}")
        return True
    except Exception as e:
        log_error(f"Failed to create installer ZIP: {e}")
        return False
# Sign functions moved to sign/windows.py
# - sign_binaries()
# - sign_with_codesigntool()
# - get_browseros_server_binary_paths()
# These are now in modules/sign/windows.py
def package_universal(contexts: List[Context]) -> bool:
    """No-op on Windows: universal (multi-arch) binaries are a macOS concept.

    Logs a warning and returns True so callers treat this step as complete.
    """
    log_warning("Universal binaries are not supported on Windows")
    log_info("Consider creating separate packages for each architecture")
    return True
def get_target_cpu(build_output_dir: Path) -> str:
    """Read the target CPU architecture from the build's args.gn.

    Accepts both the compact form ``target_cpu="x64"`` and the conventional
    GN spacing ``target_cpu = "x64"`` (the previous exact-substring match
    missed the spaced form that ``gn`` itself writes).

    Returns:
        One of "x64", "x86", or "arm64"; falls back to "x64" when args.gn is
        missing, unreadable, or declares no recognized target_cpu.
    """
    import re

    args_gn_path = build_output_dir / "args.gn"
    if not args_gn_path.exists():
        return "x64"  # Default
    try:
        args_gn_content = args_gn_path.read_text(encoding="utf-8")
        # GN typically writes spaces around '='; tolerate both styles.
        match = re.search(r'target_cpu\s*=\s*"(x64|x86|arm64)"', args_gn_content)
        if match:
            return match.group(1)
    except Exception:
        pass  # Fall through to the default on any read error
    return "x64"  # Default
def create_files_cfg_package(ctx: Context) -> bool:
    """Stub for FILES.cfg-driven packaging; currently always returns False.

    Validates that Chromium's FILES.cfg manifest exists, then reports that the
    packaging logic itself is not implemented yet.
    """
    log_info("\n📦 Creating FILES.cfg-based package...")
    files_cfg_path = (
        ctx.chromium_src / "chrome" / "tools" / "build" / "win" / "FILES.cfg"
    )
    if not files_cfg_path.exists():
        log_error(f"FILES.cfg not found at: {files_cfg_path}")
        return False
    # Porting ungoogled-chromium's filescfg module is required to finish this.
    log_warning("FILES.cfg packaging not yet implemented")
    return False

View File

@@ -0,0 +1,243 @@
#!/usr/bin/env python3
"""
Windows packaging module for Nxtscape Browser
Based on ungoogled-chromium-windows packaging approach
"""
import os
import sys
import shutil
import zipfile
from pathlib import Path
from typing import Optional, List
from ...common.context import BuildContext
from ...common.utils import (
run_command,
log_info,
log_error,
log_success,
log_warning,
join_paths,
IS_WINDOWS,
)
# BrowserOS Server binary definitions moved to sign/windows.py
def package(ctx: BuildContext) -> bool:
    """Create both Windows packages: the installer and the portable ZIP.

    Both steps are always attempted; the function returns True only when
    both succeed. The explicit build_mini_installer() pre-step is
    intentionally disabled — mini_installer.exe is assumed to exist already.
    """
    log_info("\n📦 Creating Windows packages...")
    installer_ok = create_installer(ctx)
    if installer_ok:
        log_success("Installer created successfully")
    else:
        log_error("Failed to create installer")
    zip_ok = create_portable_zip(ctx)
    if zip_ok:
        log_success("Portable ZIP created successfully")
    else:
        log_error("Failed to create portable ZIP")
    return installer_ok and zip_ok
def build_mini_installer(ctx: BuildContext) -> bool:
    """Build the 'setup' and 'mini_installer' ninja targets and verify the outputs.

    Returns:
        True if both setup.exe and mini_installer.exe exist after the build.
    """
    log_info("\n🔨 Checking mini_installer build...")
    build_output_dir = join_paths(ctx.chromium_src, ctx.out_dir)
    mini_installer_path = build_output_dir / "mini_installer.exe"
    setup_exe_path = build_output_dir / "setup.exe"
    have_installer = mini_installer_path.exists()
    have_setup = setup_exe_path.exists()
    # Report the current artifact state; a rebuild happens in every case.
    if have_installer and have_setup:
        log_info(
            "mini_installer.exe and setup.exe already exist; rebuilding to ensure freshness"
        )
    elif have_setup and not have_installer:
        log_info("setup.exe exists but mini_installer.exe missing")
    elif have_installer and not have_setup:
        log_info("mini_installer.exe exists but setup.exe missing")
    log_info("Building setup and mini_installer targets...")
    try:
        # autoninja needs the .bat suffix on Windows shells.
        autoninja_cmd = "autoninja.bat" if IS_WINDOWS else "autoninja"
        # Relative out dir with -C, matching how compile.py invokes ninja.
        cmd = [autoninja_cmd, "-C", ctx.out_dir, "setup", "mini_installer"]
        # Run from the chromium checkout so the relative out dir resolves.
        previous_cwd = os.getcwd()
        os.chdir(ctx.chromium_src)
        try:
            run_command(cmd)
        finally:
            os.chdir(previous_cwd)
        # Confirm both artifacts now exist.
        missing = [
            name
            for name, path in (
                ("setup.exe", setup_exe_path),
                ("mini_installer.exe", mini_installer_path),
            )
            if not path.exists()
        ]
        if missing:
            log_error(
                "Build completed but missing artifacts: " + ", ".join(missing)
            )
            return False
        log_success("mini_installer and setup built successfully")
        return True
    except Exception as e:
        log_error(f"Failed to build setup/mini_installer: {e}")
        return False
def create_installer(ctx: BuildContext) -> bool:
    """Create Windows installer (mini_installer.exe).

    Copies the built mini_installer.exe into the dist directory under a
    versioned, architecture-tagged name.

    Returns:
        True on success, False if the installer is missing or the copy fails.
    """
    log_info("\n🔧 Creating Windows installer...")
    # Get paths
    build_output_dir = join_paths(ctx.chromium_src, ctx.out_dir)
    mini_installer_path = build_output_dir / "mini_installer.exe"
    if not mini_installer_path.exists():
        log_warning(f"mini_installer.exe not found at: {mini_installer_path}")
        log_info(
            "To build the installer, run: autoninja -C out\\Default_x64 mini_installer"
        )
        return False
    # Create output directory
    output_dir = ctx.get_dist_dir()
    output_dir.mkdir(parents=True, exist_ok=True)
    # Generate installer filename with version and architecture
    installer_name = f"{ctx.get_app_base_name()}_{ctx.get_browseros_chromium_version()}_{ctx.architecture}_installer.exe"
    installer_path = output_dir / installer_name
    # Copy mini_installer to final location
    try:
        shutil.copy2(mini_installer_path, installer_path)
        log_success(f"Installer created: {installer_name}")
        return True
    except Exception as e:
        log_error(f"Failed to create installer: {e}")
        return False
def create_portable_zip(ctx: BuildContext) -> bool:
    """Zip the mini_installer into the dist directory for distribution.

    Returns:
        True on success, False if mini_installer.exe is missing or zipping fails.
    """
    log_info("\n📦 Creating installer ZIP package...")
    mini_installer_path = (
        join_paths(ctx.chromium_src, ctx.out_dir) / "mini_installer.exe"
    )
    if not mini_installer_path.exists():
        log_warning(f"mini_installer.exe not found at: {mini_installer_path}")
        log_info(
            "To build the installer, run: autoninja -C out\\Default_x64 mini_installer"
        )
        return False
    dist_dir = ctx.get_dist_dir()
    dist_dir.mkdir(parents=True, exist_ok=True)
    zip_name = f"{ctx.get_app_base_name()}_{ctx.get_browseros_chromium_version()}_{ctx.architecture}_installer.zip"
    try:
        with zipfile.ZipFile(dist_dir / zip_name, "w", zipfile.ZIP_DEFLATED) as zipf:
            # NOTE(review): the ZIP entry uses get_browseros_version() while the
            # ZIP filename uses get_browseros_chromium_version() — confirm this
            # asymmetry is intentional.
            entry_name = f"{ctx.get_app_base_name()}_{ctx.get_browseros_version()}_{ctx.architecture}_installer.exe"
            zipf.write(mini_installer_path, entry_name)
            size_bytes = mini_installer_path.stat().st_size
            log_info(f"Added installer to ZIP ({size_bytes // (1024*1024)} MB)")
        log_success(f"Installer ZIP created: {zip_name}")
        return True
    except Exception as e:
        log_error(f"Failed to create installer ZIP: {e}")
        return False
# Sign functions moved to sign/windows.py
# - sign_binaries()
# - sign_with_codesigntool()
# - get_browseros_server_binary_paths()
# These are now in modules/sign/windows.py
def package_universal(contexts: List[BuildContext]) -> bool:
    """Windows doesn't support universal binaries like macOS.

    Logs a warning and returns True so the pipeline treats this as a no-op
    rather than a failure.
    """
    log_warning("Universal binaries are not supported on Windows")
    log_info("Consider creating separate packages for each architecture")
    return True
def get_target_cpu(build_output_dir: Path) -> str:
    """Read the target CPU architecture from the build's args.gn.

    Accepts both the compact form ``target_cpu="x64"`` and the conventional
    GN spacing ``target_cpu = "x64"`` (the previous exact-substring match
    missed the spaced form that ``gn`` itself writes).

    Returns:
        One of "x64", "x86", or "arm64"; falls back to "x64" when args.gn is
        missing, unreadable, or declares no recognized target_cpu.
    """
    import re

    args_gn_path = build_output_dir / "args.gn"
    if not args_gn_path.exists():
        return "x64"  # Default
    try:
        args_gn_content = args_gn_path.read_text(encoding="utf-8")
        # GN typically writes spaces around '='; tolerate both styles.
        match = re.search(r'target_cpu\s*=\s*"(x64|x86|arm64)"', args_gn_content)
        if match:
            return match.group(1)
    except Exception:
        pass  # Fall through to the default on any read error
    return "x64"  # Default
def create_files_cfg_package(ctx: BuildContext) -> bool:
    """Create package using Chromium's FILES.cfg approach (alternative method).

    Currently a stub: validates that FILES.cfg exists in the chromium checkout
    and always returns False because the filescfg logic from ungoogled-chromium
    has not been ported yet. (Removed the unused ``build_output_dir`` local,
    matching the newer copy of this function.)
    """
    log_info("\n📦 Creating FILES.cfg-based package...")
    files_cfg_path = (
        ctx.chromium_src / "chrome" / "tools" / "build" / "win" / "FILES.cfg"
    )
    if not files_cfg_path.exists():
        log_error(f"FILES.cfg not found at: {files_cfg_path}")
        return False
    # This would require implementing the filescfg module functionality
    # from ungoogled-chromium, which is quite complex
    log_warning("FILES.cfg packaging not yet implemented")
    return False

View File

@@ -1,444 +0,0 @@
#!/usr/bin/env python3
"""
Windows packaging module for Nxtscape Browser
Based on ungoogled-chromium-windows packaging approach
"""
import os
import sys
import shutil
import zipfile
from pathlib import Path
from typing import Optional, List
from context import BuildContext
from utils import (
run_command,
log_info,
log_error,
log_success,
log_warning,
join_paths,
IS_WINDOWS,
)
# BrowserOS Server binaries packaged alongside Chrome that must be signed prior to
# building the installer. Extend this list when new server-side executables are added.
# BrowserOS Server binaries packaged alongside Chrome that must be signed prior
# to building the installer. Extend this list when new server-side executables
# are added.
BROWSEROS_SERVER_BINARIES: List[str] = [
    "browseros_server.exe",
    "codex.exe",
]


def get_browseros_server_binary_paths(build_output_dir: Path) -> List[Path]:
    """Return absolute paths to BrowserOS Server binaries for signing."""
    bin_dir = build_output_dir.joinpath(
        "BrowserOSServer", "default", "resources", "bin"
    )
    return [bin_dir / name for name in BROWSEROS_SERVER_BINARIES]
def package(ctx: BuildContext) -> bool:
    """Create Windows packages (installer and portable zip).

    Both steps are always attempted; returns True only when both succeed.
    """
    log_info("\n📦 Creating Windows packages...")
    # First, ensure mini_installer is built
    # NOTE: this pre-step is intentionally disabled; mini_installer.exe is
    # assumed to exist already (it is built during the signing flow).
    # if not build_mini_installer(ctx):
    #     log_error("Failed to build mini_installer")
    #     return False
    # Create both installer and portable zip
    success = True
    if create_installer(ctx):
        log_success("Installer created successfully")
    else:
        log_error("Failed to create installer")
        success = False
    if create_portable_zip(ctx):
        log_success("Portable ZIP created successfully")
    else:
        log_error("Failed to create portable ZIP")
        success = False
    return success
def build_mini_installer(ctx: BuildContext) -> bool:
    """Build the 'setup' and 'mini_installer' ninja targets.

    Always rebuilds (even when both artifacts already exist) and then checks
    that setup.exe and mini_installer.exe are present.

    Returns:
        True if both artifacts exist after the build, False otherwise.
    """
    log_info("\n🔨 Checking mini_installer build...")
    # Get paths
    build_output_dir = join_paths(ctx.chromium_src, ctx.out_dir)
    mini_installer_path = build_output_dir / "mini_installer.exe"
    setup_exe_path = build_output_dir / "setup.exe"
    # Report the current artifact state before rebuilding.
    if mini_installer_path.exists() and setup_exe_path.exists():
        log_info(
            "mini_installer.exe and setup.exe already exist; rebuilding to ensure freshness"
        )
    elif setup_exe_path.exists() and not mini_installer_path.exists():
        log_info("setup.exe exists but mini_installer.exe missing")
    elif mini_installer_path.exists() and not setup_exe_path.exists():
        log_info("mini_installer.exe exists but setup.exe missing")
    log_info("Building setup and mini_installer targets...")
    # Build mini_installer using autoninja
    try:
        # Use autoninja.bat on Windows
        autoninja_cmd = "autoninja.bat" if IS_WINDOWS else "autoninja"
        # Build the mini_installer target
        cmd = [
            autoninja_cmd,
            "-C",
            ctx.out_dir,  # Use relative path like in compile.py
            "setup",
            "mini_installer",
        ]
        # Change to chromium_src directory before running (like compile.py does)
        import os
        old_cwd = os.getcwd()
        os.chdir(ctx.chromium_src)
        try:
            run_command(cmd)
        finally:
            # Always restore the original working directory, even on failure.
            os.chdir(old_cwd)
        # Verify the file was created
        missing_artifacts = []
        if not setup_exe_path.exists():
            missing_artifacts.append("setup.exe")
        if not mini_installer_path.exists():
            missing_artifacts.append("mini_installer.exe")
        if not missing_artifacts:
            log_success("mini_installer and setup built successfully")
            return True
        log_error(
            "Build completed but missing artifacts: "
            + ", ".join(missing_artifacts)
        )
        return False
    except Exception as e:
        log_error(f"Failed to build setup/mini_installer: {e}")
        return False
def create_installer(ctx: BuildContext) -> bool:
    """Copy the built mini_installer.exe into the dist directory under a
    versioned, architecture-tagged filename.

    Returns:
        True on success, False if the installer is missing or the copy fails.
    """
    log_info("\n🔧 Creating Windows installer...")
    mini_installer_path = (
        join_paths(ctx.chromium_src, ctx.out_dir) / "mini_installer.exe"
    )
    if not mini_installer_path.exists():
        log_warning(f"mini_installer.exe not found at: {mini_installer_path}")
        log_info(
            "To build the installer, run: autoninja -C out\\Default_x64 mini_installer"
        )
        return False
    # Ensure the dist directory exists before copying into it.
    dist_dir = ctx.get_dist_dir()
    dist_dir.mkdir(parents=True, exist_ok=True)
    installer_name = f"{ctx.get_app_base_name()}_{ctx.get_nxtscape_chromium_version()}_{ctx.architecture}_installer.exe"
    try:
        shutil.copy2(mini_installer_path, dist_dir / installer_name)
    except Exception as e:
        log_error(f"Failed to create installer: {e}")
        return False
    log_success(f"Installer created: {installer_name}")
    return True
def create_portable_zip(ctx: BuildContext) -> bool:
    """Create ZIP of just the installer for easier distribution.

    Returns:
        True on success, False if mini_installer.exe is missing or zipping fails.
    """
    log_info("\n📦 Creating installer ZIP package...")
    # Get paths
    build_output_dir = join_paths(ctx.chromium_src, ctx.out_dir)
    mini_installer_path = build_output_dir / "mini_installer.exe"
    if not mini_installer_path.exists():
        log_warning(f"mini_installer.exe not found at: {mini_installer_path}")
        log_info(
            "To build the installer, run: autoninja -C out\\Default_x64 mini_installer"
        )
        return False
    # Create output directory
    output_dir = ctx.get_dist_dir()
    output_dir.mkdir(parents=True, exist_ok=True)
    # Generate ZIP filename with version and architecture
    zip_name = f"{ctx.get_app_base_name()}_{ctx.get_nxtscape_chromium_version()}_{ctx.architecture}_installer.zip"
    zip_path = output_dir / zip_name
    # Create ZIP file containing just the installer
    try:
        with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
            # Add mini_installer.exe to the zip
            # NOTE(review): the ZIP entry uses get_nxtscape_version() while the
            # ZIP filename above uses get_nxtscape_chromium_version() — confirm
            # this asymmetry is intentional.
            installer_name = f"{ctx.get_app_base_name()}_{ctx.get_nxtscape_version()}_{ctx.architecture}_installer.exe"
            zipf.write(mini_installer_path, installer_name)
            # Get file size for logging
            file_size = mini_installer_path.stat().st_size
            log_info(f"Added installer to ZIP ({file_size // (1024*1024)} MB)")
        log_success(f"Installer ZIP created: {zip_name}")
        return True
    except Exception as e:
        log_error(f"Failed to create installer ZIP: {e}")
        return False
def sign_binaries(ctx: BuildContext, certificate_name: Optional[str] = None) -> bool:
    """Sign Windows binaries using SSL.com CodeSignTool.

    Three-step flow, order matters: (1) sign chrome.exe and the BrowserOS
    Server binaries, (2) build mini_installer so the installer packages the
    signed binaries, (3) sign mini_installer.exe itself.

    Args:
        ctx: Build context providing chromium_src and out_dir.
        certificate_name: Unused here; signing credentials come from the
            environment (see sign_with_codesigntool).

    Returns:
        True only when all three steps succeed.
    """
    log_info("\n🔏 Signing Windows binaries...")
    # Get paths to sign
    build_output_dir = join_paths(ctx.chromium_src, ctx.out_dir)
    # STEP 1: Sign chrome.exe and BrowserOS Server binaries BEFORE building mini_installer
    log_info("\nStep 1/3: Signing executables before packaging...")
    binaries_to_sign_first = [build_output_dir / "chrome.exe"]
    binaries_to_sign_first.extend(get_browseros_server_binary_paths(build_output_dir))
    # Check which binaries exist
    existing_binaries = []
    for binary in binaries_to_sign_first:
        if binary.exists():
            existing_binaries.append(binary)
            log_info(f"Found binary to sign: {binary.name}")
        else:
            log_warning(f"Binary not found: {binary}")
    if not existing_binaries:
        log_error("No binaries found to sign")
        return False
    # Sign the executables
    if not sign_with_codesigntool(existing_binaries):
        log_error("Failed to sign executables")
        return False
    # STEP 2: Build mini_installer to package the signed binaries
    log_info("\nStep 2/3: Building mini_installer with signed binaries...")
    if not build_mini_installer(ctx):
        log_error("Failed to build mini_installer")
        return False
    # STEP 3: Sign the mini_installer.exe
    log_info("\nStep 3/3: Signing mini_installer.exe...")
    mini_installer_path = build_output_dir / "mini_installer.exe"
    if not mini_installer_path.exists():
        log_error(f"mini_installer.exe not found at: {mini_installer_path}")
        return False
    if not sign_with_codesigntool([mini_installer_path]):
        log_error("Failed to sign mini_installer.exe")
        return False
    log_success("✅ All binaries signed successfully!")
    return True
def sign_with_codesigntool(binaries: List[Path]) -> bool:
    """Sign binaries using SSL.com CodeSignTool.

    Reads the CodeSignTool location and eSigner credentials from environment
    variables (normally loaded from .env). Each binary is signed into a
    temporary directory, moved back over the original, and then verified with
    PowerShell's Get-AuthenticodeSignature.

    Fixes over the previous version: the two bare ``except:`` clauses are
    narrowed, and the repeated function-local ``import subprocess`` /
    ``import shutil`` statements are consolidated (shutil is already imported
    at module level).

    Returns:
        True only if every binary signs and verifies successfully.
    """
    import subprocess

    log_info("Using SSL.com CodeSignTool for signing...")
    # Get CodeSignTool directory from environment
    codesigntool_dir = os.environ.get("CODE_SIGN_TOOL_PATH")
    if not codesigntool_dir:
        log_error("CODE_SIGN_TOOL_PATH not set in .env file")
        log_error("Set CODE_SIGN_TOOL_PATH=C:/src/CodeSignTool-v1.3.2-windows")
        return False
    # Construct path to CodeSignTool.bat
    codesigntool_path = Path(codesigntool_dir) / "CodeSignTool.bat"
    if not codesigntool_path.exists():
        log_error(f"CodeSignTool.bat not found at: {codesigntool_path}")
        log_error("Make sure CODE_SIGN_TOOL_PATH points to the CodeSignTool directory")
        return False
    # Check for required environment variables
    username = os.environ.get("ESIGNER_USERNAME")
    password = os.environ.get("ESIGNER_PASSWORD")
    totp_secret = os.environ.get("ESIGNER_TOTP_SECRET")
    credential_id = os.environ.get("ESIGNER_CREDENTIAL_ID")
    if not all([username, password, totp_secret]):
        log_error("Missing required eSigner environment variables in .env:")
        log_error("  ESIGNER_USERNAME=your-email")
        log_error("  ESIGNER_PASSWORD=your-password")
        log_error("  ESIGNER_TOTP_SECRET=your-totp-secret")
        if not credential_id:
            log_warning("  ESIGNER_CREDENTIAL_ID is recommended but optional")
        return False
    all_success = True
    for binary in binaries:
        try:
            log_info(f"Signing {binary.name}...")
            # Sign into a temp directory to avoid a source/dest conflict.
            temp_output_dir = binary.parent / "signed_temp"
            temp_output_dir.mkdir(exist_ok=True)
            cmd = [
                str(codesigntool_path),
                "sign",
                "-username",
                username,
                "-password",
                f'"{password}"',  # Always quote the password for shell
            ]
            # Add credential_id BEFORE totp_secret (order matters!)
            if credential_id:
                cmd.extend(["-credential_id", credential_id])
            cmd.extend(
                [
                    "-totp_secret",
                    totp_secret,
                    "-input_file_path",
                    str(binary),
                    "-output_dir_path",
                    str(temp_output_dir),
                    "-override",
                ]
            )
            # Note: Timestamp server is configured on SSL.com side automatically
            # SECURITY NOTE: CodeSignTool requires shell=True for its quote
            # handling, so credentials pass through the shell — keep .env
            # values free of shell metacharacters.
            cmd_str = " ".join(cmd)
            log_info(f"Running: {cmd_str}")
            result = subprocess.run(
                cmd_str,
                shell=True,
                capture_output=True,
                text=True,
                cwd=str(codesigntool_path.parent),
            )
            # Print output for debugging
            if result.stdout:
                for line in result.stdout.split("\n"):
                    if line.strip():
                        log_info(line.strip())
            if result.stderr:
                for line in result.stderr.split("\n"):
                    if line.strip() and "WARNING" not in line:
                        log_error(line.strip())
            # CodeSignTool returns 0 even on auth errors, so check its output.
            if result.stdout and "Error:" in result.stdout:
                log_error(
                    f"✗ Failed to sign {binary.name} - Authentication or signing error"
                )
                all_success = False
                continue
            # Move the signed file back to original location
            signed_file = temp_output_dir / binary.name
            if signed_file.exists():
                shutil.move(str(signed_file), str(binary))
                log_info(f"Moved signed {binary.name} to original location")
            # Clean up temp directory
            try:
                temp_output_dir.rmdir()
            except OSError:
                pass  # Directory might not be empty
            # Verify the file is actually signed (Windows only)
            verify_cmd = [
                "powershell",
                "-Command",
                f"(Get-AuthenticodeSignature '{binary}').Status",
            ]
            try:
                verify_result = subprocess.run(
                    verify_cmd, capture_output=True, text=True
                )
                if "Valid" in verify_result.stdout:
                    log_success(f"{binary.name} signed and verified successfully")
                else:
                    log_error(
                        f"{binary.name} signing verification failed - Status: {verify_result.stdout.strip()}"
                    )
                    all_success = False
            except Exception:
                log_warning(f"Could not verify signature for {binary.name}")
        except Exception as e:
            log_error(f"Failed to sign {binary.name}: {e}")
            all_success = False
    return all_success
def package_universal(contexts: List[BuildContext]) -> bool:
    """No-op on Windows: universal (multi-arch) binaries are a macOS concept.

    Logs a warning and returns True so callers treat this step as complete.
    """
    log_warning("Universal binaries are not supported on Windows")
    log_info("Consider creating separate packages for each architecture")
    return True
def get_target_cpu(build_output_dir: Path) -> str:
    """Read the target CPU architecture from the build's args.gn.

    Accepts both the compact form ``target_cpu="x64"`` and the conventional
    GN spacing ``target_cpu = "x64"`` (the previous exact-substring match
    missed the spaced form that ``gn`` itself writes).

    Returns:
        One of "x64", "x86", or "arm64"; falls back to "x64" when args.gn is
        missing, unreadable, or declares no recognized target_cpu.
    """
    import re

    args_gn_path = build_output_dir / "args.gn"
    if not args_gn_path.exists():
        return "x64"  # Default
    try:
        args_gn_content = args_gn_path.read_text(encoding="utf-8")
        # GN typically writes spaces around '='; tolerate both styles.
        match = re.search(r'target_cpu\s*=\s*"(x64|x86|arm64)"', args_gn_content)
        if match:
            return match.group(1)
    except Exception:
        pass  # Fall through to the default on any read error
    return "x64"  # Default
def create_files_cfg_package(ctx: BuildContext) -> bool:
    """Create package using Chromium's FILES.cfg approach (alternative method).

    Currently a stub: validates that FILES.cfg exists in the chromium checkout
    and always returns False because the filescfg logic from ungoogled-chromium
    has not been ported. (Removed the unused ``build_output_dir`` local.)
    """
    log_info("\n📦 Creating FILES.cfg-based package...")
    files_cfg_path = (
        ctx.chromium_src / "chrome" / "tools" / "build" / "win" / "FILES.cfg"
    )
    if not files_cfg_path.exists():
        log_error(f"FILES.cfg not found at: {files_cfg_path}")
        return False
    # This would require implementing the filescfg module functionality
    # from ungoogled-chromium, which is quite complex
    log_warning("FILES.cfg packaging not yet implemented")
    return False

View File

@@ -1,351 +0,0 @@
#!/usr/bin/env python3
"""
Patch management module for Nxtscape build system
"""
import sys
import shutil
import subprocess
from pathlib import Path
from typing import Iterator, List, Tuple, Optional
from context import BuildContext
from utils import (
log_info,
log_error,
log_success,
log_warning,
IS_WINDOWS,
IS_LINUX,
IS_MACOS,
)
# Switch to new patching system using dev CLI
NEW_PATCHING = True
def apply_patches_with_dev_cli(
    ctx: BuildContext, interactive: bool = False, commit_each: bool = False
) -> bool:
    """Apply patches using the new dev CLI system.

    Args:
        ctx: Build context; ctx.apply_patches gates the whole step.
        interactive: When True, failed patches are handled interactively
            instead of aborting the run.
        commit_each: When True, a git commit is created after each patch.

    Returns:
        True when patches applied (or the step was skipped / handled
        interactively).

    Raises:
        RuntimeError: If git is missing, or patches fail non-interactively.
    """
    if not ctx.apply_patches:
        log_info("\n⏭️  Skipping patches")
        return True
    log_info("\n🩹 Applying patches using new dev CLI system...")
    # Check if git is available
    if not shutil.which("git"):
        log_error("Git is not available in PATH")
        log_error("Please install Git to apply patches")
        raise RuntimeError("Git not found in PATH")
    # Import dev CLI module
    # NOTE: path insertion must precede the import so modules.dev_cli resolves.
    sys.path.insert(0, str(Path(__file__).parent.parent))
    from modules.dev_cli.apply import apply_all_patches
    # Call the dev CLI function directly
    applied, failed = apply_all_patches(
        build_ctx=ctx,
        commit_each=commit_each,
        dry_run=False,
        interactive=interactive,
    )
    # Handle results
    if failed and not interactive:
        # In non-interactive mode, fail if any patches failed
        raise RuntimeError(f"Failed to apply {len(failed)} patches")
    return True
def apply_patches(
    ctx: BuildContext, interactive: bool = False, commit_each: bool = False
) -> bool:
    """Apply Nxtscape patches.

    Dispatches to the new dev-CLI path when NEW_PATCHING is set; otherwise
    runs the legacy series-file flow: parse the series file, filter patches by
    platform skip directives, then apply each (optionally interactively,
    optionally committing after each patch).

    Returns:
        True on completion (including user-requested early stop).

    Raises:
        RuntimeError: If git is missing.
        FileNotFoundError: If the patches directory does not exist.
    """
    # Use new patching system if enabled
    if NEW_PATCHING:
        return apply_patches_with_dev_cli(ctx, interactive, commit_each)
    # Otherwise, use the legacy patching system
    if not ctx.apply_patches:
        log_info("\n⏭️  Skipping patches")
        return True
    log_info("\n🩹 Applying patches...")
    # Check if git is available
    if not shutil.which("git"):
        log_error("Git is not available in PATH")
        log_error("Please install Git to apply patches")
        raise RuntimeError("Git not found in PATH")
    # Get list of patches
    root_patches_dir = ctx.get_patches_dir()
    nxtscape_patches_dir = ctx.get_nxtscape_patches_dir()
    if not nxtscape_patches_dir.exists():
        log_error(f"Patches directory not found: {nxtscape_patches_dir}")
        raise FileNotFoundError(f"Patches directory not found: {nxtscape_patches_dir}")
    # get all patches in nxtscape_patches_dir
    all_patches = list(parse_series_file(root_patches_dir))
    # Filter out patches that should be skipped on this platform
    patches = []
    skipped_count = 0
    for patch_path, skip_platforms in all_patches:
        if should_skip_patch(skip_platforms):
            log_info(
                f"⏭️  Skipping {patch_path.name} (not for {get_current_platform()})"
            )
            skipped_count += 1
        else:
            patches.append((patch_path, skip_platforms))
    if not patches:
        if skipped_count > 0:
            log_info(
                f"⚠️  All {skipped_count} patches were skipped for {get_current_platform()}"
            )
        else:
            log_info("⚠️  No patches found to apply")
        return True
    log_info(
        f"Found {len(patches)} patches to apply ({skipped_count} skipped for {get_current_platform()})"
    )
    if interactive:
        log_info(
            "🔍 Interactive mode enabled - will ask for confirmation before each patch"
        )
    if commit_each:
        log_info("📝 Git commit mode enabled - will create a commit after each patch")
    # Apply each patch
    for i, (patch_path, _) in enumerate(patches, 1):
        if not patch_path.exists():
            # Missing patch files are warned about and skipped, not fatal.
            log_info(f"⚠️  Patch file not found: {patch_path}")
            continue
        if interactive:
            # Show patch info and ask for confirmation
            log_info(f"\n{'='*60}")
            log_info(f"Patch {i}/{len(patches)}: {patch_path.name}")
            log_info(f"{'='*60}")
            while True:
                choice = input(
                    "\nOptions:\n 1) Apply this patch\n 2) Skip this patch\n 3) Stop patching here\nEnter your choice (1-3): "
                ).strip()
                if choice == "1":
                    apply_single_patch(
                        patch_path, ctx.chromium_src, i, len(patches), commit_each
                    )
                    break
                elif choice == "2":
                    log_warning(f"⏭️  Skipping patch {patch_path.name}")
                    break
                elif choice == "3":
                    log_info("Stopping patch process as requested")
                    return True
                else:
                    log_error("Invalid choice. Please enter 1, 2, or 3.")
        else:
            apply_single_patch(
                patch_path, ctx.chromium_src, i, len(patches), commit_each
            )
    log_success("Patches applied")
    return True
def get_current_platform() -> str:
    """Map the host-OS flags to the platform names used in skip directives."""
    if IS_WINDOWS:
        return "windows"
    if IS_LINUX:
        return "linux"
    if IS_MACOS:
        return "darwin"
    return "unknown"
def should_skip_patch(skip_platforms: Optional[List[str]]) -> bool:
    """Return True when the current platform appears in the patch's skip list.

    A None skip list means the patch applies everywhere.
    """
    if skip_platforms is None:
        return False
    current = get_current_platform()
    # Accept common aliases so series files may say e.g. "macos" or "win".
    platform_aliases = {
        "darwin": ["darwin", "macos", "mac", "osx"],
        "linux": ["linux"],
        "windows": ["windows", "win32", "win"],
    }
    current_aliases = platform_aliases.get(current, [current])
    return any(platform in current_aliases for platform in skip_platforms)
def parse_series_file(patches_dir: Path) -> Iterator[Tuple[Path, Optional[List[str]]]]:
    """Parse the series file to get list of patches with skip directives.

    Returns tuples of (patch_path, skip_platforms) where skip_platforms
    is None if no platforms should be skipped, or a list of platform names.
    """
    series_file = patches_dir / "series"
    entries = []
    for raw_line in series_file.read_text().splitlines():
        entry = raw_line.strip()
        # Blank lines and full-line comments carry no patch.
        if not entry or entry.startswith("#"):
            continue
        skip_platforms = None
        if " #skip:" in entry:
            # "foo.patch #skip: windows, linux" -> patch plus platform list.
            head, spec = entry.split(" #skip:")[:2]
            entry = head.strip()
            skip_platforms = [p.strip().lower() for p in spec.split(",")]
        elif " #" in entry:
            # Strip any other trailing inline comment.
            entry = entry.split(" #")[0].strip()
        entries.append((patches_dir / entry, skip_platforms))
    return entries
def apply_single_patch(
    patch_path: Path,
    tree_path: Path,
    current_num: int,
    total: int,
    commit_each: bool = False,
) -> bool:
    """Apply a single patch with git apply, falling back to a 3-way merge.

    On failure, prompts the user to skip, retry, abort, or fix manually.

    Args:
        patch_path: Patch file to apply.
        tree_path: Root of the git tree to apply it in.
        current_num: 1-based index of this patch (for progress output).
        total: Total number of patches being applied.
        commit_each: When True, create a git commit after a successful apply.

    Returns:
        True if the patch was applied or deliberately skipped.

    Raises:
        RuntimeError: If the user chooses to abort the patch process.
    """
    # Use git apply which is cross-platform and handles patch format better
    cmd = [
        "git",
        "apply",
        "--ignore-whitespace",
        "--whitespace=nowarn",
        "-p1",
        str(patch_path),
    ]
    log_info(f" * Applying {patch_path.name} ({current_num}/{total})")
    # Run from the tree_path directory
    result = subprocess.run(cmd, text=True, capture_output=True, cwd=tree_path)
    if result.returncode == 0:
        if commit_each:
            commit_patch(patch_path, tree_path)
        return True
    # Patch failed - try with --3way for better conflict resolution.
    # BUGFIX: previously `cmd.append("--3way")` was followed by
    # `cmd[:-1] + ["--3way", str(patch_path)]`, which put the patch path on
    # the command line twice and made git apply process the patch a second
    # time, so the 3-way fallback could never succeed.
    log_warning(f"Standard apply failed, trying 3-way merge for {patch_path.name}")
    result = subprocess.run(
        cmd[:-1] + ["--3way", str(patch_path)],
        text=True,
        capture_output=True,
        cwd=tree_path,
    )
    if result.returncode == 0:
        log_info(f"✓ Applied {patch_path.name} with 3-way merge")
        if commit_each:
            commit_patch(patch_path, tree_path)
        return True
    # Patch still failed
    log_error(f"Failed to apply patch: {patch_path.name}")
    if result.stderr:
        log_error(f"Error: {result.stderr}")
    # Interactive prompt for handling failure
    log_error("\n============================================")
    log_error(f"Patch {patch_path.name} failed to apply.")
    log_info("Options:")
    log_info(" 1) Skip this patch and continue")
    log_info(" 2) Retry this patch")
    log_info(" 3) Abort patching")
    log_info(" 4) Interactive mode - Fix manually and continue")
    while True:
        choice = input("Enter your choice (1-4): ").strip()
        if choice == "1":
            log_warning(f"⏭️  Skipping patch {patch_path.name}")
            return True  # Continue with next patch
        elif choice == "2":
            return apply_single_patch(
                patch_path, tree_path, current_num, total, commit_each
            )
        elif choice == "3":
            log_error("Aborting patch process")
            raise RuntimeError("Patch process aborted by user")
        elif choice == "4":
            log_info("\nPlease fix the issue manually, then press Enter to continue...")
            input("Press Enter when ready: ")
            # Retry after manual fix
            return apply_single_patch(
                patch_path, tree_path, current_num, total, commit_each
            )
def commit_patch(patch_path: Path, tree_path: Path) -> bool:
    """Stage everything and commit it as 'patch: <name>'.

    Returns:
        True when the commit was created, False on any staging/commit failure.
    """
    try:
        # Stage all changes produced by the patch.
        staged = subprocess.run(
            ["git", "add", "-A"], capture_output=True, text=True, cwd=tree_path
        )
        if staged.returncode != 0:
            log_warning(f"Failed to stage changes for patch {patch_path.name}")
            if staged.stderr:
                log_warning(f"Error: {staged.stderr}")
            return False
        patch_name = patch_path.stem  # Remove .patch extension
        committed = subprocess.run(
            ["git", "commit", "-m", f"patch: {patch_name}"],
            capture_output=True,
            text=True,
            cwd=tree_path,
        )
        if committed.returncode != 0:
            log_warning(f"Failed to commit patch {patch_path.name}")
            if committed.stderr:
                log_warning(f"Error: {committed.stderr}")
            return False
        log_success(f"📝 Created commit for patch: {patch_name}")
        return True
    except Exception as e:
        log_warning(f"Error creating commit for patch {patch_path.name}: {e}")
        return False

View File

@@ -0,0 +1,67 @@
#!/usr/bin/env python3
"""Patch management module for BrowserOS build system"""
import shutil
from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.utils import log_info, log_error
class PatchesModule(CommandModule):
    """Command module that applies the BrowserOS patch set to the Chromium tree."""

    produces = []
    requires = []
    description = "Apply BrowserOS patches to Chromium"

    def validate(self, ctx: Context) -> None:
        # Patching is driven entirely through git; bail out early if it's absent.
        if shutil.which("git") is None:
            raise ValidationError(
                "Git is not available in PATH - required for applying patches"
            )
        patches_dir = ctx.get_patches_dir()
        if not patches_dir.exists():
            raise ValidationError(f"Patches directory not found: {patches_dir}")

    def execute(self, ctx: Context) -> None:
        log_info("\n🩹 Applying patches...")
        ok = apply_patches_impl(ctx, interactive=False, commit_each=False)
        if not ok:
            raise RuntimeError("Failed to apply patches")
def apply_patches_impl(
    ctx: Context, interactive: bool = False, commit_each: bool = False
) -> bool:
    """Apply all BrowserOS patches via the dev CLI patch machinery.

    Args:
        ctx: Build context for the current run.
        interactive: When True, failures are handled interactively by the
            underlying apply routine instead of aborting.
        commit_each: When True, create one git commit per applied patch.

    Returns:
        True when every patch applied (or failures were handled interactively).

    Raises:
        RuntimeError: When git is missing, or any patch fails in
            non-interactive mode.
    """
    log_info("\n🩹 Applying patches using dev CLI system...")

    # git is a hard prerequisite for applying patches
    if shutil.which("git") is None:
        log_error("Git is not available in PATH")
        log_error("Please install Git to apply patches")
        raise RuntimeError("Git not found in PATH")

    # Imported lazily via the package path so its relative imports resolve.
    from build.modules.apply.apply_all import apply_all_patches

    _, failed = apply_all_patches(
        build_ctx=ctx,
        commit_each=commit_each,
        dry_run=False,
        interactive=interactive,
    )

    if failed and not interactive:
        # Non-interactive runs must hard-fail when anything did not apply.
        raise RuntimeError(f"Failed to apply {len(failed)} patches")

    return True

View File

@@ -0,0 +1,188 @@
#!/usr/bin/env python3
"""Series-based patch module for BrowserOS build system (GNU Quilt format)"""
import shutil
import subprocess
from pathlib import Path
from typing import Iterator
from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.utils import log_info, log_success, log_error
ENCODING = "UTF-8"
class SeriesPatchesModule(CommandModule):
    """Command module that applies patches listed in a GNU Quilt series file."""

    produces = []
    requires = []
    description = "Apply series-based patches (GNU Quilt format)"

    def validate(self, ctx: Context) -> None:
        if shutil.which("git") is None:
            raise ValidationError("Git is not available in PATH")
        series_dir = ctx.get_series_patches_dir()
        if not series_dir.exists():
            raise ValidationError(f"Series patches directory not found: {series_dir}")
        # The series file is the ordered index of patches to apply.
        series_file = series_dir / "series"
        if not series_file.exists():
            raise ValidationError(f"Series file not found: {series_file}")

    def execute(self, ctx: Context) -> None:
        log_info("\n🩹 Applying series patches...")
        applied, failed = apply_series_patches_impl(ctx)
        if failed:
            raise RuntimeError(f"Failed to apply {len(failed)} series patches")
        log_success(f"Applied {len(applied)} series patches")
def parse_series(series_path: Path) -> Iterator[str]:
    """
    Parse a GNU Quilt series file, yielding patch paths in order.

    Format:
        - One patch path per line (relative to series directory)
        - Lines starting with # are comments
        - Inline comments with ' #' are stripped
        - Blank lines are ignored
    """
    text = series_path.read_text(encoding="UTF-8")
    for raw_line in text.splitlines():
        entry = raw_line.strip()
        # Skip blanks and full-line comments
        if not entry or entry.startswith("#"):
            continue
        # Drop any trailing inline comment (' #...')
        entry = entry.split(" #", 1)[0].strip()
        if entry:
            yield entry
def apply_single_patch(patch_path: Path, chromium_src: Path) -> tuple[bool, str]:
    """
    Apply a single patch with `git apply`, falling back to a 3-way merge.

    Returns:
        (success, error_message) — error_message is empty on success.
    """
    common_flags = [
        "--ignore-whitespace",
        "--whitespace=nowarn",
        "-p1",
        str(patch_path),
    ]

    # First attempt: a plain apply
    attempt = subprocess.run(
        ["git", "apply", *common_flags],
        cwd=chromium_src,
        capture_output=True,
        text=True,
    )
    if attempt.returncode == 0:
        return True, ""

    # Second attempt: a 3-way merge can succeed where a plain apply cannot
    attempt = subprocess.run(
        ["git", "apply", "--3way", *common_flags],
        cwd=chromium_src,
        capture_output=True,
        text=True,
    )
    if attempt.returncode == 0:
        return True, ""
    return False, attempt.stderr or attempt.stdout
def apply_series_patches_impl(
    ctx: Context,
    dry_run: bool = False
) -> tuple[list[Path], list[Path]]:
    """
    Apply every patch listed in the series file, in order.

    Args:
        ctx: Build context
        dry_run: If True, only check whether each patch would apply

    Returns:
        (applied_patches, failed_patches)
    """
    series_dir = ctx.get_series_patches_dir()
    series_file = series_dir / "series"
    chromium_src = ctx.chromium_src

    entries = list(parse_series(series_file))
    total = len(entries)
    if not entries:
        log_info(" No patches listed in series file")
        return [], []
    log_info(f" Found {total} patches in series file")

    applied: list[Path] = []
    failed: list[Path] = []
    for i, relative_path in enumerate(entries, 1):
        patch_path = series_dir / relative_path
        if not patch_path.exists():
            log_error(f" [{i}/{total}] ✗ Patch file not found: {relative_path}")
            failed.append(patch_path)
            continue

        if dry_run:
            # --check only verifies applicability; the tree is left untouched.
            probe = subprocess.run(
                [
                    "git", "apply",
                    "--check",
                    "--ignore-whitespace",
                    "-p1",
                    str(patch_path),
                ],
                cwd=chromium_src,
                capture_output=True,
                text=True,
            )
            if probe.returncode == 0:
                log_info(f" [{i}/{total}] ✓ Would apply: {relative_path}")
                applied.append(patch_path)
            else:
                log_error(f" [{i}/{total}] ✗ Would fail: {relative_path}")
                failed.append(patch_path)
            continue

        success, error = apply_single_patch(patch_path, chromium_src)
        if success:
            log_info(f" [{i}/{total}] ✓ Applied: {relative_path}")
            applied.append(patch_path)
        else:
            log_error(f" [{i}/{total}] ✗ Failed: {relative_path}")
            if error:
                log_error(f" {error.strip()}")
            failed.append(patch_path)

    return applied, failed

View File

@@ -1,97 +0,0 @@
#!/usr/bin/env python3
"""
Post-build module to fix Info.plist and other post-processing tasks
"""
import plistlib
from pathlib import Path
from context import BuildContext
from utils import log_info, log_success, log_error
def add_sparkle_keys_to_info_plist(ctx: BuildContext):
    """Add Sparkle keys to the built app's Info.plist.

    Reads key/value fragments from resources/entitlements/Info.plist.additions
    (raw XML plist fragments, no outer <plist> wrapper) and merges them into
    the built app bundle's Contents/Info.plist.

    Raises:
        FileNotFoundError: If the built Info.plist or the additions file is missing.
        ValueError: If the additions file fails to parse or contains no keys.
    """
    app_path = ctx.get_app_path()
    info_plist_path = app_path / "Contents" / "Info.plist"
    if not info_plist_path.exists():
        raise FileNotFoundError(f"Info.plist not found: {info_plist_path}")
    log_info(f"Adding keys to Info.plist: {info_plist_path}")
    # Info.plist.additions file is required
    additions_file = (
        ctx.root_dir / "resources" / "entitlements" / "Info.plist.additions"
    )
    if not additions_file.exists():
        raise FileNotFoundError(
            f"Required file not found: {additions_file}\n"
            "Info.plist.additions is required for build"
        )
    log_info(f"Reading additions from: {additions_file}")
    # Parse the additions file to extract key-value pairs
    import xml.etree.ElementTree as ET
    with open(additions_file, "r") as f:
        additions_content = f.read()
    # Wrap in a root element for parsing (the file holds bare <key>/<value> pairs)
    wrapped_content = f"<plist>{additions_content}</plist>"
    try:
        root = ET.fromstring(wrapped_content)
    except ET.ParseError as e:
        raise ValueError(f"Failed to parse Info.plist.additions: {e}")
    # Read the existing plist
    with open(info_plist_path, "rb") as f:
        plist_data = plistlib.load(f)
    # Parse key-value pairs from additions: elements alternate as
    # <key>Name</key> followed by its value element (<string>, <true/>, ...).
    elements = list(root)
    i = 0
    added_count = 0
    while i < len(elements):
        if elements[i].tag == "key":
            key = elements[i].text
            i += 1
            if i < len(elements):
                value_elem = elements[i]
                # Convert the XML value element into the matching Python type.
                if value_elem.tag == "string":
                    value = value_elem.text
                elif value_elem.tag == "true":
                    value = True
                elif value_elem.tag == "false":
                    value = False
                elif value_elem.tag == "integer":
                    value = int(value_elem.text)
                else:
                    # NOTE(review): unknown tags fall back to raw text — arrays
                    # and dicts are not supported by this parser; confirm the
                    # additions file only uses scalar values.
                    value = value_elem.text
                plist_data[key] = value
                log_info(f" Added {key}: {value}")
                added_count += 1
        i += 1
    if added_count == 0:
        raise ValueError("No keys found in Info.plist.additions")
    # Write the updated plist back to the bundle
    with open(info_plist_path, "wb") as f:
        plistlib.dump(plist_data, f)
    log_success(f"Added {added_count} keys to Info.plist from additions file")
def run_postbuild(ctx: BuildContext):
    """Run all post-build tasks for the built application bundle."""
    log_info("\n🔧 Running post-build tasks...")
    # Sparkle key injection is currently disabled; re-enable when needed:
    # add_sparkle_keys_to_info_plist(ctx)
    # Additional post-build steps slot in here as they are introduced.
    log_success("Post-build tasks completed")

View File

@@ -1,16 +1,29 @@
#!/usr/bin/env python3
"""
Chromium file replacement module for Nxtscape build system
"""
"""Chromium file replacement module for BrowserOS build system"""
import sys
import shutil
from pathlib import Path
from context import BuildContext
from utils import log_info, log_success, log_error, log_warning
from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.utils import log_info, log_success, log_error
def replace_chromium_files(ctx: BuildContext, replacements=None) -> bool:
class ChromiumReplaceModule(CommandModule):
    """Command module that overwrites Chromium sources with custom file copies."""

    produces = []
    requires = []
    description = "Replace Chromium source files with custom versions"

    def validate(self, ctx: Context) -> None:
        if not ctx.chromium_src.exists():
            raise ValidationError(f"Chromium source not found: {ctx.chromium_src}")

    def execute(self, ctx: Context) -> None:
        log_info("\n🔄 Replacing chromium files...")
        ok = replace_chromium_files_impl(ctx)
        if not ok:
            raise RuntimeError("Failed to replace chromium files")
def replace_chromium_files_impl(ctx: Context, replacements=None) -> bool:
"""Replace files in chromium source with custom files from chromium_files directory"""
log_info("\n🔄 Replacing chromium files...")
log_info(f" Build type: {ctx.build_type}")
@@ -109,7 +122,7 @@ def add_file_to_replacements(
replacement_dir = ctx.get_chromium_replace_files_dir()
dest_file = replacement_dir / relative_path
log_info(f"📂 Adding file to replacements:")
log_info("📂 Adding file to replacements:")
log_info(f" Source: {file_path}")
log_info(f" Destination: {dest_file}")
@@ -122,7 +135,7 @@ def add_file_to_replacements(
log_success(f"✓ File added to chromium_files replacements: {relative_path}")
log_info(
f" This file will be replaced during builds with --chromium-replace flag"
" This file will be replaced during builds with --chromium-replace flag"
)
return True
except Exception as e:

View File

@@ -1,19 +1,33 @@
#!/usr/bin/env python3
"""
Resource management module for Nxtscape build system
"""
"""Resource management module for BrowserOS build system"""
import sys
import glob
import shutil
import yaml
import subprocess
from pathlib import Path
from context import BuildContext
from utils import log_info, log_success, log_error, log_warning, get_platform
from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.utils import log_info, log_success, log_error, log_warning, get_platform
def copy_resources(ctx: BuildContext, commit_each: bool = False) -> bool:
class ResourcesModule(CommandModule):
    """Command module that copies icons and extensions into the Chromium tree."""

    produces = []
    requires = []
    description = "Copy resources (icons, extensions) to Chromium"

    def validate(self, ctx: Context) -> None:
        # The YAML copy config drives which resources are copied where.
        config_path = ctx.get_copy_resources_config()
        if not config_path.exists():
            raise ValidationError(f"Copy configuration file not found: {config_path}")

    def execute(self, ctx: Context) -> None:
        log_info("\n📦 Copying resources...")
        ok = copy_resources_impl(ctx, commit_each=False)
        if not ok:
            raise RuntimeError("Failed to copy resources")
def copy_resources_impl(ctx: Context, commit_each: bool = False) -> bool:
"""Copy AI extensions and icons based on YAML configuration"""
log_info("\n📦 Copying resources...")

View File

@@ -1,12 +1,25 @@
#!/usr/bin/env python3
"""
String replacement module for BrowserOS build system
"""
"""String replacement module for BrowserOS build system"""
import re
from pathlib import Path
from context import BuildContext
from utils import log_info, log_success, log_error, log_warning
from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.utils import log_info, log_success, log_error, log_warning
class StringReplacesModule(CommandModule):
    """Command module that rebrands Chromium via textual string replacements."""

    produces = []
    requires = []
    description = "Apply branding string replacements in Chromium"

    def validate(self, ctx: Context) -> None:
        # The replacement targets live inside the Chromium checkout.
        src = ctx.chromium_src
        if not src.exists():
            raise ValidationError(f"Chromium source not found: {src}")

    def execute(self, ctx: Context) -> None:
        log_info("\n🔤 Applying string replacements...")
        ok = apply_string_replacements_impl(ctx)
        if not ok:
            raise RuntimeError("Failed to apply string replacements")
# Strings we want to replace but that we also replace automatically
@@ -34,9 +47,8 @@ target_files = [
]
def apply_string_replacements(ctx: BuildContext) -> bool:
"""Apply string replacements to specified files"""
log_info("\n🔤 Applying string replacements...")
def apply_string_replacements_impl(ctx: Context) -> bool:
"""Internal implementation for applying string replacements"""
success = True
@@ -71,7 +83,7 @@ def apply_string_replacements(ctx: BuildContext) -> bool:
f.write(content)
log_success(f" Updated with {replacement_count} total replacements")
else:
log_info(f" No replacements needed")
log_info(" No replacements needed")
except Exception as e:
log_error(f" Error processing {file_path}: {e}")

View File

@@ -0,0 +1,58 @@
#!/usr/bin/env python3
"""Clean module for BrowserOS build system"""
from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.utils import run_command, log_info, log_success, safe_rmtree
class CleanModule(CommandModule):
    """Command module that wipes build output and restores a pristine git tree."""

    produces = []
    requires = []
    description = "Clean build artifacts and reset git state"

    def validate(self, ctx: Context) -> None:
        if not ctx.chromium_src.exists():
            raise ValidationError(f"Chromium source not found: {ctx.chromium_src}")

    def execute(self, ctx: Context) -> None:
        log_info("🧹 Cleaning build artifacts...")
        build_dir = ctx.chromium_src / ctx.out_dir
        if build_dir.exists():
            safe_rmtree(build_dir)
            log_success("Cleaned build directory")
        log_info("\n🔀 Resetting git branch and removing tracked files...")
        self._git_reset(ctx)
        log_info("\n🧹 Cleaning Sparkle build artifacts...")
        self._clean_sparkle(ctx)

    def _clean_sparkle(self, ctx: Context) -> None:
        # Sparkle artifacts live outside the Chromium out/ directory.
        sparkle_dir = ctx.get_sparkle_dir()
        if sparkle_dir.exists():
            safe_rmtree(sparkle_dir)
            log_success("Cleaned Sparkle build directory")

    def _git_reset(self, ctx: Context) -> None:
        run_command(["git", "reset", "--hard", "HEAD"], cwd=ctx.chromium_src)
        log_info("🧹 Running git clean with exclusions...")
        # Clean only chrome/ and components/, keeping toolchain dirs intact.
        clean_cmd = [
            "git",
            "clean",
            "-fdx",
            "chrome/",
            "components/",
            "--exclude=third_party/",
            "--exclude=build_tools/",
            "--exclude=uc_staging/",
            "--exclude=buildtools/",
            "--exclude=tools/",
            "--exclude=build/",
        ]
        run_command(clean_cmd, cwd=ctx.chromium_src)
        log_success("Git reset and clean complete")

View File

@@ -0,0 +1,42 @@
#!/usr/bin/env python3
"""Build configuration module for BrowserOS build system"""
from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.utils import run_command, log_info, log_success, join_paths, IS_WINDOWS
class ConfigureModule(CommandModule):
    """Command module that generates GN build files for the selected target."""

    produces = []
    requires = []
    description = "Configure build with GN"

    def validate(self, ctx: Context) -> None:
        if not ctx.chromium_src.exists():
            raise ValidationError(f"Chromium source not found: {ctx.chromium_src}")
        if not ctx.paths.gn_flags_file:
            raise ValidationError("GN flags file not set")
        flags_file = join_paths(ctx.root_dir, ctx.paths.gn_flags_file)
        if not flags_file.exists():
            raise ValidationError(f"GN flags file not found: {flags_file}")

    def execute(self, ctx: Context) -> None:
        log_info(f"\n⚙️ Configuring {ctx.build_type} build for {ctx.architecture}...")
        out_path = join_paths(ctx.chromium_src, ctx.out_dir)
        out_path.mkdir(parents=True, exist_ok=True)

        # Seed the args.gn from the checked-in flags file plus the target CPU.
        flags_file = join_paths(ctx.root_dir, ctx.paths.gn_flags_file)
        args_file = ctx.get_gn_args_file()
        args_file.write_text(
            flags_file.read_text() + f'\ntarget_cpu = "{ctx.architecture}"\n'
        )

        # gn ships as gn.bat on Windows
        gn_binary = "gn.bat" if IS_WINDOWS() else "gn"
        run_command(
            [gn_binary, "gen", ctx.out_dir, "--fail-on-unused-args"],
            cwd=ctx.chromium_src,
        )
        log_success("Build configured")

View File

@@ -0,0 +1,97 @@
#!/usr/bin/env python3
"""Git operations module for BrowserOS build system"""
import subprocess
import tarfile
import urllib.request
from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.utils import run_command, log_info, log_error, log_success, IS_WINDOWS, safe_rmtree
class GitSetupModule(CommandModule):
    """Command module that checks out a Chromium tag and syncs its dependencies."""

    produces = []
    requires = []
    description = "Checkout Chromium version and sync dependencies"

    def validate(self, ctx: Context) -> None:
        if not ctx.chromium_src.exists():
            raise ValidationError(f"Chromium source not found: {ctx.chromium_src}")
        if not ctx.chromium_version:
            raise ValidationError("Chromium version not set")

    def execute(self, ctx: Context) -> None:
        log_info(f"\n🔀 Setting up Chromium {ctx.chromium_version}...")
        log_info("📥 Fetching all tags from remote...")
        run_command(["git", "fetch", "--tags", "--force"], cwd=ctx.chromium_src)

        self._verify_tag_exists(ctx)

        log_info(f"🔀 Checking out tag: {ctx.chromium_version}")
        run_command(
            ["git", "checkout", f"tags/{ctx.chromium_version}"], cwd=ctx.chromium_src
        )

        log_info("📥 Syncing dependencies (this may take a while)...")
        # gclient ships as gclient.bat on Windows
        gclient = "gclient.bat" if IS_WINDOWS() else "gclient"
        run_command(
            [gclient, "sync", "-D", "--no-history", "--shallow"], cwd=ctx.chromium_src
        )
        log_success("Git setup complete")

    def _verify_tag_exists(self, ctx: Context) -> None:
        # `git tag -l <name>` prints the tag only when it exists locally.
        probe = subprocess.run(
            ["git", "tag", "-l", ctx.chromium_version],
            text=True,
            capture_output=True,
            cwd=ctx.chromium_src,
        )
        if probe.stdout and ctx.chromium_version in probe.stdout:
            return

        log_error(f"Tag {ctx.chromium_version} not found!")
        log_info("Available tags (last 10):")
        listing = subprocess.run(
            ["git", "tag", "-l", "--sort=-version:refname"],
            text=True,
            capture_output=True,
            cwd=ctx.chromium_src,
        )
        if listing.stdout:
            for tag in listing.stdout.strip().split("\n")[:10]:
                log_info(f" {tag}")
        raise ValidationError(f"Git tag {ctx.chromium_version} not found")
class SparkleSetupModule(CommandModule):
    """Download and unpack the Sparkle auto-update framework (macOS only)."""

    produces = []
    requires = []
    description = "Download and setup Sparkle framework (macOS only)"

    def validate(self, ctx: Context) -> None:
        from ...common.utils import IS_MACOS
        if not IS_MACOS():
            raise ValidationError("Sparkle setup requires macOS")

    def execute(self, ctx: Context) -> None:
        """Download the Sparkle archive and extract it into a fresh directory.

        Raises:
            Exceptions from urllib/tarfile propagate on download or
            extraction failure.
        """
        log_info("\n✨ Setting up Sparkle framework...")
        sparkle_dir = ctx.get_sparkle_dir()
        # Start from a clean slate so stale framework files never linger.
        if sparkle_dir.exists():
            safe_rmtree(sparkle_dir)
        sparkle_dir.mkdir(parents=True)

        sparkle_url = ctx.get_sparkle_url()
        sparkle_archive = sparkle_dir / "sparkle.tar.xz"
        log_info(f"Downloading Sparkle from {sparkle_url}...")
        urllib.request.urlretrieve(sparkle_url, sparkle_archive)

        log_info("Extracting Sparkle...")
        with tarfile.open(sparkle_archive, "r:xz") as tar:
            try:
                # The "data" filter rejects absolute paths, ../ traversal and
                # special files in the downloaded archive (CVE-2007-4559).
                tar.extractall(sparkle_dir, filter="data")
            except TypeError:
                # Python without the `filter` parameter (pre-3.12 backports).
                tar.extractall(sparkle_dir)
        sparkle_archive.unlink()
        log_success("Sparkle setup complete")

View File

@@ -0,0 +1,28 @@
#!/usr/bin/env python3
"""Linux signing module for BrowserOS"""
from typing import List
from ...common.module import CommandModule
from ...common.context import Context
from ...common.utils import log_info, log_warning
class LinuxSignModule(CommandModule):
    """No-op signing module: Linux packages are distributed unsigned."""

    produces = []
    requires = []
    description = "Linux code signing (no-op)"

    def validate(self, ctx: Context) -> None:
        # Nothing to verify; there is no signing toolchain on Linux.
        pass

    def execute(self, ctx: Context) -> None:
        log_info("Code signing is not required for Linux packages")
def sign_universal(contexts: List[Context]) -> bool:
    """Report that universal (multi-arch) signing is unsupported on Linux.

    Universal binaries are a macOS concept; warn and report success so the
    pipeline continues.
    """
    log_warning("Universal signing is not supported on Linux")
    return True
def check_signing_environment() -> bool:
    """Always succeed: Linux builds need no signing environment variables."""
    return True

View File

@@ -0,0 +1,869 @@
#!/usr/bin/env python3
"""Application signing and notarization module for BrowserOS (macOS)"""
import os
import sys
import subprocess
import shutil
from pathlib import Path
from typing import Optional, List, Dict, Tuple
from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.env import EnvConfig
from ...common.utils import (
run_command as utils_run_command,
log_info,
log_error,
log_success,
log_warning,
IS_MACOS,
join_paths,
)
# Central list of BrowserOS Server binaries we need to sign explicitly.
# Each entry controls identifiers, signing options, and entitlement files so
# adding a new binary is a one-line update here rather than scattered changes.
# Keys are lowercase binary stems (matched by get_browseros_server_binary_info);
# each value holds:
#   identifier_suffix - appended to the base bundle identifier
#   options           - value passed to `codesign --options`
#   entitlements      - entitlements plist filename, resolved against the
#                       entitlements directory at signing time
BROWSEROS_SERVER_BINARIES: Dict[str, Dict[str, str]] = {
    "browseros_server": {
        "identifier_suffix": "browseros_server",
        "options": "runtime",
        "entitlements": "browseros-executable-entitlements.plist",
    },
    "codex": {
        "identifier_suffix": "codex",
        "options": "runtime",
        "entitlements": "browseros-executable-entitlements.plist",
    },
}
def get_browseros_server_binary_info(component_path: Path) -> Optional[Dict[str, str]]:
    """Return metadata for known BrowserOS Server binaries, if applicable.

    Lookup is by the lowercase filename stem; returns None for unknown binaries.
    """
    return BROWSEROS_SERVER_BINARIES.get(component_path.stem.lower())
def run_command(
    cmd: List[str],
    cwd: Optional[Path] = None,
    check: bool = True,
) -> subprocess.CompletedProcess:
    """Run a command and handle errors.

    Thin module-local alias for the shared utils run_command so call sites in
    this file stay short; forwards cwd/check unchanged.
    """
    return utils_run_command(cmd, cwd=cwd, check=check)
class MacOSSignModule(CommandModule):
    """Sign every component of the BrowserOS app bundle and notarize it.

    Requires MACOS_CERTIFICATE_NAME and the PROD_MACOS_NOTARIZATION_* env
    variables (see check_environment).
    """

    produces = ["signed_app"]
    requires = ["built_app"]
    description = "Sign and notarize macOS application"

    def validate(self, ctx: Context) -> None:
        # BUGFIX: IS_MACOS is imported from common.utils where it is a callable
        # (other call sites in this codebase use IS_MACOS()/IS_WINDOWS()); the
        # bare `not IS_MACOS` was always False, so this platform guard never fired.
        if not IS_MACOS():
            raise ValidationError("macOS signing requires macOS")
        app_path = ctx.get_app_path()
        if not app_path.exists():
            raise ValidationError(f"App not found at: {app_path}")
        env_ok, env_vars = check_environment()
        if not env_ok:
            raise ValidationError("Required signing environment variables not set")

    def execute(self, ctx: Context) -> None:
        log_info("=" * 70)
        log_info("🚀 Starting signing process for BrowserOS...")
        log_info("=" * 70)
        app_path = ctx.get_app_path()
        # validate() already confirmed the environment; re-read it for values.
        env_ok, env_vars = check_environment()
        self._clear_extended_attributes(app_path)
        self._sign_all_components(app_path, env_vars["certificate_name"], ctx)
        self._verify_signature(app_path)
        self._notarize(app_path, env_vars, ctx)
        ctx.artifact_registry.add("signed_app", app_path)
        log_success("Application signed and notarized successfully")

    def _clear_extended_attributes(self, app_path: Path) -> None:
        # Stray xattrs (quarantine, Finder info) break codesign verification.
        log_info("🧹 Clearing extended attributes...")
        run_command(["xattr", "-cs", str(app_path)])

    def _sign_all_components(self, app_path: Path, certificate_name: str, ctx: Context) -> None:
        if not sign_all_components(app_path, certificate_name, ctx.root_dir, ctx):
            raise RuntimeError("Failed to sign all components")

    def _verify_signature(self, app_path: Path) -> None:
        if not verify_signature(app_path):
            raise RuntimeError("Signature verification failed")

    def _notarize(self, app_path: Path, env_vars: Dict[str, str], ctx: Context) -> None:
        if not notarize_app(app_path, ctx.root_dir, env_vars, ctx):
            raise RuntimeError("Notarization failed")
def check_signing_environment() -> bool:
    """Check if all required environment variables are set for signing (early check).

    Returns:
        True on non-macOS hosts (nothing to check) or when every required
        variable is present; False otherwise, after logging what is missing.
    """
    # BUGFIX: IS_MACOS is a callable (it is invoked as IS_MACOS() elsewhere in
    # this codebase); the bare `not IS_MACOS` was always False, so non-macOS
    # hosts fell through to the env-var checks instead of passing early.
    if not IS_MACOS():
        return True
    env = EnvConfig()
    missing = []
    if not env.macos_certificate_name:
        missing.append("MACOS_CERTIFICATE_NAME")
    if not env.macos_notarization_apple_id:
        missing.append("PROD_MACOS_NOTARIZATION_APPLE_ID")
    if not env.macos_notarization_team_id:
        missing.append("PROD_MACOS_NOTARIZATION_TEAM_ID")
    if not env.macos_notarization_password:
        missing.append("PROD_MACOS_NOTARIZATION_PWD")
    if missing:
        log_error("❌ Signing requires macOS environment variables!")
        log_error(f"Missing environment variables: {', '.join(missing)}")
        log_error("Please set all required environment variables before signing.")
        return False
    return True
def check_environment() -> Tuple[bool, Dict[str, str]]:
    """Collect signing credentials from the environment.

    Returns:
        (all_present, values) where values maps logical credential names to
        their environment values (empty string when unset).
    """
    env = EnvConfig()
    env_vars = {
        "certificate_name": env.macos_certificate_name or "",
        "apple_id": env.macos_notarization_apple_id or "",
        "team_id": env.macos_notarization_team_id or "",
        "notarization_pwd": env.macos_notarization_password or "",
    }
    # Map logical keys back to the environment variable names for reporting.
    env_names = {
        "certificate_name": "MACOS_CERTIFICATE_NAME",
        "apple_id": "PROD_MACOS_NOTARIZATION_APPLE_ID",
        "team_id": "PROD_MACOS_NOTARIZATION_TEAM_ID",
        "notarization_pwd": "PROD_MACOS_NOTARIZATION_PWD",
    }
    missing = [env_names[key] for key, value in env_vars.items() if not value]
    if missing:
        log_error(f"Required environment variables not set: {', '.join(missing)}")
        return False, env_vars
    return True, env_vars
def find_components_to_sign(
    app_path: Path, ctx: Optional[Context] = None
) -> Dict[str, List[Path]]:
    """Dynamically find all components that need signing.

    Walks Contents/Frameworks (and Contents/Resources/BrowserOSServer) of the
    app bundle and buckets everything discovered by kind so callers can sign
    bottom-up.

    Args:
        app_path: Path to the built .app bundle.
        ctx: Optional build context; when present, its
            browseros_chromium_version selects the versioned framework path.

    Returns:
        Dict with keys "helpers", "xpc_services", "frameworks", "dylibs",
        "executables", "apps", each mapping to a list of discovered paths.
    """
    components = {
        "helpers": [],
        "xpc_services": [],
        "frameworks": [],
        "dylibs": [],
        "executables": [],
        "apps": [],
    }
    framework_path = join_paths(app_path, "Contents", "Frameworks")
    # Check both versioned and non-versioned paths for BrowserOS Framework
    # Handle both release and debug framework names
    framework_names = [
        "BrowserOS Framework.framework",
        "BrowserOS Dev Framework.framework",
    ]
    nxtscape_framework_paths = []
    for fw_name in framework_names:
        fw_path = join_paths(framework_path, fw_name)
        if fw_path.exists():
            nxtscape_framework_paths.append(fw_path)
        # Add versioned path if context is available
        if ctx and ctx.browseros_chromium_version:
            versioned_path = join_paths(
                fw_path, "Versions", ctx.browseros_chromium_version
            )
            if versioned_path.exists():
                nxtscape_framework_paths.insert(
                    0, versioned_path
                )  # Prioritize versioned path
    # Find all helper apps
    for nxtscape_fw_path in nxtscape_framework_paths:
        helpers_dir = join_paths(nxtscape_fw_path, "Helpers")
        if helpers_dir.exists():
            # Find all .app helpers
            components["helpers"].extend(helpers_dir.glob("*.app"))
            # Find all executable helpers (files without extension)
            for item in helpers_dir.iterdir():
                if item.is_file() and not item.suffix and os.access(item, os.X_OK):
                    components["executables"].append(item)
            break  # Use the first valid path found
    # Find all XPC services
    for xpc_path in framework_path.rglob("*.xpc"):
        components["xpc_services"].append(xpc_path)
    # Find all frameworks (with special handling for Sparkle)
    for fw_path in framework_path.rglob("*.framework"):
        components["frameworks"].append(fw_path)
        # Special handling for Sparkle framework versioned structure
        if "Sparkle.framework" in str(fw_path):
            # Look for Sparkle's versioned executables at Versions/B/
            sparkle_version_b = join_paths(fw_path, "Versions", "B")
            if sparkle_version_b.exists():
                # Add Autoupdate executable if it exists
                autoupdate = join_paths(sparkle_version_b, "Autoupdate")
                if autoupdate.exists() and autoupdate.is_file():
                    components["executables"].append(autoupdate)
    # Find all dylibs (check versioned path for BrowserOS Framework libraries)
    for nxtscape_fw_path in nxtscape_framework_paths:
        libraries_dir = join_paths(nxtscape_fw_path, "Libraries")
        if libraries_dir.exists():
            components["dylibs"].extend(libraries_dir.glob("*.dylib"))
    # Also find dylibs in other frameworks (deduplicated against the above)
    for dylib_path in framework_path.rglob("*.dylib"):
        if dylib_path not in components["dylibs"]:
            components["dylibs"].append(dylib_path)
    # Find all nested apps (like Updater.app in Sparkle)
    for nested_app in framework_path.rglob("*.app"):
        if nested_app not in components["helpers"]:
            components["apps"].append(nested_app)
    # Find BrowserOS Server binaries: any extensionless executable file under
    # Contents/Resources/BrowserOSServer is treated as signable.
    browseros_server_dir = join_paths(app_path, "Contents", "Resources", "BrowserOSServer")
    if browseros_server_dir.exists():
        for item in browseros_server_dir.rglob("*"):
            if item.is_file() and not item.suffix and os.access(item, os.X_OK):
                components["executables"].append(item)
    return components
def get_identifier_for_component(
    component_path: Path, base_identifier: str = "com.browseros"
) -> str:
    """Generate a codesign identifier for a component from its path and name."""
    name = component_path.stem
    path_str = str(component_path)

    # Well-known components keep their upstream identifiers.
    special_identifiers = {
        "Downloader": "org.sparkle-project.Downloader",
        "Installer": "org.sparkle-project.Installer",
        "Updater": "org.sparkle-project.Updater",
        "Autoupdate": "org.sparkle-project.Autoupdate",
        "Sparkle": "org.sparkle-project.Sparkle",
        "chrome_crashpad_handler": f"{base_identifier}.crashpad_handler",
        "app_mode_loader": f"{base_identifier}.app_mode_loader",
        "web_app_shortcut_copier": f"{base_identifier}.web_app_shortcut_copier",
    }
    for key, identifier in special_identifiers.items():
        if key in path_str:
            return identifier

    # BrowserOS Server binaries share entitlements/options but need unique ids.
    server_info = get_browseros_server_binary_info(component_path)
    if server_info is not None:
        suffix = server_info.get("identifier_suffix", component_path.stem)
        return f"{base_identifier}.{suffix}"

    # Helper apps: encode the helper flavour (gpu/renderer/plugin/alerts).
    if "Helper" in name:
        if "(" in name and ")" in name:
            helper_type = name[name.find("(") + 1 : name.find(")")].lower()
            return f"{base_identifier}.helper.{helper_type}"
        return f"{base_identifier}.helper"

    # Frameworks: the main BrowserOS framework gets the canonical identifier.
    if component_path.suffix == ".framework":
        if name in ("BrowserOS Framework", "BrowserOS Dev Framework"):
            return f"{base_identifier}.framework"
        return f"{base_identifier}.{name.replace(' ', '_').lower()}"

    # Dynamic libraries keep their stem verbatim.
    if component_path.suffix == ".dylib":
        return f"{base_identifier}.{name}"

    # Everything else: normalized stem under the base identifier.
    return f"{base_identifier}.{name.replace(' ', '_').lower()}"
def get_signing_options(component_path: Path) -> str:
    """Determine the codesign --options value for a component by its type."""
    name = component_path.name

    # Sparkle components get minimal restrictions.
    if "sparkle" in str(component_path).lower():
        return "runtime"

    # Sandboxed Chromium helpers need the hardened restrict/kill combination.
    sandboxed_helpers = ("Helper (Renderer)", "Helper (GPU)", "Helper (Plugin)")
    if any(helper in name for helper in sandboxed_helpers):
        return "restrict,kill,runtime"

    # Known BrowserOS Server binaries share the same relaxed options.
    server_info = get_browseros_server_binary_info(component_path)
    if server_info is not None:
        return server_info.get("options", "runtime")

    # Dynamic libraries are the only components that get the library flag.
    if component_path.suffix == ".dylib":
        return "restrict,library,runtime,kill"

    # Default for other executables - no library flag
    return "runtime"
def sign_component(
    component_path: Path,
    certificate_name: str,
    identifier: Optional[str] = None,
    options: Optional[str] = None,
    entitlements: Optional[Path] = None,
) -> bool:
    """Sign one component with codesign.

    Optional identifier/options/entitlements are appended only when provided
    (entitlements only when the file actually exists).

    Returns:
        True on success, False when codesign fails (error is logged).
    """
    cmd = ["codesign", "--sign", certificate_name, "--force", "--timestamp"]
    if identifier:
        cmd += ["--identifier", identifier]
    if options:
        cmd += ["--options", options]
    if entitlements and entitlements.exists():
        cmd += ["--entitlements", str(entitlements)]
    cmd.append(str(component_path))
    try:
        run_command(cmd)
    except Exception as e:
        log_error(f"Failed to sign {component_path}: {e}")
        return False
    return True
def sign_all_components(
    app_path: Path,
    certificate_name: str,
    root_dir: Path,
    ctx: Optional[Context] = None,
) -> bool:
    """Sign all components in the correct order (bottom-up).

    macOS requires nested code to be signed before its container, so the
    discovered components are signed innermost-first: XPC services, nested
    apps, executables, dylibs, helper apps, frameworks, the main
    executable, and finally the app bundle itself. Stops and returns False
    at the first component that fails to sign.

    Args:
        app_path: Path to the .app bundle being signed.
        certificate_name: codesign identity (certificate name) to sign with.
        root_dir: Repository root; used for fallback entitlements locations.
        ctx: Optional build Context used to resolve the entitlements dir.

    Returns:
        True if every component (and the bundle) signed successfully.
    """
    log_info("🔍 Discovering components to sign...")
    components = find_components_to_sign(app_path, ctx)
    # Print summary of what was discovered, per category.
    total_components = sum(len(items) for items in components.values())
    log_info(f"Found {total_components} components to sign:")
    for category, items in components.items():
        if items:
            log_info(f"{category}: {len(items)} items")
    # Sign in correct order (bottom-up)
    # 1. Sign XPC Services first
    log_info("\n🔏 Signing XPC Services...")
    for xpc in components["xpc_services"]:
        identifier = get_identifier_for_component(xpc)
        options = get_signing_options(xpc)
        if not sign_component(xpc, certificate_name, identifier, options):
            return False
    # 2. Sign nested apps (like Sparkle's Updater.app)
    if components["apps"]:
        log_info("\n🔏 Signing nested applications...")
        for nested_app in components["apps"]:
            identifier = get_identifier_for_component(nested_app)
            options = get_signing_options(nested_app)
            if not sign_component(nested_app, certificate_name, identifier, options):
                return False
    # 3. Sign executables
    if components["executables"]:
        log_info("\n🔏 Signing executables...")
        # Get entitlements directory from context
        entitlements_dirs = []
        if ctx:
            entitlements_dirs.append(ctx.get_entitlements_dir())
        for exe in components["executables"]:
            identifier = get_identifier_for_component(exe)
            options = get_signing_options(exe)
            # Check for specific entitlements: BrowserOS Server binaries may
            # declare an entitlements file name in their info dict.
            entitlements = None
            browseros_server_info = get_browseros_server_binary_info(exe)
            if browseros_server_info:
                entitlements_name = browseros_server_info.get("entitlements")
                if entitlements_name:
                    for ent_dir in entitlements_dirs:
                        ent_path = join_paths(ent_dir, entitlements_name)
                        if ent_path.exists():
                            entitlements = ent_path
                            break
            if not sign_component(exe, certificate_name, identifier, options, entitlements):
                return False
    # 4. Sign dylibs
    # NOTE(review): dylibs are signed WITHOUT explicit --options here even
    # though get_signing_options() would return
    # "restrict,library,runtime,kill" for .dylib files — confirm intended.
    if components["dylibs"]:
        log_info("\n🔏 Signing dynamic libraries...")
        for dylib in components["dylibs"]:
            identifier = get_identifier_for_component(dylib)
            if not sign_component(dylib, certificate_name, identifier):
                return False
    # 5. Sign helper apps
    if components["helpers"]:
        log_info("\n🔏 Signing helper applications...")
        # Get entitlements directory from context
        entitlements_dirs = []
        if ctx:
            entitlements_dirs.append(ctx.get_entitlements_dir())
        for helper in components["helpers"]:
            identifier = get_identifier_for_component(helper)
            options = get_signing_options(helper)
            # Map each sandboxed Chromium helper flavor to its entitlements
            # plist by substring match on the helper's name.
            entitlements = None
            entitlements_name = None
            if "Renderer" in helper.name:
                entitlements_name = "helper-renderer-entitlements.plist"
            elif "GPU" in helper.name:
                entitlements_name = "helper-gpu-entitlements.plist"
            elif "Plugin" in helper.name:
                entitlements_name = "helper-plugin-entitlements.plist"
            if entitlements_name:
                for ent_dir in entitlements_dirs:
                    ent_path = join_paths(ent_dir, entitlements_name)
                    if ent_path.exists():
                        entitlements = ent_path
                        break
            if not sign_component(
                helper, certificate_name, identifier, options, entitlements
            ):
                return False
    # 6. Sign frameworks (except the main BrowserOS Framework)
    if components["frameworks"]:
        log_info("\n🔏 Signing frameworks...")
        # Sort to sign Sparkle.framework before BrowserOS Framework.framework
        frameworks_sorted = sorted(
            components["frameworks"], key=lambda x: 0 if "Sparkle" in x.name else 1
        )
        for framework in frameworks_sorted:
            identifier = get_identifier_for_component(framework)
            if not sign_component(framework, certificate_name, identifier):
                return False
    # 7. Sign main executable
    log_info("\n🔏 Signing main executable...")
    # Handle both release and debug executable names
    main_exe_names = ["BrowserOS", "BrowserOS Dev"]
    main_exe = None
    for exe_name in main_exe_names:
        exe_path = join_paths(app_path, "Contents", "MacOS", exe_name)
        if exe_path.exists():
            main_exe = exe_path
            break
    if not main_exe:
        log_error(
            f"Main executable not found in {join_paths(app_path, 'Contents', 'MacOS')}"
        )
        return False
    if not sign_component(main_exe, certificate_name, "com.browseros.BrowserOS"):
        return False
    # 8. Finally sign the app bundle
    log_info("\n🔏 Signing application bundle...")
    # Designated requirement pinning the bundle identifier and specific
    # Apple certificate fields (presumably the Developer ID chain — the
    # OID meanings are not verifiable from this file; confirm against
    # Apple's code signing requirement language docs).
    requirements = (
        '=designated => identifier "com.browseros.BrowserOS" and '
        "anchor apple generic and certificate 1[field.1.2.840.113635.100.6.2.6] /* exists */ and "
        "certificate leaf[field.1.2.840.113635.100.6.1.13] /* exists */"
    )
    # Try multiple locations for app entitlements
    entitlements = None
    entitlements_names = ["app-entitlements.plist", "app-entitlements-chrome.plist"]
    entitlements_dirs = []
    if ctx:
        entitlements_dirs.append(ctx.get_entitlements_dir())
    else:
        entitlements_dirs.append(join_paths(root_dir, "resources", "entitlements"))
    # Add fallback locations
    entitlements_dirs.extend(
        [
            join_paths(root_dir, "entitlements"),  # Legacy location
            join_paths(root_dir, "build", "src", "chrome", "app"),
            join_paths(
                app_path.parent.parent.parent, "chrome", "app"
            ),  # Chromium source
        ]
    )
    # First existing combination wins; entitlements file names take
    # precedence over directory order.
    for ent_name in entitlements_names:
        for ent_dir in entitlements_dirs:
            ent_path = join_paths(ent_dir, ent_name)
            if ent_path.exists():
                entitlements = ent_path
                log_info(f" Using entitlements: {entitlements}")
                break
        if entitlements:
            break
    cmd = [
        "codesign",
        "--sign",
        certificate_name,
        "--force",
        "--timestamp",
        "--identifier",
        "com.browseros.BrowserOS",
        "--options",
        "restrict,library,runtime,kill",
        "--requirements",
        requirements,
    ]
    if entitlements:
        cmd.extend(["--entitlements", str(entitlements)])
    else:
        log_warning("No app entitlements file found, signing without entitlements")
    cmd.append(str(app_path))
    try:
        run_command(cmd)
    except Exception:
        return False
    return True
def verify_signature(app_path: Path) -> bool:
    """Run a deep, strict codesign verification on the app bundle."""
    log_info("\n🔍 Verifying application signature integrity...")
    verify_cmd = [
        "codesign",
        "--verify",
        "--deep",
        "--strict",
        "--verbose=2",
        str(app_path),
    ]
    outcome = run_command(verify_cmd, check=False)
    if outcome.returncode == 0:
        log_success("Signature verification passed")
        return True
    log_error("Signature verification failed!")
    return False
def notarize_app(
    app_path: Path,
    root_dir: Path,
    env_vars: Dict[str, str],
    ctx: Optional[Context] = None,
) -> bool:
    """Notarize the application with Apple and staple the ticket.

    Steps: zip the bundle with ditto, store notarytool credentials under
    the "notarytool-profile" keychain profile, submit with --wait, require
    "status: Accepted" in notarytool's output, staple the ticket, then
    verify via Gatekeeper (spctl) and stapler validation.

    Args:
        app_path: Signed .app bundle to notarize.
        root_dir: Repository root; fallback zip location when no ctx.
        env_vars: Must contain "apple_id", "team_id", "notarization_pwd".
        ctx: Optional Context used to resolve the notarization zip path.

    Returns:
        True only when notarization, stapling and verification all pass.
    """
    log_info("\n📤 Preparing for notarization...")
    # Create zip for notarization; ditto --keepParent preserves the bundle
    # as the archive's top-level entry.
    notarize_zip = (
        ctx.get_notarization_zip() if ctx else join_paths(root_dir, "notarize.zip")
    )
    if notarize_zip.exists():
        notarize_zip.unlink()
    run_command(["ditto", "-c", "-k", "--keepParent", str(app_path), str(notarize_zip)])
    log_success("Archive created for notarization")
    # Store credentials in the keychain profile used by later calls.
    log_info("🔑 Storing notarization credentials...")
    run_command(
        [
            "xcrun",
            "notarytool",
            "store-credentials",
            "notarytool-profile",
            "--apple-id",
            env_vars["apple_id"],
            "--team-id",
            env_vars["team_id"],
            "--password",
            env_vars["notarization_pwd"],
        ],
        check=False,
    )  # May fail if already stored
    # Submit for notarization; --wait blocks until Apple finishes processing.
    log_info("📤 Submitting application for notarization (this may take a while)...")
    result = run_command(
        [
            "xcrun",
            "notarytool",
            "submit",
            str(notarize_zip),
            "--keychain-profile",
            "notarytool-profile",
            "--wait",
        ],
        check=False,
    )
    log_info(result.stdout)
    if result.stderr:
        log_error(result.stderr)
    if result.returncode != 0:
        log_error("Notarization submission failed")
        return False
    # A zero exit code alone is not enough: the submission can still be
    # rejected, so require the explicit Accepted status in the output.
    if "status: Accepted" not in result.stdout:
        log_error("App notarization failed - status was not 'Accepted'")
        # Try to extract submission ID for debugging
        for line in result.stdout.split("\n"):
            if "id:" in line:
                submission_id = line.split("id:")[1].strip().split()[0]
                log_info(
                    f'Get detailed logs with: xcrun notarytool log {submission_id} --keychain-profile "notarytool-profile"'
                )
                break
        return False
    log_success("App notarization successful - status: Accepted")
    # Staple the ticket so the app validates offline.
    log_info("📎 Stapling notarization ticket to application...")
    result = run_command(["xcrun", "stapler", "staple", str(app_path)], check=False)
    if result.returncode != 0:
        log_error("Failed to staple notarization ticket!")
        return False
    log_success("Notarization ticket stapled successfully")
    # Clean up the temporary zip (only reached on the success path).
    notarize_zip.unlink()
    # Verify notarization
    log_info("\n🔍 Verifying notarization status...")
    # Check Gatekeeper assessment.
    result = run_command(["spctl", "-a", "-vvv", str(app_path)], check=False)
    if result.returncode != 0:
        log_error("Gatekeeper check failed!")
        return False
    # Validate stapling
    result = run_command(["xcrun", "stapler", "validate", str(app_path)], check=False)
    if result.returncode != 0:
        log_error("Stapler validation failed!")
        return False
    log_success("Notarization and stapling verification passed")
    return True
def sign_app(ctx: Context, create_dmg: bool = True) -> bool:
    """Sign, verify, notarize and optionally package the app as a DMG.

    Orchestrates the full macOS release pipeline using the build Context:
    clears extended attributes, signs every component bottom-up, verifies
    the signature, notarizes with Apple, and (when create_dmg is True)
    creates a signed + notarized DMG in the dist directory.

    Args:
        ctx: Build Context providing app/dist paths and tool locations.
        create_dmg: When True, also build and notarize the DMG package.

    Returns:
        True only when every step succeeded (no tracked errors).
    """
    log_info("=" * 70)
    log_info("🚀 Starting signing process for BrowserOS...")
    log_info("=" * 70)
    # Error tracking similar to the legacy bash script: collect messages so
    # a summary can be printed even after a partial failure.
    error_count = 0
    error_messages = []

    def track_error(msg: str):
        nonlocal error_count
        error_count += 1
        error_messages.append(f"ERROR {error_count}: {msg}")
        log_error(msg)

    # Check required signing/notarization environment variables up front.
    env_ok, env_vars = check_environment()
    if not env_ok:
        return False
    # Resolve the app bundle to sign.
    app_path = ctx.get_app_path()
    # Resolve the DMG output path if packaging was requested.
    dmg_path = None
    if create_dmg:
        dmg_dir = ctx.get_dist_dir()
        dmg_name = ctx.get_dmg_name(True)
        dmg_path = join_paths(dmg_dir, dmg_name)
    # Verify app exists before doing any work.
    if not app_path.exists():
        log_error(f"App not found at: {app_path}")
        return False
    try:
        # Clear extended attributes; stale xattrs can break codesign.
        log_info("🧹 Clearing extended attributes...")
        run_command(["xattr", "-cs", str(app_path)])
        # Sign all components bottom-up.
        if not sign_all_components(
            app_path, env_vars["certificate_name"], ctx.root_dir, ctx
        ):
            return False
        # Verify the resulting signature before notarizing.
        if not verify_signature(app_path):
            return False
        # Notarize with Apple and staple the ticket.
        if not notarize_app(app_path, ctx.root_dir, env_vars, ctx):
            return False
        # Create and notarize DMG if requested
        if create_dmg:
            print("\n" + "=" * 70)
            log_info("📦 Creating and notarizing DMG package")
            log_info("=" * 70)
            from ..package.macos import create_signed_notarized_dmg

            # Find pkg-dmg tool
            pkg_dmg_path = ctx.get_pkg_dmg_path()
            # Create, sign, and notarize DMG
            if dmg_path and not create_signed_notarized_dmg(
                app_path=app_path,
                dmg_path=dmg_path,
                certificate_name=env_vars["certificate_name"],
                volume_name="BrowserOS",
                pkg_dmg_path=pkg_dmg_path,
                keychain_profile="notarytool-profile",
            ):
                log_error("DMG creation/notarization failed")
                return False
    except Exception as e:
        # track_error() already increments error_count for this exception;
        # the previous extra `error_count += 1` here double-counted it, so
        # the summary reported more errors than there were messages.
        track_error(f"Unexpected error: {e}")
        import traceback

        traceback.print_exc()
    # Summary report (similar to bash script)
    log_info("=" * 70)
    if error_count > 0:
        log_error(f"Process completed with {error_count} errors:")
        for msg in error_messages:
            log_error(f" {msg}")
        log_error("Review the errors above and address them before distribution.")
        if create_dmg:
            log_warning(f"Final DMG created at: {dmg_path} (may have issues)")
        return False
    else:
        log_success("Process completed successfully!")
        if create_dmg:
            log_info(f"Final DMG created at: {dmg_path}")
            log_info("The application is properly signed, notarized, and packaged.")
    log_info("=" * 70)
    return error_count == 0
def sign_universal(contexts: List[Context]) -> bool:
    """Create universal binary and sign it.

    Merges the per-architecture .app builds from each context into a
    single universal bundle under out/Default_universal using the bundled
    universalizer script, then signs the result with sign_app() (without
    DMG creation).

    Args:
        contexts: One Context per architecture; at least two are required.

    Returns:
        True when the universal bundle is created and signed successfully.
    """
    log_info("=" * 70)
    log_info("🔄 Creating and signing universal binary...")
    log_info("=" * 70)
    if len(contexts) < 2:
        log_error("Universal build requires at least 2 architectures")
        return False
    # Verify all per-architecture app builds exist before merging.
    app_paths = []
    for ctx in contexts:
        app_path = ctx.get_app_path()
        if not app_path.exists():
            log_error(f"App not found for {ctx.architecture}: {app_path}")
            return False
        app_paths.append(app_path)
        log_info(f"✓ Found {ctx.architecture} build: {app_path}")
    # Create a clean universal output directory (remove any stale merge).
    universal_dir = join_paths(contexts[0].chromium_src, "out", "Default_universal")
    universal_app_path = join_paths(universal_dir, contexts[0].BROWSEROS_APP_NAME)
    if universal_dir.exists():
        log_info("Removing existing universal directory...")
        shutil.rmtree(universal_dir)
    universal_dir.mkdir(parents=True, exist_ok=True)
    # Use universalizer script to merge architectures
    universalizer_script = join_paths(
        contexts[0].root_dir, "build", "modules", "package", "universalizer_patched.py"
    )
    if not universalizer_script.exists():
        log_error(f"Universalizer script not found: {universalizer_script}")
        return False
    try:
        # Invocation shape: universalizer <input apps...> <output app>
        cmd = [
            sys.executable,
            str(universalizer_script),
            *[str(app_path) for app_path in app_paths],
            str(universal_app_path),
        ]
        log_info("Running universalizer...")
        log_info(f"Command: {' '.join(cmd)}")
        run_command(cmd)
        log_success(f"Universal binary created: {universal_app_path}")
        # Create a temporary context for universal signing
        universal_ctx = Context(
            root_dir=contexts[0].root_dir,
            chromium_src=contexts[0].chromium_src,
            architecture="universal",
            build_type=contexts[0].build_type,
        )
        # Override out_dir for universal — plain attribute assignment;
        # presumably consumed by Context.get_app_path() — confirm in
        # common/context.py.
        universal_ctx.out_dir = "out/Default_universal"
        # Sign the universal binary (DMG packaging is handled separately).
        if not sign_app(universal_ctx, create_dmg=False):
            log_error("Failed to sign universal binary")
            return False
        log_success("Universal binary signed successfully!")
        return True
    except Exception as e:
        log_error(f"Failed to create universal binary: {e}")
        return False

View File

@@ -11,8 +11,8 @@ import glob
import shutil
from pathlib import Path
from typing import Optional, List, Dict, Tuple
from context import BuildContext
from utils import (
from ...common.context import BuildContext
from ...common.utils import (
run_command as utils_run_command,
log_info,
log_error,
@@ -154,9 +154,9 @@ def find_components_to_sign(
nxtscape_framework_paths.append(fw_path)
# Add versioned path if context is available
if ctx and ctx.nxtscape_chromium_version:
if ctx and ctx.browseros_chromium_version:
versioned_path = join_paths(
fw_path, "Versions", ctx.nxtscape_chromium_version
fw_path, "Versions", ctx.browseros_chromium_version
)
if versioned_path.exists():
nxtscape_framework_paths.insert(
@@ -720,7 +720,7 @@ def sign_app(ctx: BuildContext, create_dmg: bool = True) -> bool:
log_info("📦 Creating and notarizing DMG package")
log_info("=" * 70)
from modules.package import create_signed_notarized_dmg
from ..package.macos import create_signed_notarized_dmg
# Find pkg-dmg tool
pkg_dmg_path = ctx.get_pkg_dmg_path()
@@ -785,7 +785,7 @@ def sign_universal(contexts: List[BuildContext]) -> bool:
# Create universal output directory
universal_dir = join_paths(contexts[0].chromium_src, "out", "Default_universal")
universal_app_path = join_paths(universal_dir, contexts[0].NXTSCAPE_APP_NAME)
universal_app_path = join_paths(universal_dir, contexts[0].BROWSEROS_APP_NAME)
if universal_dir.exists():
log_info("Removing existing universal directory...")
@@ -795,7 +795,7 @@ def sign_universal(contexts: List[BuildContext]) -> bool:
# Use universalizer script to merge architectures
universalizer_script = join_paths(
contexts[0].root_dir, "build", "universalizer_patched.py"
contexts[0].root_dir, "build", "modules", "package", "universalizer_patched.py"
)
if not universalizer_script.exists():

View File

@@ -0,0 +1,262 @@
#!/usr/bin/env python3
"""Windows signing module for BrowserOS"""
import subprocess
from pathlib import Path
from typing import List
from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.env import EnvConfig
from ...common.utils import (
log_info,
log_error,
log_success,
log_warning,
join_paths,
IS_WINDOWS,
)
BROWSEROS_SERVER_BINARIES: List[str] = [
"browseros_server.exe",
"codex.exe",
]
class WindowsSignModule(CommandModule):
produces = ["signed_installer"]
requires = ["built_app"]
description = "Sign Windows binaries and create signed installer"
def validate(self, ctx: Context) -> None:
if not IS_WINDOWS():
raise ValidationError("Windows signing requires Windows")
build_output_dir = join_paths(ctx.chromium_src, ctx.out_dir)
if not build_output_dir.exists():
raise ValidationError(f"Build output directory not found: {build_output_dir}")
env = EnvConfig()
if not env.code_sign_tool_path:
raise ValidationError("CODE_SIGN_TOOL_PATH environment variable not set")
missing = []
if not env.esigner_username:
missing.append("ESIGNER_USERNAME")
if not env.esigner_password:
missing.append("ESIGNER_PASSWORD")
if not env.esigner_totp_secret:
missing.append("ESIGNER_TOTP_SECRET")
if missing:
raise ValidationError(f"Missing environment variables: {', '.join(missing)}")
def execute(self, ctx: Context) -> None:
log_info("\n🔏 Signing Windows binaries...")
build_output_dir = join_paths(ctx.chromium_src, ctx.out_dir)
self._sign_executables(build_output_dir)
self._build_mini_installer(ctx)
mini_installer_path = self._sign_installer(build_output_dir)
ctx.artifact_registry.add("signed_installer", mini_installer_path)
log_success("✅ All binaries signed successfully!")
def _sign_executables(self, build_output_dir: Path) -> None:
log_info("\nStep 1/3: Signing executables before packaging...")
binaries_to_sign_first = [build_output_dir / "chrome.exe"]
binaries_to_sign_first.extend(get_browseros_server_binary_paths(build_output_dir))
existing_binaries = []
for binary in binaries_to_sign_first:
if binary.exists():
existing_binaries.append(binary)
log_info(f"Found binary to sign: {binary.name}")
else:
log_warning(f"Binary not found: {binary}")
if not existing_binaries:
raise RuntimeError("No binaries found to sign")
if not sign_with_codesigntool(existing_binaries):
raise RuntimeError("Failed to sign executables")
def _build_mini_installer(self, ctx: Context) -> None:
log_info("\nStep 2/3: Building mini_installer with signed binaries...")
if not build_mini_installer(ctx):
raise RuntimeError("Failed to build mini_installer")
def _sign_installer(self, build_output_dir: Path) -> Path:
log_info("\nStep 3/3: Signing mini_installer.exe...")
mini_installer_path = build_output_dir / "mini_installer.exe"
if not mini_installer_path.exists():
raise RuntimeError(f"mini_installer.exe not found at: {mini_installer_path}")
if not sign_with_codesigntool([mini_installer_path]):
raise RuntimeError("Failed to sign mini_installer.exe")
return mini_installer_path
def get_browseros_server_binary_paths(build_output_dir: Path) -> List[Path]:
"""Return absolute paths to BrowserOS Server binaries for signing."""
server_dir = build_output_dir / "BrowserOSServer" / "default" / "resources" / "bin"
return [server_dir / binary for binary in BROWSEROS_SERVER_BINARIES]
def build_mini_installer(ctx: Context) -> bool:
"""Build the mini_installer.exe"""
from ..compile import build_target
log_info("Building mini_installer target...")
return build_target(ctx, "mini_installer")
def sign_with_codesigntool(binaries: List[Path]) -> bool:
"""Sign binaries using SSL.com CodeSignTool"""
log_info("Using SSL.com CodeSignTool for signing...")
env = EnvConfig()
if not env.code_sign_tool_path:
log_error("CODE_SIGN_TOOL_PATH not set in .env file")
log_error("Set CODE_SIGN_TOOL_PATH=C:/src/CodeSignTool-v1.3.2-windows")
return False
codesigntool_path = Path(env.code_sign_tool_path) / "CodeSignTool.bat"
if not codesigntool_path.exists():
log_error(f"CodeSignTool.bat not found at: {codesigntool_path}")
log_error("Make sure CODE_SIGN_TOOL_PATH points to the CodeSignTool directory")
return False
if not all([env.esigner_username, env.esigner_password, env.esigner_totp_secret]):
log_error("Missing required eSigner environment variables in .env:")
log_error(" ESIGNER_USERNAME=your-email")
log_error(" ESIGNER_PASSWORD=your-password")
log_error(" ESIGNER_TOTP_SECRET=your-totp-secret")
if not env.esigner_credential_id:
log_warning(" ESIGNER_CREDENTIAL_ID is recommended but optional")
return False
all_success = True
for binary in binaries:
try:
log_info(f"Signing {binary.name}...")
temp_output_dir = binary.parent / "signed_temp"
temp_output_dir.mkdir(exist_ok=True)
cmd = [
str(codesigntool_path),
"sign",
"-username",
env.esigner_username,
"-password",
f'"{env.esigner_password}"',
]
if env.esigner_credential_id:
cmd.extend(["-credential_id", env.esigner_credential_id])
cmd.extend(
[
"-totp_secret",
env.esigner_totp_secret,
"-input_file_path",
str(binary),
"-output_dir_path",
str(temp_output_dir),
"-override",
]
)
cmd_str = " ".join(cmd)
log_info(f"Running: {cmd_str}")
result = subprocess.run(
cmd_str,
shell=True,
capture_output=True,
text=True,
cwd=str(codesigntool_path.parent),
)
if result.stdout:
for line in result.stdout.split("\n"):
if line.strip():
log_info(line.strip())
if result.stderr:
for line in result.stderr.split("\n"):
if line.strip() and "WARNING" not in line:
log_error(line.strip())
if result.stdout and "Error:" in result.stdout:
log_error(
f"✗ Failed to sign {binary.name} - Authentication or signing error"
)
all_success = False
continue
signed_file = temp_output_dir / binary.name
if signed_file.exists():
import shutil
shutil.move(str(signed_file), str(binary))
log_info(f"Moved signed {binary.name} to original location")
try:
temp_output_dir.rmdir()
except Exception:
pass
verify_cmd = [
"powershell",
"-Command",
f"(Get-AuthenticodeSignature '{binary}').Status",
]
try:
verify_result = subprocess.run(
verify_cmd, capture_output=True, text=True
)
if "Valid" in verify_result.stdout:
log_success(f"{binary.name} signed and verified successfully")
else:
log_error(
f"{binary.name} signing verification failed - Status: {verify_result.stdout.strip()}"
)
all_success = False
except Exception:
log_warning(f"Could not verify signature for {binary.name}")
except Exception as e:
log_error(f"Failed to sign {binary.name}: {e}")
all_success = False
return all_success
def sign_universal(contexts: List[Context]) -> bool:
"""Windows doesn't support universal binaries"""
log_warning("Universal signing is not supported on Windows")
return True
def check_signing_environment() -> bool:
"""Check if Windows signing environment is properly configured"""
env = EnvConfig()
if not env.code_sign_tool_path:
log_error("CODE_SIGN_TOOL_PATH not set")
return False
missing = []
if not env.esigner_username:
missing.append("ESIGNER_USERNAME")
if not env.esigner_password:
missing.append("ESIGNER_PASSWORD")
if not env.esigner_totp_secret:
missing.append("ESIGNER_TOTP_SECRET")
if missing:
log_error(f"Missing environment variables: {', '.join(missing)}")
return False
return True

View File

@@ -1,149 +0,0 @@
#!/usr/bin/env python3
"""
Slack notification module for Nxtscape build system
"""
import os
import json
import requests
from typing import Optional, List
import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from utils import log_info, log_warning, log_error, get_platform
def get_slack_webhook_url() -> Optional[str]:
"""Get Slack webhook URL from environment variable"""
return os.environ.get("SLACK_WEBHOOK_URL")
def get_os_info() -> tuple[str, str]:
"""Get OS emoji and name for Slack notifications"""
platform = get_platform()
if platform == "macos":
return "🍎", "macOS"
elif platform == "windows":
return "🪟", "Windows"
elif platform == "linux":
return "🐧", "Linux"
else:
return "💻", platform.capitalize()
def send_slack_notification(message: str, success: bool = True) -> bool:
"""Send a notification to Slack if webhook URL is configured"""
webhook_url = get_slack_webhook_url()
if not webhook_url:
# Silently skip if no webhook configured
return True
# Choose emoji and color based on success status
emoji = "✅" if success else "❌"
color = "good" if success else "danger"
# Get OS information
os_emoji, os_name = get_os_info()
# Create Slack message payload
payload = {
"attachments": [
{
"color": color,
"fields": [
{
"title": "Nxtscape Build",
"value": f"{emoji} {message}",
"short": False,
}
],
"footer": f"{os_emoji} Nxtscape Build System - {os_name}",
"ts": None, # Slack will use current timestamp
}
]
}
try:
response = requests.post(
webhook_url,
data=json.dumps(payload),
headers={"Content-Type": "application/json"},
timeout=10,
)
if response.status_code == 200:
log_info(f"📲 Slack notification sent: {message}")
return True
else:
log_warning(f"Slack notification failed with status {response.status_code}")
return False
except requests.RequestException as e:
log_warning(f"Failed to send Slack notification: {e}")
return False
def notify_build_started(build_type: str, arch: str) -> bool:
"""Notify that build has started"""
_, os_name = get_os_info()
message = f"Build started on {os_name} - {build_type} build for {arch}"
return send_slack_notification(message, success=True)
def notify_build_step(step_name: str) -> bool:
"""Notify about a build step"""
message = f"Running step: {step_name}"
return send_slack_notification(message, success=True)
def notify_build_success(
duration_mins: int, duration_secs: int, gcs_uris: Optional[List[str]] = None
) -> bool:
"""Notify that build completed successfully"""
message = f"Build completed successfully in {duration_mins}m {duration_secs}s"
# Add GCS URIs to message if provided
if gcs_uris:
message += f"\n\nUploaded artifacts ({len(gcs_uris)} files):"
for uri in gcs_uris:
# Convert gs:// URI to public URL for easier access
if uri.startswith("gs://"):
public_url = uri.replace("gs://", "https://storage.googleapis.com/")
message += f"\n{public_url}"
else:
message += f"\n{uri}"
return send_slack_notification(message, success=True)
def notify_build_failure(error_message: str) -> bool:
"""Notify that build failed"""
message = f"Build failed: {error_message}"
return send_slack_notification(message, success=False)
def notify_build_interrupted() -> bool:
"""Notify that build was interrupted"""
message = "Build was interrupted by user"
return send_slack_notification(message, success=False)
def notify_gcs_upload(architecture: str, gcs_uris: List[str]) -> bool:
"""Notify about GCS upload for a specific architecture"""
if not gcs_uris:
return True
message = f"[{architecture}] Uploaded {len(gcs_uris)} artifact(s) to GCS"
# Add URIs to message
for uri in gcs_uris:
# Convert gs:// URI to public URL
if uri.startswith("gs://"):
public_url = uri.replace("gs://", "https://storage.googleapis.com/")
message += f"\n{public_url}"
else:
message += f"\n{uri}"
return send_slack_notification(message, success=True)

View File

@@ -1,24 +1,42 @@
#!/usr/bin/env python3
"""
Google Cloud Storage upload module for Nxtscape build artifacts
"""
"""Google Cloud Storage upload module for BrowserOS build artifacts"""
import os
import sys
from pathlib import Path
from typing import List, Optional, Tuple
from context import BuildContext
from utils import (
from ..common.module import CommandModule, ValidationError
from ..common.context import Context
from ..common.env import EnvConfig
from ..common.utils import (
log_info,
log_error,
log_success,
log_warning,
IS_WINDOWS,
IS_MACOS,
IS_LINUX,
join_paths,
)
class GCSUploadModule(CommandModule):
produces = []
requires = []
description = "Upload build artifacts to Google Cloud Storage"
def validate(self, ctx: Context) -> None:
if not GCS_AVAILABLE:
raise ValidationError("google-cloud-storage library not installed - run: pip install google-cloud-storage")
env = EnvConfig()
service_account_path = join_paths(ctx.root_dir, env.gcs_service_account_file)
if not service_account_path.exists():
raise ValidationError(f"Service account file not found: {env.gcs_service_account_file}")
def execute(self, ctx: Context) -> None:
log_info("\n☁️ Uploading package artifacts to GCS...")
success, uris = upload_package_artifacts_impl(ctx)
if not success:
raise RuntimeError("Failed to upload artifacts to GCS")
# Try to import google-cloud-storage
try:
from google.cloud import storage
@@ -28,28 +46,22 @@ try:
except ImportError:
GCS_AVAILABLE = False
# Service account file name
SERVICE_ACCOUNT_FILE = "gclient.json"
# GCS bucket configuration
GCS_BUCKET_NAME = "nxtscape"
def _get_platform_dir(platform_override: Optional[str] = None) -> str:
"""Get platform directory name for GCS path"""
if platform_override:
return platform_override
if IS_WINDOWS:
if IS_WINDOWS():
return "win"
elif IS_MACOS:
elif IS_MACOS():
return "macos"
else:
return "linux"
def upload_to_gcs(
ctx: BuildContext,
ctx: Context,
file_paths: List[Path],
platform_override: Optional[str] = None
) -> Tuple[bool, List[str]]:
@@ -72,18 +84,20 @@ def upload_to_gcs(
log_info("No files to upload to GCS")
return True, []
env = EnvConfig()
# Determine platform subdirectory
platform_dir = _get_platform_dir(platform_override)
# Build GCS path: gs://nxtscape/resources/<version>/<platform>/
gcs_prefix = f"resources/{ctx.nxtscape_version}/{platform_dir}"
# Build GCS path: gs://<bucket>/resources/<version>/<platform>/
gcs_prefix = f"resources/{ctx.browseros_version}/{platform_dir}"
log_info(f"\n☁️ Uploading artifacts to gs://{GCS_BUCKET_NAME}/{gcs_prefix}/")
log_info(f"\n☁️ Uploading artifacts to gs://{env.gcs_bucket}/{gcs_prefix}/")
# Check for service account file
service_account_path = join_paths(ctx.root_dir, SERVICE_ACCOUNT_FILE)
service_account_path = join_paths(ctx.root_dir, env.gcs_service_account_file)
if not service_account_path.exists():
log_error(f"Service account file not found: {SERVICE_ACCOUNT_FILE}")
log_error(f"Service account file not found: {env.gcs_service_account_file}")
log_info(
f"Please place the service account JSON file at: {service_account_path}"
)
@@ -95,7 +109,7 @@ def upload_to_gcs(
str(service_account_path)
)
client = storage.Client(credentials=credentials)
bucket = client.bucket(GCS_BUCKET_NAME)
bucket = client.bucket(env.gcs_bucket)
uploaded_files = []
gcs_uris = []
@@ -117,8 +131,8 @@ def upload_to_gcs(
# Note: With uniform bucket-level access, objects inherit bucket's IAM policies
# No need to set individual object ACLs
public_url = f"https://storage.googleapis.com/{GCS_BUCKET_NAME}/{blob_name}"
gcs_uri = f"gs://{GCS_BUCKET_NAME}/{blob_name}"
public_url = f"https://storage.googleapis.com/{env.gcs_bucket}/{blob_name}"
gcs_uri = f"gs://{env.gcs_bucket}/{blob_name}"
uploaded_files.append(public_url)
gcs_uris.append(gcs_uri)
log_success(f"✓ Uploaded: {public_url}")
@@ -142,20 +156,19 @@ def upload_to_gcs(
return False, []
def upload_package_artifacts(ctx: BuildContext) -> tuple[bool, List[str]]:
"""Upload package artifacts (DMG, ZIP, EXE) to GCS
def upload_package_artifacts_impl(ctx: Context) -> tuple[bool, List[str]]:
"""Internal implementation for uploading package artifacts to GCS
Returns: (success, list of GCS URIs)"""
log_info("\n☁️ Preparing to upload package artifacts to GCS...")
artifacts = []
# Look for files in the dist/<version> directory
dist_dir = ctx.get_dist_dir()
if dist_dir.exists():
if IS_MACOS:
if IS_MACOS():
# Look for DMG files
artifacts.extend(dist_dir.glob("*.dmg"))
elif IS_WINDOWS:
elif IS_WINDOWS():
# Look for installer and ZIP files
artifacts.extend(dist_dir.glob("*.exe"))
artifacts.extend(dist_dir.glob("*.zip"))
@@ -175,29 +188,29 @@ def upload_package_artifacts(ctx: BuildContext) -> tuple[bool, List[str]]:
return upload_to_gcs(ctx, artifacts)
def upload_signed_artifacts(ctx: BuildContext) -> bool:
def upload_signed_artifacts(ctx: Context) -> bool:
"""Upload signed artifacts to GCS"""
# For now, this is the same as package artifacts
# Can be extended in the future for specific signed artifacts
return upload_package_artifacts(ctx)
return upload_package_artifacts_impl(ctx)[0]
def download_from_gcs(
bucket_name: str,
source_path: str,
dest_path: Path,
ctx: Optional[BuildContext] = None,
ctx: Optional[Context] = None,
) -> bool:
"""Download a file from GCS (utility function)"""
if not GCS_AVAILABLE:
log_error("google-cloud-storage not installed")
return False
env = EnvConfig()
try:
# Try to use service account if available
client = None
if ctx:
service_account_path = join_paths(ctx.root_dir, SERVICE_ACCOUNT_FILE)
service_account_path = join_paths(ctx.root_dir, env.gcs_service_account_file)
if service_account_path.exists():
credentials = service_account.Credentials.from_service_account_file(
str(service_account_path)
@@ -246,9 +259,9 @@ def _detect_artifacts(dist_path: Path, platform_override: Optional[str] = None)
return []
else:
# Auto-detect based on current platform
if IS_MACOS:
if IS_MACOS():
patterns = ["*.dmg"]
elif IS_WINDOWS:
elif IS_WINDOWS():
patterns = ["*.exe", "*.zip"]
else: # Linux
patterns = ["*.AppImage", "*.deb"]
@@ -324,21 +337,23 @@ def handle_upload_dist(
total_size += size_mb
log_info(f" - {artifact.name} ({size_mb:.2f} MB)")
env = EnvConfig()
log_info(f"\nTotal size: {total_size:.2f} MB")
log_info(f"Upload destination: gs://{GCS_BUCKET_NAME}/resources/{version}/{platform_dir}/")
log_info(f"Upload destination: gs://{env.gcs_bucket}/resources/{version}/{platform_dir}/")
# 6. Create minimal BuildContext for upload
# BuildContext will try to load chromium_src, but we'll provide a dummy one
# since we don't need it for uploads
try:
ctx = BuildContext(
ctx = Context(
root_dir=root_dir,
chromium_src=Path("/dev/null"), # Dummy path, won't be used
architecture="", # Not needed for upload
build_type="release", # Not needed for upload
)
# Override the version with what we detected
ctx.nxtscape_version = version
ctx.browseros_version = version
except Exception as e:
# If BuildContext fails, we can still upload with minimal info
log_warning(f"Could not create full BuildContext: {e}")

View File

@@ -0,0 +1,133 @@
#!/usr/bin/env bash
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
RESOURCES_DIR="$(dirname "$SCRIPT_DIR")/../resources"
ICONS_DIR="$RESOURCES_DIR/icons"
MAC_DIR="$ICONS_DIR/mac"
ASSETS_DIR="$MAC_DIR/Assets.xcassets"
APPICONSET_DIR="$ASSETS_DIR/AppIcon.appiconset"
ICONSET_DIR="$ASSETS_DIR/Icon.iconset"
SOURCE_ICON="$ICONS_DIR/product_logo_1024.png"
if [[ ! -f "$SOURCE_ICON" ]]; then
echo "Error: Source icon not found: $SOURCE_ICON"
exit 1
fi
echo "Generating macOS icons from: $SOURCE_ICON"
mkdir -p "$APPICONSET_DIR"
mkdir -p "$ICONSET_DIR"
# Generate AppIcon.appiconset PNGs
echo "Generating AppIcon.appiconset..."
for size in 16 32 64 128 256 512 1024; do
output="$APPICONSET_DIR/appicon_${size}.png"
echo " Creating ${size}x${size}..."
sips -z $size $size "$SOURCE_ICON" --out "$output" >/dev/null
done
# Generate Icon.iconset PNGs (for .icns generation)
echo "Generating Icon.iconset..."
sips -z 256 256 "$SOURCE_ICON" --out "$ICONSET_DIR/icon_256x256.png" >/dev/null
sips -z 512 512 "$SOURCE_ICON" --out "$ICONSET_DIR/icon_256x256@2x.png" >/dev/null
# Create Contents.json for Assets.xcassets root
cat > "$ASSETS_DIR/Contents.json" << 'EOF'
{
"info" : {
"author" : "xcode",
"version" : 1
}
}
EOF
# Create Contents.json for AppIcon.appiconset
cat > "$APPICONSET_DIR/Contents.json" << 'EOF'
{
"images" : [
{
"filename" : "appicon_16.png",
"idiom" : "mac",
"scale" : "1x",
"size" : "16x16"
},
{
"filename" : "appicon_32.png",
"idiom" : "mac",
"scale" : "2x",
"size" : "16x16"
},
{
"filename" : "appicon_32.png",
"idiom" : "mac",
"scale" : "1x",
"size" : "32x32"
},
{
"filename" : "appicon_64.png",
"idiom" : "mac",
"scale" : "2x",
"size" : "32x32"
},
{
"filename" : "appicon_128.png",
"idiom" : "mac",
"scale" : "1x",
"size" : "128x128"
},
{
"filename" : "appicon_256.png",
"idiom" : "mac",
"scale" : "2x",
"size" : "128x128"
},
{
"filename" : "appicon_256.png",
"idiom" : "mac",
"scale" : "1x",
"size" : "256x256"
},
{
"filename" : "appicon_512.png",
"idiom" : "mac",
"scale" : "2x",
"size" : "256x256"
},
{
"filename" : "appicon_512.png",
"idiom" : "mac",
"scale" : "1x",
"size" : "512x512"
},
{
"filename" : "appicon_1024.png",
"idiom" : "mac",
"scale" : "2x",
"size" : "512x512"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}
EOF
# Generate app.icns from Icon.iconset
echo "Generating app.icns..."
iconutil -c icns "$ICONSET_DIR" -o "$MAC_DIR/app.icns"
# Generate Assets.car using actool
echo "Generating Assets.car..."
xcrun actool --compile "$MAC_DIR" "$ASSETS_DIR" \
--platform macosx \
--minimum-deployment-target 10.15 \
--app-icon AppIcon \
--output-partial-info-plist /dev/null
echo "Done! Generated:"
echo " - $MAC_DIR/app.icns"
echo " - $MAC_DIR/Assets.car"

View File

@@ -19,6 +19,7 @@ if (is_chrome_branded) {
mac_updater_bundle_identifier = "com.browseros.BrowserOSUpdater"
privileged_helper_bundle_name = "BrowserOSUpdaterPrivilegedHelper"
privileged_helper_name = "com.browseros.BrowserOS.UpdaterPrivilegedHelper"
updater_app_icon_path = "//chrome/app/theme/chromium/mac/app.icns"
updater_company_full_name = "BrowserOS"
updater_company_short_name = "BrowserOS"
updater_company_short_name_lowercase = "browseros"
@@ -31,12 +32,16 @@ if (is_chrome_branded) {
updater_metainstaller_name = "BrowserOS Installer"
mac_team_identifier = "PLACEHOLDER"
updater_appid = "{6e8ffa8f-e7e2-4000-9884-589283c27015}"
browser_appid = "{5d8d08af-2df9-4da2-86c1-eac353a0ca32}"
qualification_appid = "{43f3a046-04b3-4443-a770-d67dae90e440}"
crx_pkhash = ""
legacy_service_name_prefix = "cupdate"
prefs_access_mutex = "{A6B9ECD5-772A-4D3F-BFEB-CF9340534A3E}"
setup_mutex_prefix = "{25569F82-3B67-4185-8127-88E4CF803680}"
grdfile_name = "browseros_strings"
grdfile_name = "chromium_strings"
extra_args_is_chrome_branded = "IS_CHROME_BRANDED=False"
update_check_url = ""
updater_event_logging_url = ""
UpdaterLegacyLibGUID = "4C61BB05-94D1-4BAB-B69C-C34195AF92CA"
GoogleUpdate3WebUserClassGUID = "75828ED1-7BE8-45D0-8950-AA85CBF74510"
@@ -102,7 +107,3 @@ if (is_chrome_branded) {
legacy_google_update_appid = "{8B2B92A3-1BA2-4154-A89C-DA74C9C505E4}"
}
}
# Chrome and Chromium share the same endpoints for now.
update_check_url = "https://update.googleapis.com/service/update2/json"
device_management_server_url = "https://m.google.com/devicemanagement/data/api"

View File

@@ -0,0 +1,12 @@
diff --git a/base/version_info/BUILD.gn b/base/version_info/BUILD.gn
index 96937eddc0560..3e587782fc239 100644
--- a/base/version_info/BUILD.gn
+++ b/base/version_info/BUILD.gn
@@ -41,6 +41,7 @@ process_version("generate_version_info") {
template_file = "version_info_values.h.version"
sources = [
"//chrome/VERSION",
+ "//chrome/BROWSEROS_VERSION",
branding_file_path,
lastchange_file,
]

View File

@@ -0,0 +1,16 @@
diff --git a/base/version_info/version_info.h b/base/version_info/version_info.h
index 1f54eef6f4b0a..ad12af4d982c2 100644
--- a/base/version_info/version_info.h
+++ b/base/version_info/version_info.h
@@ -30,6 +30,11 @@ constexpr std::string_view GetVersionNumber() {
return PRODUCT_VERSION;
}
+// Returns the BrowserOS version number, e.g. "0.30.0.0".
+constexpr std::string_view GetBrowserOSVersionNumber() {
+ return BROWSEROS_VERSION;
+}
+
// Returns the major component (aka the milestone) of the version as an int,
// e.g. 6 when the version is "6.0.490.1".
int GetMajorVersionNumberAsInt();

View File

@@ -0,0 +1,12 @@
diff --git a/base/version_info/version_info_values.h.version b/base/version_info/version_info_values.h.version
index 0880fafd594f5..d08411767d51e 100644
--- a/base/version_info/version_info_values.h.version
+++ b/base/version_info/version_info_values.h.version
@@ -7,6 +7,7 @@
#define PRODUCT_NAME "@PRODUCT_FULLNAME@"
#define PRODUCT_VERSION "@MAJOR@.@MINOR@.@BUILD@.@PATCH@"
+#define BROWSEROS_VERSION "@BROWSEROS_MAJOR@.@BROWSEROS_MINOR@.@BROWSEROS_BUILD@.@BROWSEROS_PATCH@"
#define LAST_CHANGE "@LASTCHANGE@"
#define IS_OFFICIAL_BUILD @OFFICIAL_BUILD@

View File

@@ -1,5 +1,5 @@
diff --git a/chrome/BUILD.gn b/chrome/BUILD.gn
index 97f843f8133c4..0acbe29f11806 100644
index 0753724487493..eb8244129b93f 100644
--- a/chrome/BUILD.gn
+++ b/chrome/BUILD.gn
@@ -18,6 +18,7 @@ import("//build/config/win/manifest.gni")
@@ -10,7 +10,7 @@ index 97f843f8133c4..0acbe29f11806 100644
import("//chrome/chrome_paks.gni")
import("//chrome/common/features.gni")
import("//chrome/process_version_rc_template.gni")
@@ -372,6 +373,7 @@ if (!is_android && !is_mac) {
@@ -369,6 +370,7 @@ if (!is_android && !is_mac) {
}
data_deps += [
@@ -18,7 +18,7 @@ index 97f843f8133c4..0acbe29f11806 100644
"//chrome/browser/resources/media/mei_preload:component",
"//components/privacy_sandbox/privacy_sandbox_attestations/preload:component",
"//components/webapps/isolated_web_apps/preload:component",
@@ -528,6 +530,7 @@ if (is_win) {
@@ -525,6 +527,7 @@ if (is_win) {
":chrome_versioned_bundle_data",
"//base/allocator:early_zone_registration_apple",
"//build:branding_buildflags",
@@ -26,8 +26,8 @@ index 97f843f8133c4..0acbe29f11806 100644
"//chrome/common:buildflags",
"//chrome/common:version_header",
]
@@ -1218,6 +1221,10 @@ if (is_win) {
bundle_deps += [ ":preinstalled_apps" ]
@@ -1201,6 +1204,10 @@ if (is_win) {
bundle_deps += [ ":angle_binaries" ]
}
+ if (enable_sparkle) {

View File

@@ -1,12 +0,0 @@
diff --git a/chrome/VERSION b/chrome/VERSION
index cdbed925f47c1..37b80272989ff 100644
--- a/chrome/VERSION
+++ b/chrome/VERSION
@@ -1,4 +1,4 @@
MAJOR=137
MINOR=0
-BUILD=7151
-PATCH=69
+BUILD=7187
+PATCH=69
\ No newline at end of file

View File

@@ -1,14 +1,14 @@
diff --git a/chrome/app/chrome_command_ids.h b/chrome/app/chrome_command_ids.h
index 7dbc937b376cc..353b1ca1e9f25 100644
index d32aa215bc900..9074fc42074df 100644
--- a/chrome/app/chrome_command_ids.h
+++ b/chrome/app/chrome_command_ids.h
@@ -290,6 +290,9 @@
#define IDC_SHOW_HISTORY_SIDE_PANEL 40293
#define IDC_OPEN_GLIC 40294
#define IDC_FIND_EXTENSIONS 40295
+#define IDC_SHOW_THIRD_PARTY_LLM_SIDE_PANEL 40296
+#define IDC_CYCLE_THIRD_PARTY_LLM_PROVIDER 40297
+#define IDC_OPEN_CLASH_OF_GPTS 40298
@@ -298,6 +298,9 @@
#define IDC_SHOW_SEARCH_TOOLS 40296
#define IDC_SHOW_COMMENTS_SIDE_PANEL 40297
#define IDC_RECENT_TABS_SEE_DEVICE_TABS 40298
+#define IDC_SHOW_THIRD_PARTY_LLM_SIDE_PANEL 40299
+#define IDC_CYCLE_THIRD_PARTY_LLM_PROVIDER 40300
+#define IDC_OPEN_CLASH_OF_GPTS 40301
// Spell-check
// Insert any additional suggestions before _LAST; these have to be consecutive.

View File

@@ -1,11 +1,11 @@
diff --git a/chrome/app/generated_resources.grd b/chrome/app/generated_resources.grd
index 186d94b83cb9c..1dd06536b1105 100644
index 873f67b36de4d..8d0b2e34e77f9 100644
--- a/chrome/app/generated_resources.grd
+++ b/chrome/app/generated_resources.grd
@@ -8840,6 +8840,15 @@ Keep your key file in a safe place. You will need it to create new versions of y
Reading list
</message>
</if>
@@ -9033,6 +9033,15 @@ Keep your key file in a safe place. You will need it to create new versions of y
<message name="IDS_READ_LATER_COLLAPSE_BUTTON_ARIA_LABEL" desc="Aria label for the Read later list collapse button.">
Collapse <ph name="HEADER">$1<ex>Unread</ex></ph>
</message>
+ <message name="IDS_THIRD_PARTY_LLM_TITLE" desc="Title for 3rd party LLM side panel">
+ LLM Chat
+ </message>
@@ -18,7 +18,7 @@ index 186d94b83cb9c..1dd06536b1105 100644
<message name="IDS_READ_LATER_MENU_UNREAD_HEADER" desc="Header for section of unread Read later items.">
Unread
</message>
@@ -10882,6 +10891,9 @@ Check your passwords anytime in <ph name="GOOGLE_PASSWORD_MANAGER">$1<ex>Google
@@ -11174,6 +11183,9 @@ Check your passwords anytime in <ph name="GOOGLE_PASSWORD_MANAGER">$1<ex>Google
<message name="IDS_IMPORT_FROM_FIREFOX" desc="browser combo box: Mozilla Firefox">
Mozilla Firefox
</message>

View File

@@ -1,5 +1,5 @@
diff --git a/chrome/browser/BUILD.gn b/chrome/browser/BUILD.gn
index ad39862fdd9a5..fc8552e6e6dac 100644
index 7280ef29b85c1..8c1ed0a9de786 100644
--- a/chrome/browser/BUILD.gn
+++ b/chrome/browser/BUILD.gn
@@ -12,6 +12,7 @@ import("//build/config/features.gni")
@@ -8,9 +8,9 @@ index ad39862fdd9a5..fc8552e6e6dac 100644
import("//chrome/browser/buildflags.gni")
+import("//chrome/browser/sparkle_buildflags.gni")
import("//chrome/browser/downgrade/buildflags.gni")
import("//chrome/browser/request_header_integrity/buildflags.gni")
import("//chrome/common/features.gni")
@@ -119,6 +120,11 @@ buildflag_header("buildflags") {
import("//chrome/common/request_header_integrity/buildflags.gni")
@@ -117,6 +118,11 @@ buildflag_header("buildflags") {
}
}
@@ -22,18 +22,18 @@ index ad39862fdd9a5..fc8552e6e6dac 100644
source_set("browser_process") {
sources = [
"browser_process.cc",
@@ -287,6 +293,10 @@ static_library("browser") {
"chrome_browser_interface_binders.h",
"chrome_browser_interface_binders_webui.cc",
@@ -275,6 +281,10 @@ static_library("browser") {
"chrome_browser_interface_binders_webui.h",
"chrome_browser_interface_binders_webui_parts.h",
"chrome_browser_interface_binders_webui_parts_features.cc",
+ "browseros_server/browseros_server_manager.cc",
+ "browseros_server/browseros_server_manager.h",
+ "browseros_server/browseros_server_prefs.cc",
+ "browseros_server/browseros_server_prefs.h",
"chrome_browser_main.cc",
"chrome_browser_main.h",
"chrome_browser_main_extra_parts_nacl_deprecation.cc",
@@ -6557,6 +6567,20 @@ static_library("browser") {
"chrome_content_browser_client.cc",
@@ -6751,6 +6761,20 @@ static_library("browser") {
]
}

Some files were not shown because too many files have changed in this diff Show More