@@ -9,8 +9,9 @@ import os
 import sys
 import logging
 import shutil
+import urllib.parse
 from pathlib import Path
-from typing import Optional
+from typing import Optional, Tuple
 
 import click
 
@@ -35,12 +36,12 @@ logger = logging.getLogger(__name__)
 
 @click.command()
-@click.argument("url", required=True)
+@click.argument("source", required=True)
 @click.option(
     "--output-dir", "-o",
     type=click.Path(exists=False, file_okay=False, dir_okay=True),
-    default=".",
-    help="Output directory for generated files (default: current directory)"
+    default=None,
+    help="Output directory for generated files (default: same as source for local directories)"
 )
 @click.option(
     "--work-dir", "-w",
@@ -87,11 +88,11 @@ logger = logging.getLogger(__name__)
     "--clean-source",
     is_flag=True,
     default=False,
-    help="Clean up extracted source files after package creation"
+    help="Clean up extracted source files after package creation (not valid for local directories)"
 )
 def main(
-    url: str,
-    output_dir: str,
+    source: str,
+    output_dir: Optional[str],
     work_dir: Optional[str],
     name: Optional[str],
     version: Optional[str],
@@ -103,9 +104,10 @@ def main(
     clean_source: bool
 ) -> None:
     """
-    Generate USM manifest from source archive URL.
+    Generate USM manifest from source archive URL or local directory.
 
-    URL is the URL to a source archive (tar, tar.gz, tar.bz2, zip).
+    SOURCE is either a URL to a source archive (tar, tar.gz, tar.bz2, zip)
+    or a path to a local directory containing source code.
     """
     # Configure logging level
    if verbose:
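
Note: a hedged usage sketch of the resulting dual-mode command, written with click's own test runner (`click.testing.CliRunner` is standard click API; the URL and paths are illustrative):

    from click.testing import CliRunner

    runner = CliRunner()
    # URL mode: downloads and extracts the archive, then generates scripts and manifest
    runner.invoke(main, ["https://example.com/pkg-1.0.tar.gz", "-o", "./pkg-out"])
    # Local mode: generates in place; --output-dir would be ignored with a warning
    runner.invoke(main, ["./my-project"])
    # --clean-source only applies to the URL workflow; this exits with status 1
    result = runner.invoke(main, ["./my-project", "--clean-source"])
    assert result.exit_code == 1
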
@@ -114,6 +116,14 @@ def main(
         logging.getLogger().setLevel(logging.ERROR)
 
     try:
+        # Check if source is a URL or local directory
+        is_url, source_path = _detect_source_type(source)
+
+        # Validate --clean-source flag for local directories
+        if not is_url and clean_source:
+            logger.error("--clean-source flag is not valid when processing local directories")
+            sys.exit(1)
+
         # Initialize components
         download_manager = DownloadManager()
         extractor = ArchiveExtractor()
@@ -124,30 +134,56 @@ def main(
         usm_integration = USMIntegration()
         user_interaction = UserInteraction(interactive=not non_interactive)
 
-        # Set up working directory
-        if work_dir:
-            work_path = Path(work_dir)
-            work_path.mkdir(parents=True, exist_ok=True)
+        # Set up working directory (only needed for URLs)
+        if is_url:
+            if work_dir:
+                work_path = Path(work_dir)
+                work_path.mkdir(parents=True, exist_ok=True)
+            else:
+                import tempfile
+                work_path = Path(tempfile.mkdtemp(prefix="autusm-"))
+            logger.info(f"Using working directory: {work_path}")
         else:
-            import tempfile
-            work_path = Path(tempfile.mkdtemp(prefix="autusm-"))
-
-        logger.info(f"Using working directory: {work_path}")
+            work_path = None
 
         # Set up output directory
-        output_path = Path(output_dir)
+        if is_url:
+            # For URLs, use the specified output directory or default to current
+            output_path = Path(output_dir or ".")
+        else:
+            # For local directories, use the local directory itself as output
+            output_path = source_path
+            # If an explicit output directory was given, warn that it is ignored
+            if output_dir:
+                logger.warning("Output directory is ignored when processing local directories")
+
         output_path.mkdir(parents=True, exist_ok=True)
         logger.info(f"Using output directory: {output_path}")
 
-        # Step 1: Download the source archive
-        logger.info(f"Downloading source archive from: {url}")
-        archive_path = download_manager.download(url, work_path)
-        logger.info(f"Downloaded to: {archive_path}")
+        # Check for existing MANIFEST.usm in local directory mode
+        if not is_url:
+            manifest_path = output_path / "MANIFEST.usm"
+            if manifest_path.exists():
+                logger.error(f"MANIFEST.usm already exists in {output_path}")
+                sys.exit(1)
+
+        # Get source directory and extract metadata
+        if is_url:
+            # URL workflow
+            logger.info(f"Downloading source archive from: {source}")
+            archive_path = download_manager.download(source, work_path)
+            logger.info(f"Downloaded to: {archive_path}")
 
-        # Step 2: Extract the archive directly to the output directory
-        logger.info("Extracting archive to output directory...")
-        source_dir = extractor.extract(archive_path, output_path)
-        logger.info(f"Extracted to: {source_dir}")
+            logger.info("Extracting archive to output directory...")
+            source_dir = extractor.extract(archive_path, output_path)
+            logger.info(f"Extracted to: {source_dir}")
+
+            url = source
+        else:
+            # Local directory workflow
+            logger.info(f"Using local source directory: {source_path}")
+            source_dir = source_path
+            url = ""
 
         # Step 3: Analyze the source code
         logger.info("Analyzing source code...")
@@ -181,10 +217,23 @@ def main(
         scripts_dir = output_path / "scripts"
         scripts_dir.mkdir(parents=True, exist_ok=True)
 
+        # For local directories, check if scripts already exist
+        if not is_url:
+            existing_scripts = []
+            for script_name in ["build", "install"]:
+                script_path = scripts_dir / script_name
+                if script_path.exists():
+                    existing_scripts.append(script_name)
+
+            if existing_scripts:
+                logger.info(f"Skipping generation of existing scripts: {', '.join(existing_scripts)}")
+
+        # Generate scripts (will skip acquire script for local directories)
         script_generator.generate_scripts(
             package_info,
             build_system,
-            scripts_dir
+            scripts_dir,
+            skip_acquire=not is_url  # Skip acquire script for local directories
         )
 
         # Step 7: Generate USM manifest
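
Note: the call above now passes `skip_acquire`, so `generate_scripts` must accept that parameter; the corresponding ScriptGenerator change is outside this diff. A minimal sketch of the assumed signature (`_generate_acquire_script` is a hypothetical helper name):

    def generate_scripts(self, package_info, build_system, scripts_dir,
                         skip_acquire: bool = False) -> None:
        # Hypothetical: skip the acquire script when the source is already local;
        # a default of False preserves the existing URL workflow unchanged.
        if not skip_acquire:
            self._generate_acquire_script(package_info, scripts_dir)  # assumed helper
        ...  # build/install script generation unchanged
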
@@ -222,8 +271,8 @@ def main(
         else:
             logger.warning("USM is not available, skipping autoprovides")
 
-        # Step 10: Clean up source files if requested
-        if clean_source:
+        # Step 10: Clean up source files if requested (only for URLs)
+        if clean_source and is_url:
             _cleanup_source_files(source_dir, output_path, logger)
 
         # Print summary
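
Summary of the two modes as implemented above:

    Aspect            URL source                     Local directory source
    working dir       --work-dir or a mkdtemp dir    none (work_path = None)
    output dir        --output-dir, default "."      the source directory itself
    acquire script    generated                      skipped (skip_acquire=True)
    --clean-source    honored                        rejected with an error
    MANIFEST.usm      written                        refused if one already exists
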
@@ -303,5 +352,34 @@ def _cleanup_source_files(source_dir: Path, output_path: Path, logger) -> None:
         # Don't raise the exception - cleanup failure shouldn't break the whole process
 
 
+def _detect_source_type(source: str) -> Tuple[bool, Path]:
+    """
+    Detect whether the source argument is a URL or a local directory.
+
+    Args:
+        source: The source argument from the command line
+
+    Returns:
+        Tuple of (is_url, path):
+        - is_url: True if source is a URL, False if it is a local directory
+        - path: the raw source wrapped in Path when is_url=True, the resolved directory path when is_url=False
+    """
+    # A URL must have both a scheme and a network location
+    parsed = urllib.parse.urlparse(source)
+    if parsed.scheme and parsed.netloc:
+        # It's a URL
+        return True, Path(source)
+
+    # Check if it's a local directory
+    path = Path(source)
+    if path.exists() and path.is_dir():
+        # It's a local directory
+        return False, path.resolve()
+
+    # Neither a well-formed URL nor an existing directory: assume a URL,
+    # which maintains backward compatibility with the previous behavior
+    return True, Path(source)
+
+
 if __name__ == "__main__":
     main()
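
Note: a quick behavior sketch of the URL test in `_detect_source_type`. This is standard `urllib.parse` behavior; the inputs are illustrative.

    import urllib.parse

    urllib.parse.urlparse("https://example.com/pkg-1.0.tar.gz")
    # -> scheme='https', netloc='example.com'  => detected as a URL

    urllib.parse.urlparse("./my-project")
    # -> scheme='', netloc=''                  => falls through to the directory check

    urllib.parse.urlparse("file:///srv/src")
    # -> scheme='file', netloc=''              => not detected as a URL by this check

One consequence: a `file://` source has an empty netloc, so it falls through, and if its path is not an existing directory it hits the backward-compatibility fallback and is handed to the downloader as a URL.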