2021-11-10 22:28:44 -03:00
|
|
|
#! /usr/bin/env python3
|
|
|
|
|
|
|
|
|
|
import argparse
|
2022-03-19 12:19:50 -03:00
|
|
|
import atexit
|
2023-10-13 20:43:34 -03:00
|
|
|
import functools
|
2021-11-10 22:28:44 -03:00
|
|
|
import glob
|
|
|
|
|
import os
|
|
|
|
|
import re
|
|
|
|
|
import shutil
|
|
|
|
|
import subprocess
|
|
|
|
|
import sys
|
2024-10-13 20:13:26 +02:00
|
|
|
import textwrap
|
2021-11-10 22:28:44 -03:00
|
|
|
|
2024-04-18 00:23:57 +02:00
|
|
|
# Absolute path of the ns-3 source tree (the directory holding this script).
ns3_path = os.path.dirname(os.path.realpath(os.path.abspath(__file__)))

# Helper that joins a relative path onto the ns-3 source tree root.
append_to_ns3_path = functools.partial(os.path.join, ns3_path)

# Default output directory for build artifacts.
out_dir = os.sep.join([ns3_path, "build"])

# Collapse all BSD variants (freebsd, openbsd, ...) into a single "bsd" tag.
platform = sys.platform

platform = "bsd" if "bsd" in platform else platform

# Per-platform lock file where the configure step records build settings.
lock_file = os.sep.join([ns3_path, ".lock-ns3_%s_build" % platform])

# Default parallelism: leave one CPU thread free for the rest of the system.
max_cpu_threads = max(1, os.cpu_count() - 1)

# Accumulated command log, flushed by exit_handler() at interpreter exit.
print_buffer = ""

run_verbose = True

# Windows uses ; as a PATH entry separator,
# but msys shell uses : just like most Unix-like systems
path_sep = ";" if ";" in os.environ["PATH"] else ":"

path_variable = "$PATH" if path_sep == ":" else "%PAT" "H%"

# Terminal width used to wrap error messages; os.get_terminal_size() raises
# OSError when stdout is not a terminal (e.g. piped into grep).
try:
    TERMINAL_WIDTH = os.get_terminal_size().columns
except OSError:
    TERMINAL_WIDTH = 80  # assume 80 columns when grepping
|
|
|
|
|
|
2021-12-10 02:13:43 +00:00
|
|
|
|
|
|
|
|
# Prints everything in the print_buffer on exit
|
|
|
|
|
# Prints everything in the print_buffer on exit
def exit_handler(dry_run):
    """Flush the accumulated command log (print_buffer) at interpreter exit.

    Prints nothing unless this is a dry run or verbose output was requested.
    """
    global print_buffer, run_verbose

    # We should not print anything in run except if dry_run or run_verbose
    if not (dry_run or run_verbose):
        return

    # Nothing was logged, so there is nothing to flush.
    if not print_buffer:
        return

    # Normalize path separators for the host platform before printing.
    print_buffer = print_buffer.replace("\\", "/").replace("//", "/").replace("/", os.sep)

    if dry_run:
        print("The following commands would be executed:")
    elif run_verbose:
        print("Finished executing the following commands:")

    # Skip the leading newline that the first print_and_buffer() call added.
    print(print_buffer[1:])
|
|
|
|
|
|
2021-11-10 22:28:44 -03:00
|
|
|
|
|
|
|
|
def on_off_argument(parser, option_name, help_on, help_off=None):
    """Register a matching --enable-X / --disable-X flag pair on *parser*.

    When *help_off* is omitted the descriptions become "Enable <help_on>" and
    "Disable <help_on>"; otherwise *help_on* / *help_off* are used verbatim.
    Both flags default to None so that "not given" is distinguishable from
    "explicitly disabled". Returns *parser* to allow chaining.
    """
    if help_off is None:
        enable_help = "Enable %s" % help_on
        disable_help = "Disable %s" % help_on
    else:
        enable_help = help_on
        disable_help = help_off

    for flag, description in (
        ("--enable-%s" % option_name, enable_help),
        ("--disable-%s" % option_name, disable_help),
    ):
        parser.add_argument(flag, help=description, action="store_true", default=None)

    return parser
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def on_off(condition):
    """Map a truthy/falsy value to the CMake literals "ON" / "OFF"."""
    if condition:
        return "ON"
    return "OFF"


def on_off_condition(args, cmake_flag, option_name):
    """Translate an --enable-X/--disable-X flag pair into a CMake -D argument.

    Returns None when neither flag was given (leaving the cached CMake value
    untouched); otherwise returns "-DNS3_<cmake_flag>=ON|OFF", where an
    explicit --disable wins over --enable.
    """
    enabled = getattr(args, "enable_" + option_name)
    disabled = getattr(args, "disable_" + option_name)

    if enabled is None and disabled is None:
        return None

    return "-DNS3_%s=%s" % (cmake_flag, on_off(enabled and not disabled))
|
|
|
|
|
|
|
|
|
|
|
2023-11-19 20:07:19 -03:00
|
|
|
def add_argument_to_subparsers(
    parsers: list,
    arguments: list,
    help_msg: str,
    dest: str,
    action="store_true",
    default_value=None,
):
    """Register the same argument on every parser in *parsers*.

    Avoids copy-pasting shared flags (e.g. --dry-run) across subcommands.
    Each subparser stores the value under "<subcommand>_<dest>" — the main
    parser, whose prog is plain "ns3", uses *dest* directly — so the values
    do not clash when the parsed namespaces are merged later.
    """
    for target_parser in parsers:
        # "ns3 build" -> "build"; the top-level "ns3" parser yields "".
        subcommand = target_parser.prog.replace("ns3", "").strip()
        attribute_name = f"{subcommand}_{dest}" if subcommand else dest
        target_parser.add_argument(
            *arguments,
            help=help_msg,
            action=action,
            default=default_value,
            dest=attribute_name,
        )
|
2022-03-19 12:19:50 -03:00
|
|
|
|
|
|
|
|
|
2021-11-10 22:28:44 -03:00
|
|
|
def parse_args(argv):
    """Build the ns3 command-line interface and parse *argv*.

    Creates the top-level argparse parser plus one subparser per subcommand
    (build, configure, run, docs, show, clean, ...), parses *argv*, then
    post-processes the resulting namespace so the rest of the script can read
    flat attributes (args.run, args.configure, args.dry_run, args.jobs, ...).

    Arguments after a literal "--" are not parsed; they are stored verbatim in
    args.program_args for the executed program. Raises Exception when unknown
    options remain, and exits directly for --help or a target-less "run".
    """
    # Keyword arguments that only exist on newer Python versions; emptied out
    # below when running on an interpreter that predates them.
    py39args = {"exit_on_error": False}
    py314args = {"color": False}
    if sys.version_info < (3, 9):
        py39args = {}
    if sys.version_info < (3, 14):
        py314args = {}
    # add_help=False: we provide our own -h/--help so that subcommand usage
    # lines can be appended to the summary.
    parser = argparse.ArgumentParser(
        description="ns-3 wrapper for the CMake build system",
        add_help=False,
        **py39args,
        **py314args,
    )

    sub_parser = parser.add_subparsers(dest="subparser")

    parser.add_argument(
        "-h",
        "--help",
        help="Print a summary of available commands",
        action="store_true",
        default=None,
        dest="main_help",
    )
    parser.add_argument(
        "--compile-or-die",
        help=(
            "Build and test each individual commit between a base and an head commits.\n"
            "This is especially useful when preparing MRs or rewriting git history, and ensuring refactor-or-die "
            "principle is being followed"
        ),
        action="store",
        nargs=2,
        type=str,
        default=[None, None],
        metavar="compile_or_die",
    )
    parser_help = sub_parser.add_parser("help", help="Print a summary of available commands")

    # parser.add_argument('--docset',
    #                     help=(
    #                         'Create Docset, without building. This requires the docsetutil tool from Xcode 9.2 or earlier.'
    #                         'See Bugzilla 2196 for more details.'),
    #                     action="store_true", default=None,
    #                     dest="docset_build")

    parser_build = sub_parser.add_parser(
        "build",
        help=(
            "Accepts a list of targets to build,"
            " or builds the entire project if no target is given"
        ),
        formatter_class=argparse.RawTextHelpFormatter,
    )
    parser_build.add_argument(
        "target",
        help=(
            "Build the entire project or the specified target and its dependencies.\n"
            "To get the list of targets, use:\n"
            "./ns3 show targets\n"
        ),
        action="store",
        nargs="*",
    )

    parser_configure = sub_parser.add_parser(
        "configure", help='Try "./ns3 configure --help" for more configuration options'
    )
    parser_configure.add_argument(
        "-d",
        "--build-profile",
        help="Build profile",
        dest="build_profile",
        choices=["debug", "default", "release", "optimized", "minsizerel"],
        action="store",
        type=str,
        default=None,
    )

    parser_configure.add_argument(
        "-G",
        help=(
            "CMake generator "
            "(e.g. https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html)"
        ),
        action="store",
        type=str,
        default=None,
    )

    parser_configure.add_argument(
        "--cxx-standard",
        help="Compile NS-3 with the given C++ standard",
        type=str,
        default=None,
        choices=["23", "26"],
    )

    # On-Off options
    # First positional is transformed into --enable-option --disable-option
    # Second positional is used for description "Enable %s" % second positional/"Disable %s" % second positional
    # When an optional third positional is given, the second is used as is as the 'enable' description
    # and the third is used as is as the 'disable' description
    on_off_options = [
        ("asserts", "the asserts regardless of the compile mode"),
        (
            "des-metrics",
            "Logging all events in a json file with the name of the executable "
            "(which must call CommandLine::Parse(argc, argv))",
        ),
        ("build-version", "embedding git changes as a build version during build"),
        ("clang-tidy", "clang-tidy static analysis"),
        ("dpdk", "the fd-net-device DPDK features"),
        ("eigen", "Eigen3 library support"),
        ("examples", "the ns-3 examples"),
        ("gcov", "code coverage analysis"),
        ("gsl", "GNU Scientific Library (GSL) features"),
        ("gtk", "GTK support in ConfigStore"),
        ("logs", "the logs regardless of the compile mode"),
        ("monolib", "a single shared library with all ns-3 modules"),
        ("mpi", "the MPI support for distributed simulation"),
        ("mtp", "Multithreading support for high speed parallel simulation"),
        (
            "ninja-tracing",
            "the conversion of the Ninja generator log file into about://tracing format",
        ),
        ("precompiled-headers", "precompiled headers"),
        ("python-bindings", "python bindings"),
        ("tests", "the ns-3 tests"),
        ("sanitizers", "address, memory leaks and undefined behavior sanitizers"),
        ("static", "Build a single static library with all ns-3", "Restore the shared libraries"),
        ("sudo", "use of sudo to setup suid bits on ns3 executables."),
        ("verbose", "printing of additional build system messages"),
        ("warnings", "compiler warnings"),
        ("werror", "Treat compiler warnings as errors", "Treat compiler warnings as warnings"),
    ]

    for on_off_option in on_off_options:
        parser_configure = on_off_argument(parser_configure, *on_off_option)

    parser_configure.add_argument(
        "--enable-modules",
        help='List of modules to build (e.g. "core;network;internet")',
        action="store",
        type=str,
        default=None,
    )
    parser_configure.add_argument(
        "--disable-modules",
        help='List of modules not to build (e.g. "lte;wifi")',
        action="store",
        type=str,
        default=None,
    )
    parser_configure.add_argument(
        "--filter-module-examples-and-tests",
        help=(
            "List of modules that should have their examples " 'and tests built (e.g. "lte;wifi")'
        ),
        action="store",
        type=str,
        default=None,
    )
    parser_configure.add_argument(
        "--lcov-report",
        help=(
            "Generate a code coverage report "
            "(use this option after configuring with --enable-gcov and running a program)"
        ),
        action="store_true",
        default=None,
    )
    parser_configure.add_argument(
        "--lcov-zerocounters",
        help=(
            "Zero the lcov counters"
            " (use this option before rerunning a program"
            " when generating repeated lcov reports)"
        ),
        action="store_true",
        default=None,
    )

    parser_configure.add_argument(
        "--out",
        "--output-directory",
        help=("Directory to store build artifacts"),
        type=str,
        default=None,
        dest="output_directory",
    )
    parser_configure.add_argument(
        "--with-brite",
        help=(
            "Use BRITE integration support, given by the indicated path,"
            " to allow the use of the BRITE topology generator"
        ),
        type=str,
        default=None,
    )
    parser_configure.add_argument(
        "--with-click",
        help="Path to Click source or installation prefix for NS-3 Click Integration support",
        type=str,
        default=None,
    )
    parser_configure.add_argument(
        "--with-openflow",
        help="Path to OFSID source for NS-3 OpenFlow Integration support",
        type=str,
        default=None,
    )
    parser_configure.add_argument(
        "--force-refresh",
        help="Force refresh the CMake cache by deleting" " the cache and reconfiguring the project",
        action="store_true",
        default=None,
    )
    parser_configure.add_argument(
        "--prefix", help="Target output directory to install", action="store", default=None
    )
    parser_configure.add_argument(
        "--trace-performance",
        help="Generate a performance trace log for the CMake configuration",
        action="store_true",
        default=None,
        dest="trace_cmake_perf",
    )

    parser_clean = sub_parser.add_parser("clean", help="Removes files created by ns3")

    parser_distclean = sub_parser.add_parser(
        "distclean", help="Removes files created by ns3, tests and documentation"
    )

    parser_install = sub_parser.add_parser("install", help="Install ns-3")

    parser_uninstall = sub_parser.add_parser("uninstall", help="Uninstall ns-3")

    parser_run = sub_parser.add_parser(
        "run",
        help='Try "./ns3 run --help" for more runtime options',
        formatter_class=argparse.RawTextHelpFormatter,
    )
    parser_run.add_argument(
        "target",
        help=(
            "Build and run the target executable.\n"
            "If --no-build is present, the build step is skipped.\n"
            "To get the list of targets, use:\n"
            "./ns3 show targets\n"
            "Arguments can be passed down to a program in one of the following ways:\n"
            './ns3 run "target --help"\n'
            "./ns3 run target -- --help\n"
            './ns3 run target --command-template="%%s --help"\n'
        ),
        default="",
        nargs="?",
    )
    parser_run.add_argument(
        "--no-build", help="Skip build step.", action="store_true", default=False
    )
    parser_run.add_argument(
        "--command-template",
        help=(
            "Template of the command used to run the program given by run;"
            " It should be a shell command string containing %%s inside,"
            " which will be replaced by the actual program."
        ),
        type=str,
        default=None,
    )
    parser_run.add_argument(
        "--cwd",
        help="Set the working directory for a program.",
        action="store",
        type=str,
        default=None,
    )
    parser_run.add_argument(
        "--gdb",
        help="Change the default command template to run programs with gdb",
        action="store_true",
        default=None,
    )
    parser_run.add_argument(
        "--lldb",
        help="Change the default command template to run programs with lldb",
        action="store_true",
        default=None,
    )
    parser_run.add_argument(
        "-g",
        "--valgrind",
        help="Change the default command template to run programs with valgrind",
        action="store_true",
        default=None,
    )
    parser_run.add_argument(
        "--memray",
        help="Use Memray memory profiler for Python scripts. Output will be saved to memray.output",
        action="store_true",
        default=None,
    )
    parser_run.add_argument(
        "--heaptrack",
        help="Use Heaptrack memory profiler for C++",
        action="store_true",
        default=None,
    )
    parser_run.add_argument(
        "--perf", help="Use Linux's perf to profile a program", action="store_true", default=None
    )
    parser_run.add_argument(
        "--vis",
        "--visualize",
        help="Modify --run arguments to enable the visualizer",
        action="store_true",
        dest="visualize",
        default=None,
    )
    parser_run.add_argument(
        "--enable-sudo",
        help="Use sudo to setup suid bits on ns3 executables.",
        dest="enable_sudo",
        action="store_true",
        default=False,
    )

    parser_shell = sub_parser.add_parser(
        "shell", help="Export necessary environment variables and open a shell"
    )

    parser_docs = sub_parser.add_parser(
        "docs", help='Try "./ns3 docs --help" for more documentation options'
    )
    parser_docs.add_argument(
        "target",
        help="Build project documentation",
        choices=[
            "contributing",
            "installation",
            "manual",
            "models",
            "tutorial",
            "sphinx",
            "doxygen-no-build",
            "doxygen",
            "all",
        ],
        action="store",
        type=str,
        default=None,
    )

    parser_show = sub_parser.add_parser(
        "show", help='Try "./ns3 show --help" for more show options'
    )
    parser_show_sub = parser_show.add_subparsers(dest="show")

    # Add subcommands for the "docs" command
    # Since 'typeid' has a required arg, need to handle these
    # with subparsers not simple "choices=[...]"
    show_choices = ["profile", "version", "config", "targets", "typeid", "all"]

    for choice in show_choices:
        if choice == "typeid":
            parser_typeid = parser_show_sub.add_parser(
                "typeid",
                help="Show the introspected documentation for a TypeId",
            )
            parser_typeid.add_argument(
                "tid",
                help='Show the introspected documentation for TypeId "tid"',
                type=str,
            )
        else:
            parser_show_sub.add_parser(choice)

    parser_show.set_defaults(show="all")

    # Shared flags registered on multiple subcommands in one place.
    add_argument_to_subparsers(
        [
            parser,
            parser_build,
            parser_configure,
            parser_clean,
            parser_distclean,
            parser_docs,
            parser_run,
            parser_show,
        ],
        ["--dry-run"],
        help_msg="Do not execute the commands.",
        dest="dry_run",
    )

    add_argument_to_subparsers(
        [parser, parser_build, parser_run],
        ["-j", "--jobs"],
        help_msg="Set number of parallel jobs.",
        dest="jobs",
        action="store",
        default_value=max_cpu_threads,
    )

    add_argument_to_subparsers(
        [parser, parser_build, parser_configure, parser_run, parser_show],
        ["--quiet"],
        help_msg="Don't print task lines, i.e. messages saying which tasks are being executed.",
        dest="quiet",
    )

    add_argument_to_subparsers(
        [parser, parser_build, parser_configure, parser_docs, parser_run],
        ["-v", "--verbose"],
        help_msg="Print which commands were executed",
        dest="verbose",
        default_value=False,
    )

    # Try to split -- separated arguments into two lists for ns3 and for the runnable target
    try:
        args_separator_index = argv.index("--")
        ns3_args = argv[:args_separator_index]
        runnable_args = argv[args_separator_index + 1 :]
    except ValueError:
        # No "--" present: everything belongs to ns3 itself.
        ns3_args = argv
        runnable_args = []

    # Parse known arguments and separate from unknown arguments
    try:
        args, unknown_args = parser.parse_known_args(ns3_args)
    except argparse.ArgumentError as e:
        # Only reachable with exit_on_error=False (Python >= 3.9); shorten
        # known error messages before printing them.
        error_message = e.message
        custom_exits = ["invalid choice:"]
        for custom_exit in custom_exits:
            if custom_exit in e.message:
                error_message = f"Exiting due to {e.message[e.message.index(custom_exit):]}"
        # Format error message to better fit the terminal width
        print("\n".join(textwrap.wrap(error_message, width=TERMINAL_WIDTH)))
        exit(1)

    # Since Python 3.14, we cannot store_true positional arguments.
    # To keep same behavior, we emulate what we had: one flag set to false for every subparser
    # not set to anything. And build is an empty string.
    def set_subparser_flags(parser, args):
        # Collect the registered subcommand names from the parser internals.
        sub_parser_actions = list(
            filter(lambda x: isinstance(x, argparse._SubParsersAction), parser._actions)
        )
        sub_parsers = list(sub_parser_actions[0].choices.keys())
        if args.subparser in sub_parsers:
            sub_parsers.remove(args.subparser)

        # Unused subcommands get a falsy placeholder attribute.
        for parser in sub_parsers:
            setattr(args, parser, "" if parser in ["build", "run", "show"] else False)

        if args.subparser not in [None, "build", "run", "show"]:
            setattr(args, args.subparser, True)

        # For target-taking subcommands, expose the target under the
        # subcommand name (e.g. args.run == "scratch-simulator").
        if args.subparser in ["build", "run", "docs"]:
            setattr(args, args.subparser, args.target)
        return args

    args = set_subparser_flags(parser, args)

    # If run doesn't have a target, print the help message of the run parser
    if args.subparser == "run" and args.run == "":
        parser_run.print_help()
        exit(-1)

    # Merge attributes: fold the per-subcommand "<sub>_<dest>" attributes into
    # a single boolean attribute (True if any of them was set).
    attributes_to_merge = ["dry_run", "help", "verbose", "quiet"]
    filtered_attributes = list(
        filter(lambda x: x if ("disable" not in x and "enable" not in x) else None, args.__dir__())
    )
    for attribute in attributes_to_merge:
        merging_attributes = list(
            map(lambda x: args.__getattribute__(x) if attribute in x else None, filtered_attributes)
        )
        setattr(args, attribute, merging_attributes.count(True) > 0)

    if args.help:
        print(parser.description)
        print("")
        print(parser.format_usage())

        # retrieve subparsers from parser
        subparsers_actions = [
            action for action in parser._actions if isinstance(action, argparse._SubParsersAction)
        ]
        # there will probably only be one subparser_action,
        # but better safe than sorry
        for subparsers_action in subparsers_actions:
            # get all subparsers and print help
            for choice, subparser in subparsers_action.choices.items():
                subcommand = subparser.format_usage()[:-1].replace("usage: ", " or: ")
                if len(subcommand) > 1:
                    print(subcommand)

        print(
            parser.format_help().replace(parser.description, "").replace(parser.format_usage(), "")
        )
        exit(0)

    # Merge the per-subcommand jobs attributes, keeping the smallest request.
    attributes_to_merge = ["jobs"]
    filtered_attributes = list(
        filter(lambda x: x if ("disable" not in x and "enable" not in x) else 0, args.__dir__())
    )
    for attribute in attributes_to_merge:
        merging_attributes = list(
            map(
                lambda x: int(args.__getattribute__(x)) if attribute in x else max_cpu_threads,
                filtered_attributes,
            )
        )
        setattr(args, attribute, min(merging_attributes))

    # If some positional options are not in args, set them to false.
    for option in [
        "clean",
        "configure",
        "docs",
        "install",
        "run",
        "shell",
        "uninstall",
        "show",
        "distclean",
    ]:
        if option not in args:
            setattr(args, option, False)

    if args.run and args.enable_sudo is None:
        args.enable_sudo = True

    # Save runnable target arguments
    setattr(args, "program_args", runnable_args)

    # Emit error in case of unknown arguments
    if unknown_args:
        msg = (
            "Unknown options were given: {options}.\n"
            "To see the allowed options add the `--help` option.\n"
            "To forward configuration or runtime options, put them after '--'.\n"
        )
        if args.run:
            msg += "Try: ./ns3 run {target} -- {options}\n"
        if args.configure:
            msg += "Try: ./ns3 configure -- {options}\n"
        msg = msg.format(options=", ".join(unknown_args), target=args.run)
        raise Exception(msg)
    return args
|
2021-11-10 22:28:44 -03:00
|
|
|
|
|
|
|
|
|
2022-01-31 20:02:10 -03:00
|
|
|
def check_lock_data(output_directory):
    """Load the configure-time settings recorded in the .lock-ns3 file.

    Executes the lock file (which is itself a Python snippet produced by the
    configure step) to populate a build_info dict. Returns a tuple
    (build_info, ns3_modules), where ns3_modules is None when the lock file
    is absent or *output_directory* is falsy; when tests are enabled, the
    module list also includes the matching "<module>-test" targets.
    """
    # Check the .lock-ns3 for the build type (in case there are multiple cmake cache folders
    ns3_modules_tests = []
    ns3_modules = None

    # Defaults used when the lock file does not define a key.
    build_info = {
        "NS3_ENABLED_MODULES": [],
        "BUILD_PROFILE": None,
        "VERSION": None,
        "ENABLE_EXAMPLES": False,
        "ENABLE_SUDO": False,
        "ENABLE_TESTS": False,
        "BUILD_VERSION_STRING": None,
        "FETCH_NETANIM_VISUALIZER": False,
    }
    if output_directory and os.path.exists(lock_file):
        # The lock file is trusted local output of our own configure step;
        # exec() fills build_info with the recorded settings.
        exec(open(lock_file).read(), globals(), build_info)
        ns3_modules = build_info["NS3_ENABLED_MODULES"]
    if ns3_modules:
        ns3_modules.extend(build_info["NS3_ENABLED_CONTRIBUTED_MODULES"])
        if build_info["ENABLE_TESTS"]:
            # Each enabled module also gets its test library target.
            ns3_modules_tests = [x + "-test" for x in ns3_modules]
            ns3_modules = ns3_modules + ns3_modules_tests
    return build_info, ns3_modules
|
2021-11-10 22:28:44 -03:00
|
|
|
|
|
|
|
|
|
2021-12-10 02:13:43 +00:00
|
|
|
def print_and_buffer(message):
    """Append *message* to the global command log flushed by exit_handler()."""
    global print_buffer
    print_buffer = print_buffer + "\n" + message
|
|
|
|
|
|
|
|
|
|
|
2022-12-09 00:35:53 -03:00
|
|
|
def remove_dir(dir_to_remove, dry_run, directory_qualifier=""):
    """Remove a directory tree, refusing to touch anything outside ns3_path.

    In dry-run mode the equivalent shell command (or the safety violation)
    is only logged; otherwise the tree is deleted and violations raise.
    *directory_qualifier* is a human-readable tag used in error messages.
    """
    dir_to_remove = os.path.realpath(os.path.abspath(dir_to_remove))
    if not os.path.exists(dir_to_remove):
        return

    # Refuse to delete the ns-3 directory itself or anything outside it.
    outside_tree = ".." in os.path.relpath(dir_to_remove, ns3_path)
    is_ns3_root = os.path.abspath(dir_to_remove) == os.path.abspath(ns3_path)
    if outside_tree or is_ns3_root:
        # In case the directory to remove isn't within
        # the current ns-3 directory, print an error
        # message for the dry-run case
        # Or throw an exception in a normal run
        error_message = (
            f"The {directory_qualifier} directory '{dir_to_remove}' "
            "is not within the current ns-3 directory. "
            "Deleting it can cause data loss."
        )
        if dry_run:
            print_and_buffer(error_message)
            return
        raise Exception(error_message)

    # Remove directories that are within the current ns-3 directory
    print_and_buffer("rm -R %s" % os.path.relpath(dir_to_remove, ns3_path))
    if not dry_run:
        shutil.rmtree(dir_to_remove, ignore_errors=True)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def remove_file(file_to_remove, dry_run):
    """Delete a single file if it exists, logging the equivalent shell command."""
    file_to_remove = os.path.realpath(file_to_remove)
    if not os.path.exists(file_to_remove):
        return
    print_and_buffer("rm -R %s" % os.path.relpath(file_to_remove, ns3_path))
    if not dry_run:
        os.remove(file_to_remove)
|
|
|
|
|
|
|
|
|
|
|
2021-12-10 02:13:43 +00:00
|
|
|
def clean_cmake_artifacts(dry_run=False):
    """Remove the build output, CMake cache folders and the ns-3 lock file."""
    remove_dir(out_dir, dry_run, "output")

    # Drop every directory that holds a CMake cache.
    # NOTE(review): without recursive=True, "**" matches only one directory
    # level here (unlike the recursive globs in the docs cleanup) — confirm
    # whether deeper caches are intentionally left alone.
    for cmake_cache_file in glob.glob("%s/**/CMakeCache.txt" % ns3_path):
        remove_dir(os.path.dirname(cmake_cache_file), dry_run, "CMake cache")

    # Well-known build leftovers in the tree root.
    for leftover in map(append_to_ns3_path, ["testpy-output", "__pycache__", "build", "cmake-cache"]):
        remove_dir(leftover, dry_run)

    remove_file(lock_file, dry_run)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def clean_docs_and_tests_artifacts(dry_run=False):
    """Remove generated documentation and test artifacts under doc/."""
    docs_dir = append_to_ns3_path("doc")

    # Individual files produced by doxygen and the introspection tools.
    file_artifacts = [
        "doxygen.log",
        "doxygen.warnings.log",
        "introspected-command-line.h",
        "introspected-doxygen.h",
        "ns3-object.txt",
    ]

    # Directories produced by the doxygen/sphinx builds, found recursively.
    doomed_dirs = [os.path.join(docs_dir, sub) for sub in ("html", "html-warn")]
    doomed_dirs += glob.glob(f"{docs_dir}/**/build", recursive=True)
    doomed_dirs += glob.glob(f"{docs_dir}/**/source-temp", recursive=True)

    for directory in doomed_dirs:
        remove_dir(directory, dry_run)

    for filename in file_artifacts:
        remove_file(os.path.join(docs_dir, filename), dry_run)
|
2021-11-10 22:28:44 -03:00
|
|
|
|
|
|
|
|
|
2023-01-31 21:56:51 -03:00
|
|
|
def clean_pip_packaging_artifacts(dry_run=False):
    """Remove directories left behind by pip/wheel packaging.

    When dry_run is True the removals are only echoed, not performed.
    """
    for packaging_dir in ("dist", "nsnam.egg-info", "wheelhouse"):
        remove_dir(append_to_ns3_path(packaging_dir), dry_run)
|
2021-11-10 22:28:44 -03:00
|
|
|
|
|
|
|
|
|
2023-07-27 00:44:50 -03:00
|
|
|
def clean_vcpkg_artifacts(dry_run=False):
    """Remove the vcpkg checkout under the ns-3 root.

    When dry_run is True the removal is only echoed, not performed.
    """
    vcpkg_dir = append_to_ns3_path("vcpkg")
    remove_dir(vcpkg_dir, dry_run)
|
2023-07-27 00:44:50 -03:00
|
|
|
|
|
|
|
|
|
2021-12-05 21:53:49 +00:00
|
|
|
def search_cmake_cache(build_profile):
    """Locate the CMake cache folder matching *build_profile* and pick a generator.

    Scans every CMakeCache.txt under the ns-3 root for a matching
    ``build_profile:INTERNAL`` entry (any profile matches when *build_profile*
    is falsy) and reads the cached ``CMAKE_GENERATOR``. When no generator is
    cached, probes the system for an available build tool supported by the
    installed CMake.

    Returns (cache_folder_or_None, generator_name).
    Raises Exception when no cache has a generator and no build tool is found.
    """
    # Search for the CMake cache
    # NOTE(review): without recursive=True, "**" only expands one level deep.
    cmake_cache_files = glob.glob("%s/**/CMakeCache.txt" % ns3_path)
    current_cmake_cache_folder = None
    current_cmake_generator = None

    if cmake_cache_files:
        # In case there are multiple cache files, get the correct one
        for cmake_cache_file in cmake_cache_files:
            # We found the right cache folder
            if current_cmake_cache_folder and current_cmake_generator:
                break

            # Still looking for it: reset state for this candidate cache file
            current_cmake_cache_folder = None
            current_cmake_generator = None
            with open(cmake_cache_file, "r") as f:
                lines = f.read().split("\n")

            while len(lines):
                line = lines[0]
                lines.pop(0)

                # Check for EOF (both values already found for this file)
                if current_cmake_cache_folder and current_cmake_generator:
                    break

                # Check the build profile
                if "build_profile:INTERNAL" in line:
                    if build_profile:
                        # Accept this cache only if its profile matches the requested one
                        if build_profile == line.split("=")[-1]:
                            current_cmake_cache_folder = os.path.dirname(cmake_cache_file)
                    else:
                        # No profile requested: any configured cache is acceptable
                        current_cmake_cache_folder = os.path.dirname(cmake_cache_file)

                # Check the generator
                if "CMAKE_GENERATOR:" in line:
                    current_cmake_generator = line.split("=")[-1]

    if not current_cmake_generator:
        # Ask CMake which generators this installation supports
        supported_cmake_generators = subprocess.check_output(["cmake", "--help"]).decode()
        # Search for available generators
        cmake_build_tool_to_generator_map = {
            "msbuild": "Visual Studio 17 2022",
            "ninja": "Ninja",
            "make": "Unix Makefiles",
            "xcodebuild": "Xcode",
        }
        available_generators = []
        for build_tool, generator in cmake_build_tool_to_generator_map.items():
            # Require both the build tool on PATH and CMake support for its generator
            if shutil.which(build_tool) and generator in supported_cmake_generators:
                available_generators.append(generator)

        # Select the first one
        if len(available_generators) == 0:
            raise Exception("No generator available.")

        current_cmake_generator = available_generators[0]

    return current_cmake_cache_folder, current_cmake_generator
|
|
|
|
|
|
|
|
|
|
|
2022-01-10 22:36:50 -03:00
|
|
|
def project_not_configured(config_msg=""):
    """Tell the user the project needs configuring, then abort with exit code 1.

    *config_msg* is appended verbatim to the suggested configure command.
    """
    message = "You need to configure ns-3 first: try ./ns3 configure%s" % config_msg
    print(message)
    exit(1)
|
|
|
|
|
|
|
|
|
|
|
2021-11-10 22:28:44 -03:00
|
|
|
def check_config(current_cmake_cache_folder):
    """Print the saved ns-3 configuration table, or abort if not configured."""
    # Without a known cache folder there is nothing to report.
    if current_cmake_cache_folder is None:
        project_not_configured()

    # The configuration summary is written next to the CMake cache.
    config_table = current_cmake_cache_folder + os.sep + "ns3config.txt"
    if not os.path.exists(config_table):
        project_not_configured()

    with open(config_table, "r") as config:
        contents = config.read()
    print(contents)
|
|
|
|
|
|
|
|
|
|
|
2021-12-10 21:08:23 -03:00
|
|
|
def project_configured(current_cmake_cache_folder):
    """Return True when the given cache folder exists and holds a CMakeCache.txt."""
    # A falsy folder (None or empty string) means no configuration at all.
    if not current_cmake_cache_folder:
        return False
    cache_file = os.sep.join([current_cmake_cache_folder, "CMakeCache.txt"])
    return os.path.exists(current_cmake_cache_folder) and os.path.exists(cache_file)
|
|
|
|
|
|
|
|
|
|
|
2023-11-19 20:07:19 -03:00
|
|
|
def configure_cmake(
    cmake, args, current_cmake_cache_folder, current_cmake_generator, output, dry_run=False
):
    """Assemble the full CMake command line from *args* and run it.

    Creates the cache folder if the project was never configured, translates
    every ns3 command-line option into its -DNS3_* / -DCMAKE_* flag, echoes
    the resulting command, and (unless *dry_run*) invokes CMake, exiting the
    script on failure. On success the scratch-source list is refreshed.
    """
    # Aggregate all flags to configure CMake
    cmake_args = [cmake, "-S", ns3_path]

    if not project_configured(current_cmake_cache_folder):
        # Create a new cmake_cache folder if one does not exist
        current_cmake_cache_folder = os.sep.join([ns3_path, "cmake-cache"])
        if not os.path.exists(current_cmake_cache_folder):
            print_and_buffer("mkdir %s" % os.path.relpath(current_cmake_cache_folder, ns3_path))
            if not dry_run:
                os.makedirs(current_cmake_cache_folder, exist_ok=True)

        # Set default build type to default if a previous cache doesn't exist
        if args.build_profile is None:
            args.build_profile = "default"

        # Set generator if a previous cache doesn't exist
        if args.G is None:
            args.G = current_cmake_generator

    # Get the current Python information to supply
    # CMake with the respective installation
    if args.enable_python_bindings is True:
        import sysconfig

        cmake_args.append(f"-DPython3_LIBRARY_DIRS={sysconfig.get_config_var('LIBDIR')}")
        cmake_args.append(f"-DPython3_INCLUDE_DIRS={sysconfig.get_config_var('INCLUDEPY')}")
        cmake_args.append(f"-DPython3_EXECUTABLE={sys.executable}")

    cmake_args.extend(["-B", current_cmake_cache_folder])

    # C++ standard
    if args.cxx_standard is not None:
        cmake_args.append("-DCMAKE_CXX_STANDARD=%s" % args.cxx_standard)

    # Build type
    if args.build_profile is not None:
        args.build_profile = args.build_profile.lower()
        if args.build_profile not in [
            "debug",
            "default",
            "release",
            "optimized",
            "minsizerel",
            "relwithdebinfo",
        ]:
            raise Exception("Unknown build type")
        else:
            # Each profile fixes CMAKE_BUILD_TYPE plus the assert/log/werror trio.
            if args.build_profile == "debug":
                cmake_args.extend(
                    "-DCMAKE_BUILD_TYPE=debug -DNS3_ASSERT=ON -DNS3_LOG=ON -DNS3_WARNINGS_AS_ERRORS=ON".split()
                )
            elif args.build_profile in ["default", "relwithdebinfo"]:
                cmake_args.extend(
                    "-DCMAKE_BUILD_TYPE=default -DNS3_ASSERT=ON -DNS3_LOG=ON -DNS3_WARNINGS_AS_ERRORS=OFF".split()
                )
            elif args.build_profile in ["release", "optimized"]:
                cmake_args.extend(
                    "-DCMAKE_BUILD_TYPE=release -DNS3_ASSERT=OFF -DNS3_LOG=OFF -DNS3_WARNINGS_AS_ERRORS=OFF".split()
                )
            else:
                cmake_args.extend(
                    "-DCMAKE_BUILD_TYPE=minsizerel -DNS3_ASSERT=OFF -DNS3_LOG=OFF -DNS3_WARNINGS_AS_ERRORS=OFF".split()
                )
        # Native (march=native style) optimizations only for the "optimized" profile
        cmake_args.append(
            "-DNS3_NATIVE_OPTIMIZATIONS=%s" % on_off((args.build_profile == "optimized"))
        )

    # (CMake flag suffix, argparse attribute) pairs for simple ON/OFF options
    options = (
        ("ASSERT", "asserts"),
        ("CLANG_TIDY", "clang_tidy"),
        ("COVERAGE", "gcov"),
        ("DES_METRICS", "des_metrics"),
        ("DPDK", "dpdk"),
        ("EIGEN", "eigen"),
        ("ENABLE_BUILD_VERSION", "build_version"),
        ("ENABLE_SUDO", "sudo"),
        ("EXAMPLES", "examples"),
        ("GSL", "gsl"),
        ("GTK3", "gtk"),
        ("LOG", "logs"),
        ("MONOLIB", "monolib"),
        ("MPI", "mpi"),
        ("MTP", "mtp"),
        ("NINJA_TRACING", "ninja_tracing"),
        ("PRECOMPILE_HEADERS", "precompiled_headers"),
        ("PYTHON_BINDINGS", "python_bindings"),
        ("SANITIZE", "sanitizers"),
        ("STATIC", "static"),
        ("TESTS", "tests"),
        ("VERBOSE", "verbose"),
        ("WARNINGS", "warnings"),
        ("WARNINGS_AS_ERRORS", "werror"),
    )
    for cmake_flag, option_name in options:
        arg = on_off_condition(args, cmake_flag, option_name)
        if arg:
            # Explicit user flags override the profile defaults: drop the
            # opposite setting if the profile already appended it above.
            is_on = "=ON" in arg
            reverse = arg.replace("=ON" if is_on else "=OFF", "=OFF" if is_on else "=ON")
            if reverse in cmake_args:
                cmake_args.remove(reverse)
            cmake_args.append(arg)

    if args.lcov_zerocounters is not None:
        cmake_args.append("-DNS3_COVERAGE_ZERO_COUNTERS=%s" % on_off(args.lcov_zerocounters))

    # Output, Brite, Click and Openflow dirs
    if args.output_directory is not None:
        cmake_args.append("-DNS3_OUTPUT_DIRECTORY=%s" % args.output_directory)

    if args.with_brite is not None:
        cmake_args.append("-DNS3_WITH_BRITE=%s" % args.with_brite)

    if args.with_click is not None:
        cmake_args.append("-DNS3_WITH_CLICK=%s" % args.with_click)

    if args.with_openflow is not None:
        cmake_args.append("-DNS3_WITH_OPENFLOW=%s" % args.with_openflow)

    if args.prefix is not None:
        cmake_args.append("-DCMAKE_INSTALL_PREFIX=%s" % args.prefix)

    # Process enabled/disabled modules
    if args.enable_modules is not None:
        cmake_args.append("-DNS3_ENABLED_MODULES=%s" % args.enable_modules)

    if args.disable_modules is not None:
        cmake_args.append("-DNS3_DISABLED_MODULES=%s" % args.disable_modules)

    if args.filter_module_examples_and_tests is not None:
        cmake_args.append(
            "-DNS3_FILTER_MODULE_EXAMPLES_AND_TESTS=%s" % args.filter_module_examples_and_tests
        )

    # Try to set specified generator (will probably fail if there is an old cache)
    if args.G:
        cmake_args.extend(["-G", args.G])
        if "Visual Studio" in args.G:
            # Visual Studio additionally needs the 64-bit platform and ClangCL toolset
            cmake_args.extend(["-A", "x64", "-T", "ClangCL"])

    if args.trace_cmake_perf:
        # os.path.relpath(ns3_path, ns3_path) is ".", so the trace lands in the ns-3 root
        cmake_performance_trace = os.path.join(
            os.path.relpath(ns3_path, ns3_path), "cmake_performance_trace.log"
        )
        cmake_args.extend(
            ["--profiling-format=google-trace", "--profiling-output=" + cmake_performance_trace]
        )

    # Enable warnings for uninitialized variable usage
    cmake_args.append("--warn-uninitialized")

    # Append CMake flags passed using the -- separator
    cmake_args.extend(args.program_args)

    # Echo out the configure command
    print_and_buffer(" ".join(cmake_args))

    # Run cmake
    if not dry_run:
        proc_env = os.environ.copy()
        ret = subprocess.run(cmake_args, stdout=output, env=proc_env)
        if ret.returncode != 0:
            exit(ret.returncode)

        update_scratches_list(current_cmake_cache_folder)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def update_scratches_list(current_cmake_cache_folder):
    """Record the current scratch .cc sources in the CMake cache folder.

    The "ns3scratches" file lets a later run detect added/removed scratch
    programs and trigger a reconfiguration step if needed.
    """
    scratch_sources = glob.glob(append_to_ns3_path("scratch", "**", "*.cc"), recursive=True)
    scratches_file = os.path.join(current_cmake_cache_folder, "ns3scratches")
    with open(scratches_file, "w") as scratches:
        scratches.write("\n".join(scratch_sources))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def refresh_cmake(current_cmake_cache_folder, output):
    """Re-run CMake on an existing cache folder and refresh the scratch list.

    Exits the script with CMake's return code if the refresh fails.
    """
    cmake_binary, _ = cmake_check_version()
    result = subprocess.run([cmake_binary, ".."], cwd=current_cmake_cache_folder, stdout=output)
    if result.returncode:
        exit(result.returncode)
    update_scratches_list(current_cmake_cache_folder)
|
|
|
|
|
|
2021-11-10 22:28:44 -03:00
|
|
|
|
2021-12-05 21:53:49 +00:00
|
|
|
def get_program_shortcuts(build_profile, ns3_version):
    """Build the map from user-facing shortcut names to runnable program paths.

    Reads the runnable programs/scripts lists from the .lock-ns3 file and, for
    each program, registers every path suffix (e.g. src/aodv/examples/aodv is
    reachable as aodv/examples/aodv, examples/aodv and aodv) as a shortcut.
    Ambiguous shortcuts are remapped to the list of colliding full paths.
    Returns a dict: shortcut -> list of program paths.
    """
    # Import programs from .lock-ns3
    # NOTE(review): the lock file is executed as Python; its assignments
    # populate programs_dict.
    programs_dict = {}
    exec(open(lock_file).read(), globals(), programs_dict)

    # We can now build a map to simplify things for users (at this point we could remove versioning prefix/suffix)
    ns3_program_map = {}
    longest_shortcut_map = {}

    if programs_dict["FETCH_NETANIM_VISUALIZER"]:
        # netanim is an imported external target living under bin/
        ns3_program_map["netanim"] = [programs_dict["out_dir"] + "/bin/netanim"]

    for program in programs_dict["ns3_runnable_programs"]:
        # Skip precompiled-header helper executables
        if "pch_exec" in program:
            continue
        temp_path = program.replace(out_dir, "")
        # Sometimes Windows uses \\, sometimes /
        # quite the mess
        temp_path = temp_path.split(os.sep if os.sep in temp_path else "/")
        temp_path.pop(0)  # remove first path separator

        # Remove version prefix and build type suffix from shortcuts (or keep them too?)
        temp_path[-1] = (
            temp_path[-1].replace("-" + build_profile, "").replace("ns" + ns3_version + "-", "")
        )

        # Deal with scratch subdirs
        if "scratch" in temp_path and len(temp_path) > 3:
            # Scratch subdir executables carry a "subdir_" name prefix; strip it
            subdir = "_".join([*temp_path[2:-1], ""])
            temp_path[-1] = temp_path[-1].replace(subdir, "")

        # Check if there is a .cc file for that specific program
        source_file_path = os.sep.join(temp_path) + ".cc"
        source_shortcut = False
        if os.path.exists(append_to_ns3_path(source_file_path)):
            source_shortcut = True

        program = program.strip()
        longest_shortcut = None
        while len(temp_path):
            # Shortcuts: /src/aodv/examples/aodv can be accessed with aodv/examples/aodv, examples/aodv, aodv
            shortcut_path = os.sep.join(temp_path)
            if not longest_shortcut:
                # First iteration holds the most specific (longest) form
                longest_shortcut = shortcut_path

            # Store longest shortcut path for collisions
            if shortcut_path not in longest_shortcut_map:
                longest_shortcut_map[shortcut_path] = [longest_shortcut]
            else:
                longest_shortcut_map[shortcut_path].append(longest_shortcut)

            ns3_program_map[shortcut_path] = [program]

            # Add a shortcut with .exe suffix when running on Windows
            if sys.platform == "win32":
                ns3_program_map[shortcut_path.replace("\\", "/")] = [program]
                ns3_program_map[shortcut_path + ".exe"] = [program]
                ns3_program_map[shortcut_path.replace("\\", "/") + ".exe"] = [program]

            if source_shortcut:
                # Also allow referring to the program by its source file name
                cc_shortcut_path = shortcut_path + ".cc"
                ns3_program_map[cc_shortcut_path] = [program]
                if sys.platform == "win32":
                    ns3_program_map[cc_shortcut_path] = [program]
                    ns3_program_map[cc_shortcut_path.replace("\\", "/")] = [program]

                # Store longest shortcut path for collisions
                if cc_shortcut_path not in longest_shortcut_map:
                    longest_shortcut_map[cc_shortcut_path] = [longest_shortcut]
                else:
                    longest_shortcut_map[cc_shortcut_path].append(longest_shortcut)
            temp_path.pop(0)

    # Filter collisions: shortcuts claimed by more than one program
    collisions = list(filter(lambda x: x if len(x[1]) > 1 else None, longest_shortcut_map.items()))
    for colliding_shortcut, longest_shortcuts in collisions:
        # Ambiguous shortcut: map it to all candidate full paths instead
        ns3_program_map[colliding_shortcut] = longest_shortcuts

    if programs_dict["ns3_runnable_scripts"]:
        # Scratch Python scripts are runnable too
        scratch_scripts = glob.glob(append_to_ns3_path("scratch", "*.py"), recursive=True)
        programs_dict["ns3_runnable_scripts"].extend(scratch_scripts)

    for program in programs_dict["ns3_runnable_scripts"]:
        temp_path = program.replace(ns3_path, "").split(os.sep)
        program = program.strip()
        while len(temp_path):
            shortcut_path = os.sep.join(temp_path)
            ns3_program_map[shortcut_path] = [program]
            temp_path.pop(0)
    return ns3_program_map
|
|
|
|
|
|
|
|
|
|
|
2022-01-23 17:30:08 -03:00
|
|
|
def parse_version(version_str):
    """Parse a dotted version string into a tuple of ints.

    Any pre-release/suffix part after the first dash is discarded, so
    "3.22.1-rc4" parses as (3, 22, 1). The tuples compare correctly with
    the standard tuple ordering.
    """
    numeric_part = version_str.split("-")[0]
    return tuple(int(component) for component in numeric_part.split("."))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def cmake_check_version():
    """Locate the CMake executable and verify it is recent enough.

    Returns a (path, version_string) tuple. Exits the script when CMake is
    missing from the PATH or older than the minimum supported version.
    """
    minimum_cmake_version = "3.13.0"

    # Prefer "cmake3" (RHEL-style naming), falling back to plain "cmake".
    cmake = shutil.which("cmake3") or shutil.which("cmake")
    if not cmake:
        print(
            f"Error: CMake not found; please install version {minimum_cmake_version} or greater, or modify {path_variable}"
        )
        exit(1)

    # Trim cmake executable extension (Windows reports .exe/.EXE)
    cmake = cmake.replace(".EXE", "").replace(".exe", "")

    version_output = subprocess.check_output([cmake, "--version"]).decode("utf-8")
    version = re.findall("version (.*)", version_output)[0]
    if parse_version(version) < parse_version(minimum_cmake_version):
        print(
            f"Error: CMake found at {cmake} but version {version} is older than {minimum_cmake_version}"
        )
        exit(1)
    return cmake, version
|
|
|
|
|
|
|
|
|
|
|
2023-11-19 20:07:19 -03:00
|
|
|
def cmake_build(
    current_cmake_cache_folder, output, jobs, target=None, dry_run=False, build_verbose=False
):
    """Run "cmake --build" on the given cache folder.

    :param current_cmake_cache_folder: folder holding the CMake cache to build.
    :param output: non-None enables quiet mode (build output is captured and
        only printed on failure); None streams output to the terminal.
    :param jobs: parallel job count passed to -j (falsy means CMake's default).
    :param target: optional specific target; None builds everything.
    :param dry_run: only echo the command without executing it.
    :param build_verbose: export VERBOSE=1 so the underlying tool echoes commands.

    Exits the script with the build's return code on failure.
    """
    # The version is unused here; match the sibling callers' "cmake, _" style.
    cmake, _ = cmake_check_version()

    cmake_args = [cmake, "--build", current_cmake_cache_folder]
    if jobs:
        cmake_args.extend(["-j", str(jobs)])

    if target:
        cmake_args.extend(["--target", target])

    print_and_buffer(" ".join(cmake_args))
    if not dry_run:
        # Assume quiet is not enabled, and print things normally
        kwargs = {"stdout": None, "stderr": None}

        proc_env = os.environ.copy()
        if build_verbose:
            # If verbose is enabled, we print everything to the terminal
            # and set the environment variable
            proc_env.update({"VERBOSE": "1"})

        if output is not None:
            # If quiet is enabled, we pipe the output of the
            # build and only print in case of failure
            kwargs["stdout"] = subprocess.PIPE
            kwargs["stderr"] = subprocess.PIPE

        ret = subprocess.run(
            cmake_args,
            env=proc_env,
            **kwargs,
        )

        # Print errors in case compilation fails and output != None (quiet)
        if ret.returncode != 0 and output is not None:
            print(ret.stdout.decode())
            print(ret.stderr.decode())

        # In case of failure, exit prematurely with the return code from the build
        if ret.returncode != 0:
            exit(ret.returncode)
|
2021-11-10 22:28:44 -03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def extract_cmakecache_settings(current_cmake_cache_folder):
    """Read CMakeCache.txt and return the ns-3 relevant settings as a dict.

    Collects every NS3_* entry plus a handful of CMake entries worth
    preserving across a forced refresh (build type, generator, compiler,
    flags and install prefix).

    :param current_cmake_cache_folder: folder containing CMakeCache.txt.
    :return: dict mapping setting name to its cached value.
    :raises FileNotFoundError: when CMakeCache.txt does not exist.
    """
    # A missing cache simply propagates as FileNotFoundError to the caller
    # (the previous try/except just re-raised the same exception).
    with open(
        current_cmake_cache_folder + os.sep + "CMakeCache.txt", "r", encoding="utf-8"
    ) as f:
        contents = f.read()

    # NS3-specific settings first, then the selected CMake entries.
    current_settings = re.findall("(NS3_.*):.*=(.*)", contents)
    for preserved_key in (
        "CMAKE_BUILD_TYPE",  # build type
        "CMAKE_GENERATOR",  # generator
        "CMAKE_CXX_COMPILER",  # C++ compiler
        "CMAKE_CXX_FLAGS",  # C++ flags
        "CMAKE_INSTALL_PREFIX",  # installation directory
    ):
        current_settings.extend(re.findall("(%s):.*=(.*)" % preserved_key, contents))

    # Transform list into dictionary
    settings_dictionary = dict(current_settings)
    # Remove cached options or CMake will warn you. pop() instead of del so a
    # cache that lacks the entry does not raise KeyError.
    settings_dictionary.pop("NS3_INT64X64-STRINGS", None)

    # Return dictionary with NS3-related CMake settings
    return settings_dictionary
|
2021-11-10 22:28:44 -03:00
|
|
|
|
|
|
|
|
|
2021-12-10 02:13:43 +00:00
|
|
|
def reconfigure_cmake_to_force_refresh(cmake, current_cmake_cache_folder, output, dry_run=False):
    """Wipe the CMake cache and reconfigure while preserving NS3 settings.

    The current CMakeCache settings are saved to a JSON backup (or loaded
    from an existing backup left by a previously failed run), the cache
    folder is recreated, and CMake is re-run with those settings so newly
    installed/removed packages are re-detected.

    :raises Exception: when the reconfiguration run fails; the backup file
        is kept so a retry can recover the settings.
    """
    import json

    settings_bak_file = "settings.json"

    # Extract settings or recover from the backup
    # (an existing backup means a previous force-refresh did not finish)
    if not os.path.exists(settings_bak_file):
        settings = extract_cmakecache_settings(current_cmake_cache_folder)
    else:
        with open(settings_bak_file, "r", encoding="utf-8") as f:
            settings = json.load(f)

    # Delete cache folder and then recreate it
    cache_path = os.path.relpath(current_cmake_cache_folder, ns3_path)
    print_and_buffer("rm -R %s; mkdir %s" % (cache_path, cache_path))
    if not dry_run:
        shutil.rmtree(current_cmake_cache_folder)
        os.mkdir(current_cmake_cache_folder)

    # Save settings backup to prevent loss
    with open(settings_bak_file, "w", encoding="utf-8") as f:
        json.dump(settings, f, indent=2)

    # Reconfigure CMake preserving previous NS3 settings
    # (only settings with non-empty values are forwarded)
    cmake_args = [cmake]
    for setting in settings.items():
        if setting[1]:
            cmake_args.append("-D%s=%s" % setting)
    cmake_args.append("..")

    # Echo out the configure command
    print_and_buffer(
        "cd %s; %s ; cd %s"
        % (
            os.path.relpath(ns3_path, current_cmake_cache_folder),
            " ".join(cmake_args),
            os.path.relpath(current_cmake_cache_folder, ns3_path),
        )
    )

    # Call cmake
    if not dry_run:
        ret = subprocess.run(cmake_args, cwd=current_cmake_cache_folder, stdout=output)

        # If it succeeds, delete backup, otherwise raise exception
        if ret.returncode == 0:
            os.remove(settings_bak_file)
        else:
            raise Exception(
                "Reconfiguring CMake to force refresh failed. "
                "A backup of the settings was saved in %s" % settings_bak_file
            )

    update_scratches_list(current_cmake_cache_folder)
|
2021-11-10 22:28:44 -03:00
|
|
|
|
2022-02-09 09:25:09 -03:00
|
|
|
|
2021-12-05 21:53:49 +00:00
|
|
|
def get_target_to_build(program_path, ns3_version, build_profile):
    """Translate a runnable program path into the CMake target that builds it.

    Returns None for Python scripts (nothing to build). Scratch programs map
    to underscore-joined targets to avoid collisions with module examples;
    other programs use their bare name with the version prefix and build
    profile suffix stripped. Exits the script when no target can be derived.
    """
    # Python scripts are not compiled, hence no build target
    if ".py" in program_path:
        return None

    # "release" binaries carry no suffix; every other profile is appended
    build_profile_suffix = "" if build_profile in ["release"] else "-" + build_profile
    program_name = ""

    # Imported external targets whose run name differs from their build target
    special_run_to_build_targets = {
        "netanim": "netanim_visualizer",
    }

    try:
        # join(*[]) with an empty findall result raises TypeError, which is
        # how the "not a typical ns-3 executable" case is detected below
        program_name = "".join(
            *re.findall("(.*)ns%s-(.*)%s" % (ns3_version, build_profile_suffix), program_path)
        )
    except TypeError:
        # This is not your typical ns-3 executable.
        # Maybe some imported target that had a conflicting name, like netanim
        program_name = os.path.basename(program_path)
        if program_name in special_run_to_build_targets:
            return special_run_to_build_targets[program_name]
        # Or maybe it is an error indeed
        print("Target to build does not exist: %s" % program_path)
        exit(1)

    if "scratch" in os.path.relpath(program_path, ns3_path):
        # Get the path to the program and replace slashes with underlines
        # to get unique targets for CMake, preventing collisions with modules examples
        return program_name.replace(out_dir, "").replace("/", "_")[1:]
    else:
        # Other programs just use their normal names (without version prefix and build_profile suffix) as targets
        return program_name.split("/")[-1]
|
2021-11-10 22:28:44 -03:00
|
|
|
|
|
|
|
|
|
2023-11-19 20:07:19 -03:00
|
|
|
def configuration_step(
    current_cmake_cache_folder, current_cmake_generator, args, output, dry_run=False
):
    """Drive the project (re)configuration, then terminate the script.

    Handles both the --force-refresh path (wipe and rebuild the cache while
    preserving settings) and the regular configure path. Never returns:
    always exits with status 0 on success.
    """
    # Locate a usable CMake binary (aborts the script if none is found).
    cmake, _ = cmake_check_version()

    if args.force_refresh:
        # Preserve the current CMakeCache settings, delete the cache, and
        # reconfigure so installed/removed packages are re-detected.
        reconfigure_cmake_to_force_refresh(cmake, current_cmake_cache_folder, output, dry_run)
        exit(0)

    # Regular path: configure/reconfigure/refresh the project.
    configure_cmake(
        cmake, args, current_cmake_cache_folder, current_cmake_generator, output, dry_run
    )

    # Manual configuration always ends the script here.
    exit(0)
|
2021-11-10 22:28:44 -03:00
|
|
|
|
|
|
|
|
|
2023-11-19 20:07:19 -03:00
|
|
|
def build_step(
    args,
    build_and_run,
    target_to_run,
    current_cmake_cache_folder,
    ns3_modules,
    ns3_version,
    build_profile,
    output,
):
    """Build everything, a list of named targets, or the program about to run.

    Three cases: "./ns3 build" with no arguments builds the whole project;
    "./ns3 build t1 t2 ..." builds each listed target (module names pass
    through unchanged, documentation/utility targets are remapped, anything
    else is resolved via get_target_to_build); finally, when *build_and_run*
    is set, the target backing *target_to_run* is built.
    """
    # There is one scenario where we build everything: ./ns3 build
    if "build" in args and len(args.build) == 0:
        cmake_build(
            current_cmake_cache_folder,
            jobs=args.jobs,
            output=output,
            dry_run=args.dry_run,
            build_verbose=args.verbose,
        )

    # If we are building specific targets, we build them one by one
    if "build" in args:
        # Targets that exist in CMake but do not correspond to an executable
        non_executable_targets = [
            "assemble-introspected-command-line",
            "check-version",
            "cmake-format",
            "cmake-format-check",
            "coverage_cobertura",
            "coverage_gcc",
            "coverage_html",
            "docs",
            "doxygen",
            "doxygen-no-build",
            "installation",
            "sphinx",
            "manual",
            "models",
            "ninjaTrace",
            "timeTraceReport",
            "tutorial",
            "contributing",
            "install",
            "uninstall",
        ]
        # Build targets in the list
        # NOTE(review): the "docs" case appends to args.build while this loop
        # iterates it; that is how "doxygen" gets queued after "sphinx".
        for target in args.build:
            if target in ns3_modules:
                # Module names are already valid CMake targets
                pass
            elif target not in non_executable_targets:
                target = get_target_to_build(target, ns3_version, build_profile)
            else:
                # Sphinx target should have the sphinx prefix
                if target in ["contributing", "installation", "manual", "models", "tutorial"]:
                    target = "sphinx_%s" % target

                # Docs should build both doxygen and sphinx based docs
                if target == "docs":
                    target = "sphinx"
                    args.build.append("doxygen")

                if target == "check-version":
                    global run_verbose
                    run_verbose = False  # Do not print the equivalent cmake command

            cmake_build(
                current_cmake_cache_folder,
                jobs=args.jobs,
                target=target,
                output=output,
                dry_run=args.dry_run,
                build_verbose=args.verbose,
            )

    # The remaining case is when we want to build something to run
    if build_and_run:
        cmake_build(
            current_cmake_cache_folder,
            jobs=args.jobs,
            target=get_target_to_build(target_to_run, ns3_version, build_profile),
            output=output,
            dry_run=args.dry_run,
            build_verbose=args.verbose,
        )
|
2021-11-10 22:28:44 -03:00
|
|
|
|
|
|
|
|
|
2022-10-14 12:57:45 -03:00
|
|
|
def check_program_installed(program_name: str) -> str:
    """Return the absolute path to an executable, or exit if it is not installed.

    program_name: name of the executable to look up on the PATH

    Returns the path reported by shutil.which; exits the process with -1
    (after printing a message) when the program cannot be found.
    """
    program_path = shutil.which(program_name)
    if program_path is None:
        # Report the name exactly as the caller passed it: program names are
        # case-sensitive, and the previous .capitalize() produced misleading
        # messages such as "Executable 'Gdb' was not found".
        print("Executable '{program}' was not found".format(program=program_name))
        exit(-1)
    return program_path
|
|
|
|
|
|
2022-10-14 12:59:30 -03:00
|
|
|
|
2022-10-14 12:57:45 -03:00
|
|
|
def check_module_installed(module_name: str):
    """Ensure a Python module is importable; exit with -1 when it is not.

    module_name: dotted module name to import (e.g. "memray")
    """
    from importlib import import_module

    # Actually importing is the only reliable availability check: a module may
    # be found on disk yet still fail to import.
    try:
        import_module(module_name)
    except ImportError:
        print("Python module '{module}' was not found".format(module=module_name))
        exit(-1)
|
|
|
|
|
|
2022-10-14 12:59:30 -03:00
|
|
|
|
2021-12-10 21:08:23 -03:00
|
|
|
def run_step(args, target_to_run, target_args):
    """Execute the selected target with the ns-3 build environment set up.

    args: parsed argparse namespace; the run-related flags (shell, memray, cwd,
        heaptrack, valgrind, gdb, lldb, perf, visualize, command_template,
        program_args, dry_run) are read here
    target_to_run: executable path/name, python script, or shell to run
    target_args: extra arguments forwarded to the target

    Never returns normally: exits with the target's return code on failure,
    and with 0 otherwise.
    """
    # The build output's lib directory, needed on PATH/LD_LIBRARY_PATH so the
    # executable can find the ns-3 shared libraries
    libdir = "%s/lib" % out_dir

    custom_env = {
        "PATH": libdir,
        "PYTHONPATH": "%s/bindings/python" % out_dir,
    }
    if sys.platform != "win32":
        custom_env["LD_LIBRARY_PATH"] = libdir

    # Merge the custom entries into a copy of the current environment,
    # appending to pre-existing variables instead of replacing them
    proc_env = os.environ.copy()
    for key, value in custom_env.items():
        if key in proc_env:
            proc_env[key] += path_sep + value
        else:
            proc_env[key] = value

    debugging_software = []
    working_dir = ns3_path
    use_shell = False
    target_args += args.program_args

    if args.shell:
        target_to_run = "bash"
        use_shell = True
    else:
        # running a python script?
        if ".py" in target_to_run:
            target_args = [target_to_run] + target_args
            target_to_run = "python3"

        # running with memray?
        if args.memray:
            check_module_installed("memray")
            target_args = [
                "-m",
                "memray",
                "run",
                "-o",
                "memray.output",
                "--native",
            ] + target_args

        # running from ns-3-dev (ns3_path) or cwd
        if args.cwd:
            working_dir = args.cwd

        # running with heaptrack?
        if args.heaptrack:
            debugging_software.append(check_program_installed("heaptrack"))

        # running valgrind?
        if args.valgrind:
            debugging_software.extend(
                [check_program_installed("valgrind"), "--leak-check=full", "--show-leak-kinds=all"]
            )
            suppressions_file = os.path.join(ns3_path, ".ns3.supp")
            if os.path.exists(suppressions_file):
                debugging_software.append(f"--suppressions={suppressions_file}")

        # running gdb?
        if args.gdb:
            gdb_eval_command = []
            # gdb_eval env var makes gdb quit right after the run (used by CI)
            if os.getenv("gdb_eval"):
                gdb_eval_command.append("--eval-command=quit")
            debugging_software.extend([check_program_installed("gdb"), *gdb_eval_command, "--args"])

        # running lldb?
        if args.lldb:
            debugging_software.extend([check_program_installed("lldb"), "--"])

        # running with perf?
        if args.perf:
            debugging_software.extend(
                [
                    check_program_installed("perf"),
                    "record",
                    "--call-graph",
                    "dwarf",
                    "-e",
                    "cache-misses,branch-misses,cpu-cycles,stalled-cycles-frontend,stalled-cycles-backend",
                ]
            )

        # running with the visualizer?
        if args.visualize:
            target_args.append("--SimulatorImplementationType=ns3::VisualSimulatorImpl")

        # running with command template?
        if args.command_template:
            commands = (args.command_template % target_to_run).split()
            check_program_installed(commands[0])
            target_to_run = commands[0]
            target_args = commands[1:] + target_args

        # running mpi on the CI?
        if target_to_run in ["mpiexec", "mpirun"] and os.getenv("MPI_CI"):
            if shutil.which("ompi_info"):
                # Probe whether this OpenMPI accepts --allow-run-as-root
                # before committing to the flag
                try:
                    subprocess.check_call(
                        [target_to_run, "--allow-run-as-root", "cmake", "--version"],
                        stdout=subprocess.DEVNULL,
                        stderr=subprocess.DEVNULL,
                    )
                    target_args = ["--allow-run-as-root"] + target_args
                except subprocess.CalledProcessError:
                    pass
            target_args = ["--oversubscribe"] + target_args

    # Final command line: optional debugger/profiler wrapper, then the target
    program_arguments = [*debugging_software, target_to_run, *target_args]

    if run_verbose or args.dry_run:
        # Print an equivalent shell command the user could run by hand
        exported_variables = "export "
        for variable, value in custom_env.items():
            if variable == "PATH":
                value = path_variable + path_sep + libdir
            exported_variables += "%s=%s " % (variable, value)
        print_and_buffer(
            "cd %s; %s; %s"
            % (
                os.path.relpath(ns3_path, working_dir),
                exported_variables,
                " ".join(program_arguments),
            )
        )

    if not args.dry_run:
        try:
            subprocess.run(
                program_arguments, env=proc_env, cwd=working_dir, shell=use_shell, check=True
            )
        except subprocess.CalledProcessError as e:
            # Replace list of arguments with a single string
            e.cmd = " ".join(e.cmd)
            # Replace full path to binary to relative path
            e.cmd = e.cmd.replace(
                os.path.abspath(target_to_run), os.path.relpath(target_to_run, ns3_path)
            )
            # Print error message and forward the return code
            print(e)
            exit(e.returncode)
        except KeyboardInterrupt:
            print("Process was interrupted by the user")

    # Exit normally
    exit(0)
|
|
|
|
|
|
2021-12-10 21:08:23 -03:00
|
|
|
|
2022-07-10 18:56:31 -03:00
|
|
|
def non_ambiguous_program_target_list(programs: dict) -> list:
    """Return the shortest unambiguous shortcut for each program.

    programs: maps a shortcut string to the list of programs it may refer to;
        shortcuts matching more than one program are ambiguous and skipped.

    Returns one shortcut per program, in program discovery order.
    """
    # Group the unambiguous shortcuts by the single program they resolve to
    shortcuts_by_program = {}
    for shortcut, possible_programs in programs.items():
        if len(possible_programs) == 1:
            shortcuts_by_program.setdefault(possible_programs[0], []).append(shortcut)

    # min(..., key=len) returns the first shortest entry, matching the
    # previous sorted(...)[0] tie-break while avoiding the full sort
    return [min(shortcuts, key=len) for shortcuts in shortcuts_by_program.values()]
|
|
|
|
|
|
|
|
|
|
|
2022-06-10 16:31:21 -03:00
|
|
|
def print_targets_list(ns3_modules: list, ns3_programs: dict) -> None:
    """Print the buildable (module) and runnable (program) target names.

    ns3_modules: list of module target names
    ns3_programs: maps a shortcut to the list of programs it may refer to
        (reduced to one unambiguous shortcut per program before printing)
    """

    def list_to_table(targets_list: list) -> str:
        """Format target names into terminal-width columns; overlong names
        get one full line each at the end."""
        # Set column width and check how much is space is left at the end
        columnwidth = 30
        dead_space = TERMINAL_WIDTH % columnwidth

        # Filter the targets with names longer than the column width
        large_items = list(filter(lambda x: len(x) >= columnwidth, targets_list))

        # Then filter the targets with names shorter than the column width
        small_items = sorted(list(set(targets_list) - set(large_items)))

        # prev_new_line tracks the offset of the current line's start so that
        # (len(output) - prev_new_line) is the width written on this line
        prev_new_line = 0
        output = "\n"
        for item in small_items:
            current_end = len(output)
            # If the terminal width minus the written columns is smaller than the dead space,
            # we add a new line and start counting the width of the new line from it
            if TERMINAL_WIDTH - (current_end - prev_new_line) < dead_space:
                prev_new_line = len(output)
                output += "\n"
            # After the new line or space, add the item plus some spacing
            output += item + " " * (columnwidth - len(item))
        # Add a new line in case we did not fill all the columns
        # of the last small item line
        if len(output) - prev_new_line > 0:
            output += "\n"

        # The list of large items is printed next
        for large_item in sorted(large_items):
            output += large_item + "\n"
        return output

    print(
        """Buildable targets:{buildables}\n\nRunnable/Buildable targets:{runnables}
    """.format(
            buildables=list_to_table(sorted(ns3_modules)),
            runnables=list_to_table(non_ambiguous_program_target_list(ns3_programs)),
        )
    )
    return
|
|
|
|
|
|
|
|
|
|
|
2022-07-10 18:56:31 -03:00
|
|
|
def show_profile(build_profile, exit_early=True):
    """Print the configured build profile.

    build_profile: profile string from the lock file; falsy means the
        project was never configured, which is reported instead
    exit_early: when True (default), terminate the process after printing
    """
    if not build_profile:
        project_not_configured()
    else:
        print("Build profile: %s" % build_profile)

    if exit_early:
        exit(0)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def show_build_version(build_version_string, exit_early=True):
    """Print the ns-3 build version.

    build_version_string: version baked at configure time; None means the
        project was never configured, "" means the build-version feature is
        disabled (in which case git describe is attempted as a fallback)
    exit_early: when True (default), terminate the process after printing
    """
    if build_version_string is None:
        project_not_configured()

    # Fall back to `git describe` when the feature is off but this looks
    # like a git checkout with git available
    can_query_git = (
        build_version_string == ""
        and shutil.which("git")
        and os.path.exists(append_to_ns3_path(".git"))
    )
    if can_query_git:
        try:
            described = subprocess.check_output(
                ["git", "describe", "--dirty"], cwd=ns3_path
            ).decode()
            hint = (
                "Reconfigure with './ns3 configure --enable-build-version' "
                "to bake the version into the libraries."
            )
            build_version_string = described + hint
        except subprocess.CalledProcessError:
            pass

    message = (
        "Build version feature disabled. Reconfigure ns-3 with ./ns3 configure --enable-build-version"
        if build_version_string == ""
        else "ns-3 version: %s" % build_version_string
    )
    print(message)

    if exit_early:
        exit(0)
|
|
|
|
|
|
|
|
|
|
|
2024-12-03 16:59:49 +01:00
|
|
|
def compile_or_die(base_commit, head_commit):
    """Run the test suite on every commit between base_commit and head_commit.

    base_commit: hexsha of the oldest commit to test
    head_commit: hexsha of the newest commit to test

    Locates the git repository (ns-3 itself, or a src/contrib module) that
    contains both commits, checks each commit out on a 'compileOrDieTest'
    branch (creating a 'compileOrDieBackup' branch first so no work is lost),
    and runs ./test.py on each. Never returns: exits 1 on failure, 0 on
    success, -1 when aborting to prevent data loss.
    """
    try:
        import git.exc
        from git import Repo
    except ImportError:
        raise Exception("Missing pip package 'GitPython'.")

    if shutil.which("git") is None:
        raise Exception("Missing program 'git'.")

    # Load ns-3 and module git repositories
    NS3_DIR = os.path.abspath(os.path.dirname(__file__))
    SRC_DIR = os.path.join(NS3_DIR, "src")
    CONTRIB_DIR = os.path.join(NS3_DIR, "contrib")
    git_repos_dirs = [NS3_DIR]
    git_repos_dirs.extend(map(lambda x: os.path.join(SRC_DIR, x), os.listdir(SRC_DIR)))
    git_repos_dirs.extend(map(lambda x: os.path.join(CONTRIB_DIR, x), os.listdir(CONTRIB_DIR)))
    git_repos_dirs = list(filter(lambda x: os.path.isdir(x), git_repos_dirs))
    git_repos_dirs = list(filter(lambda x: os.path.exists(os.path.join(x, ".git")), git_repos_dirs))
    git_repos = []
    for repo_dir in git_repos_dirs:
        try:
            git_repos.append(Repo(repo_dir))
        except Exception as e:
            raise Exception(f"Failed to load git repository in {repo_dir}: {e}")

    # Check which particular repo we are testing (contain both base and top commits)
    tested_repo = None
    commits = None
    for git_repo in git_repos:
        commits = list(git_repo.iter_commits(all=True))
        base_commit_object = list(filter(lambda x: x.hexsha == base_commit, commits))
        head_commit_object = list(filter(lambda x: x.hexsha == head_commit, commits))
        if base_commit_object and head_commit_object:
            tested_repo = git_repo
            break

    if not tested_repo:
        raise Exception("Base and head commits were not found in any of the git repositories")

    # Filter commits we want to test (iter_commits yields newest first)
    commits_sha = list(map(lambda x: x.hexsha, commits))
    commits = commits[commits_sha.index(head_commit) : commits_sha.index(base_commit) + 1]

    # Check if there are uncommitted changes, that will be lost if we proceed
    diff = tested_repo.git.diff()
    if diff:
        print(
            "Interrupted compile-or-die testing to prevent data loss."
            "Uncommitted changes were found. Commit them or remove them before proceeding."
        )
        exit(-1)

    # Check if head commit has a branch attached to it
    branches_with_head_commit = list(
        filter(
            lambda x: x.commit.hexsha == head_commit and x.name != "compileOrDieTest",
            list(tested_repo.branches),
        )
    )
    if not branches_with_head_commit:
        # If not, then create one (compileOrDieBackup), otherwise we could lose data
        compile_or_die_backup_branch = list(
            filter(lambda x: x.name == "compileOrDieBackup", list(tested_repo.branches))
        )
        if not compile_or_die_backup_branch:
            # If the backup branch does not exist, we can safely create it
            tested_repo.create_head("compileOrDieBackup", commits[0]).checkout()
        else:
            # If the backup branch already exist, we actually need to check if there
            # is another branch attached to it, otherwise we could lose data
            branches_of_backup = list(
                filter(
                    # BUG FIX: compile_or_die_backup_branch is a list produced by
                    # list(filter(...)); it must be indexed before reading
                    # .commit (previously raised AttributeError on this path)
                    lambda x: x.commit.hexsha == compile_or_die_backup_branch[0].commit.hexsha,
                    list(tested_repo.branches),
                )
            )
            if len(branches_of_backup) == 1:
                print(
                    "Interrupted compile-or-die testing to prevent data loss."
                    "Make sure the head commit of the compileOrDieBackup branch has a second branch attached to it."
                )
                exit(-1)
            compile_or_die_backup_branch[0].set_commit(commits[0])
            compile_or_die_backup_branch[0].checkout()
            compile_or_die_backup_branch[0].repo.git.reset("--hard")

    # Checkout the test branch
    compile_or_die_test_branch = tested_repo.create_head("compileOrDieTest")

    # Reset the test branch to a specific commit, then hard reset it
    # From oldest to newest
    commits = list(reversed(commits))
    print(f"Compile-or-die with commits: {list(map(lambda x: x.hexsha, commits))}")
    for commit in commits:
        print(f"\tTesting commit {commit.hexsha}")
        compile_or_die_test_branch.set_commit(commit)
        compile_or_die_test_branch.checkout()
        compile_or_die_test_branch.repo.git.reset("--hard")
        try:
            ret = subprocess.run(
                [sys.executable, "./test.py", "--verbose-failed"],
                cwd=ns3_path,
                capture_output=True,
                shell=False,
            )
            if ret.returncode != 0:
                print("\t\t" + ret.stdout.decode().replace("\n", "\n\t\t"))
        except Exception as e:
            tested_repo.checkout("compileOrDieBackup")  # Revert to head commit
            print(f"Failed compile-or-die testing in commit {commit}")
            exit(1)
    exit(0)
|
|
|
|
|
|
|
|
|
|
|
2022-01-08 23:09:57 -03:00
|
|
|
# Debugging this with PyCharm is a no no. It refuses to work hanging indefinitely
|
|
|
|
|
def sudo_command(command: list, password: str):
|
|
|
|
|
# Run command and feed the sudo password
|
2023-11-19 20:07:19 -03:00
|
|
|
proc = subprocess.Popen(
|
|
|
|
|
["sudo", "-S", *command],
|
|
|
|
|
stdin=subprocess.PIPE,
|
|
|
|
|
stdout=subprocess.PIPE,
|
|
|
|
|
stderr=subprocess.PIPE,
|
|
|
|
|
).communicate(input=password.encode() + b"\n")
|
2022-01-08 23:09:57 -03:00
|
|
|
stdout, stderr = proc[0].decode(), proc[1].decode()
|
|
|
|
|
|
|
|
|
|
# Clean sudo password after each command
|
2023-11-19 20:07:19 -03:00
|
|
|
subprocess.Popen(
|
|
|
|
|
["sudo", "-k"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
|
|
|
|
|
).communicate()
|
2022-01-08 23:09:57 -03:00
|
|
|
|
|
|
|
|
# Check if the password is wrong
|
|
|
|
|
if "try again" in stderr:
|
|
|
|
|
raise Exception("Incorrect sudo password")
|
|
|
|
|
|
|
|
|
|
return stdout, stderr
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def sudo_step(args, target_to_run, configure_post_build: set):
    """Set the setuid-root bit on built executables via sudo chown/chmod.

    args: parsed argparse namespace (only dry_run is read here)
    target_to_run: optional single target to also process (run sub-parser)
    configure_post_build: set of executables to process ('configure' sub-parser
        with --enable-sudo applies this to all executables)

    Prompts for the sudo password (or reads SUDO_PASSWORD from the
    environment) unless dry-running. Raises Exception when sudo is missing
    or a chown/chmod fails.
    """
    # Check if sudo exists
    sudo = shutil.which("sudo")
    if not sudo:
        raise Exception("Sudo is required by --enable-sudo, but it was not found")

    # We do this for specified targets if --enable-sudo was set in the run sub-parser
    # And to all executables if set in the 'configure' sub-parser
    targets_to_sudo = configure_post_build
    if target_to_run:
        targets_to_sudo.add(target_to_run)

    password = os.getenv("SUDO_PASSWORD", None)
    if not args.dry_run:
        if password is None:
            from getpass import getpass

            password = getpass(prompt="Sudo password:")

    import stat

    for target in targets_to_sudo:
        # Check if the file was already built
        if not os.path.exists(target):
            continue

        # Check if we need to set anything (skip when setuid bit already set)
        fstat = os.stat(target)
        if (fstat.st_mode & stat.S_ISUID) == stat.S_ISUID:
            continue

        # Log commands
        relative_path_to_target = os.path.relpath(target, ns3_path)
        chown_command = "chown root {}".format(relative_path_to_target)
        chmod_command = "chmod u+s {}".format(relative_path_to_target)
        print_and_buffer("; ".join([chown_command, chmod_command]))

        # Change permissions
        if not args.dry_run:
            # sudo_command returns (stdout, stderr); any stdout output is
            # treated as a failure indication here
            out, err = sudo_command(chown_command.split(), password)
            if len(out) > 0:
                raise Exception("Failed to chown: ", relative_path_to_target)

            out, err = sudo_command(chmod_command.split(), password)
            if len(out) > 0:
                raise Exception("Failed to chmod: ", relative_path_to_target)
    return
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def refuse_run_as_root():
    """Raise an exception when the script is executed by the root user.

    Reads the USER environment variable (empty string when unset); sudo
    elevation, when needed, is handled on demand by --enable-sudo instead.
    """
    if os.getenv("USER", "") == "root":
        raise Exception(
            "Refusing to run as root. --enable-sudo will request your password when needed"
        )
|
2022-01-08 23:09:57 -03:00
|
|
|
|
|
|
|
|
|
2021-11-10 22:28:44 -03:00
|
|
|
def main():
|
2022-03-02 11:51:53 -03:00
|
|
|
global out_dir, run_verbose
|
2021-12-10 02:13:43 +00:00
|
|
|
|
2022-01-08 23:09:57 -03:00
|
|
|
# Refuse to run with sudo
|
|
|
|
|
refuse_run_as_root()
|
|
|
|
|
|
2022-03-19 12:57:05 -03:00
|
|
|
# Enable colorized output for CMake and GCC/Clang
|
2022-12-09 19:39:02 -03:00
|
|
|
if os.getenv("CLICOLOR") is None:
|
|
|
|
|
os.environ["CLICOLOR"] = "1"
|
2022-03-19 12:57:05 -03:00
|
|
|
|
2021-11-10 22:28:44 -03:00
|
|
|
# Parse arguments
|
|
|
|
|
args = parse_args(sys.argv[1:])
|
2021-12-10 02:13:43 +00:00
|
|
|
atexit.register(exit_handler, dry_run=args.dry_run)
|
2022-01-08 23:09:57 -03:00
|
|
|
output = subprocess.DEVNULL if args.quiet else None
|
2021-11-10 22:28:44 -03:00
|
|
|
|
|
|
|
|
# no arguments were passed, so can't possibly be reconfiguring anything, then we refresh and rebuild
|
|
|
|
|
if len(sys.argv) == 1:
|
|
|
|
|
args.build = []
|
|
|
|
|
|
2021-12-05 21:53:49 +00:00
|
|
|
# Read contents from lock (output directory is important)
|
|
|
|
|
if os.path.exists(lock_file):
|
|
|
|
|
exec(open(lock_file).read(), globals())
|
2021-11-10 22:28:44 -03:00
|
|
|
|
|
|
|
|
# Clean project if needed
|
2022-01-08 23:09:57 -03:00
|
|
|
if args.clean:
|
2021-12-10 02:13:43 +00:00
|
|
|
clean_cmake_artifacts(dry_run=args.dry_run)
|
2021-11-10 22:28:44 -03:00
|
|
|
# We end things earlier when cleaning
|
|
|
|
|
return
|
|
|
|
|
|
2022-12-09 00:35:53 -03:00
|
|
|
if args.distclean:
|
|
|
|
|
clean_cmake_artifacts(dry_run=args.dry_run)
|
|
|
|
|
clean_docs_and_tests_artifacts(dry_run=args.dry_run)
|
2023-01-31 21:56:51 -03:00
|
|
|
clean_pip_packaging_artifacts(dry_run=args.dry_run)
|
2023-07-27 00:44:50 -03:00
|
|
|
clean_vcpkg_artifacts(dry_run=args.dry_run)
|
2022-12-09 00:35:53 -03:00
|
|
|
# We end things earlier when cleaning
|
2021-11-10 22:28:44 -03:00
|
|
|
return
|
|
|
|
|
|
2021-12-10 02:13:43 +00:00
|
|
|
# Installation and uninstallation options become cmake targets
|
2022-01-08 23:09:57 -03:00
|
|
|
if args.install:
|
2023-11-19 20:07:19 -03:00
|
|
|
args.build = ["install"]
|
2022-01-08 23:09:57 -03:00
|
|
|
if args.uninstall:
|
2023-11-19 20:07:19 -03:00
|
|
|
args.build = ["uninstall"]
|
2021-12-10 02:13:43 +00:00
|
|
|
|
2022-01-26 11:51:50 -03:00
|
|
|
# Get build profile and other settings
|
2022-01-31 20:02:10 -03:00
|
|
|
build_info, ns3_modules = check_lock_data(out_dir)
|
|
|
|
|
build_profile = build_info["BUILD_PROFILE"]
|
2022-07-10 18:56:31 -03:00
|
|
|
build_version_string = build_info["BUILD_VERSION_STRING"]
|
2022-01-31 20:02:10 -03:00
|
|
|
enable_sudo = build_info["ENABLE_SUDO"]
|
|
|
|
|
ns3_version = build_info["VERSION"]
|
2022-01-26 11:51:50 -03:00
|
|
|
|
2022-03-19 12:19:50 -03:00
|
|
|
# Docs subparser options become cmake targets
|
2022-01-26 11:51:50 -03:00
|
|
|
if args.docs:
|
|
|
|
|
args.build = [args.docs] if args.docs != "all" else ["sphinx", "doxygen"]
|
2023-11-19 20:07:19 -03:00
|
|
|
if "doxygen" in args.build and (
|
|
|
|
|
not build_info["ENABLE_EXAMPLES"] or not build_info["ENABLE_TESTS"]
|
|
|
|
|
):
|
|
|
|
|
print(
|
|
|
|
|
'The "./ns3 docs doxygen" and "./ns3 docs all" commands,\n'
|
|
|
|
|
"requires examples and tests to generate introspected documentation.\n"
|
|
|
|
|
'Try "./ns3 docs doxygen-no-build" or enable examples and tests.'
|
|
|
|
|
)
|
2022-01-26 11:51:50 -03:00
|
|
|
exit(1)
|
2021-11-10 22:28:44 -03:00
|
|
|
|
2022-03-02 11:51:53 -03:00
|
|
|
if args.show == "profile":
|
2022-07-10 18:56:31 -03:00
|
|
|
show_profile(build_profile)
|
2022-01-10 22:36:50 -03:00
|
|
|
|
2022-03-02 11:51:53 -03:00
|
|
|
if args.show == "version":
|
2022-07-10 18:56:31 -03:00
|
|
|
show_build_version(build_version_string)
|
2022-01-10 22:36:50 -03:00
|
|
|
|
2024-10-09 13:15:00 -10:00
|
|
|
if args.show == "typeid":
|
|
|
|
|
sys.argv = [
|
|
|
|
|
"ns3",
|
|
|
|
|
"run",
|
|
|
|
|
"print-introspected-doxygen",
|
|
|
|
|
"--",
|
|
|
|
|
f"--TypeId={args.tid}",
|
|
|
|
|
]
|
|
|
|
|
args = parse_args(sys.argv[1:])
|
|
|
|
|
|
2021-11-10 22:28:44 -03:00
|
|
|
# Check if running something or reconfiguring ns-3
|
2022-01-08 23:09:57 -03:00
|
|
|
run_only = False
|
|
|
|
|
build_and_run = False
|
|
|
|
|
if args.run:
|
2022-03-19 12:19:50 -03:00
|
|
|
# Only print "Finished running..." if verbose is set
|
|
|
|
|
run_verbose = not (args.run_verbose is not True)
|
2022-01-23 17:30:08 -03:00
|
|
|
|
|
|
|
|
# Check whether we are only running or we need to build first
|
2022-03-19 12:19:50 -03:00
|
|
|
if args.no_build:
|
2022-01-08 23:09:57 -03:00
|
|
|
run_only = True
|
2022-03-19 12:19:50 -03:00
|
|
|
else:
|
|
|
|
|
build_and_run = True
|
2021-11-10 22:28:44 -03:00
|
|
|
target_to_run = None
|
2021-12-10 02:13:43 +00:00
|
|
|
target_args = []
|
|
|
|
|
current_cmake_cache_folder = None
|
2022-03-02 11:51:53 -03:00
|
|
|
if run_only or build_and_run:
|
2022-01-08 23:09:57 -03:00
|
|
|
target_to_run = args.run
|
2021-11-10 22:28:44 -03:00
|
|
|
if len(target_to_run) > 0:
|
2021-12-05 21:53:49 +00:00
|
|
|
# While testing a weird case appeared where the target to run is between quotes,
|
|
|
|
|
# so we remove in case they exist
|
2023-11-19 20:07:19 -03:00
|
|
|
if target_to_run[0] in ['"', "'"] and target_to_run[-1] in ['"', "'"]:
|
2021-12-05 21:53:49 +00:00
|
|
|
target_to_run = target_to_run[1:-1]
|
2021-11-10 22:28:44 -03:00
|
|
|
target_to_run = target_to_run.split()
|
|
|
|
|
target_to_run, target_args = target_to_run[0], target_to_run[1:]
|
|
|
|
|
else:
|
|
|
|
|
raise Exception("You need to specify a program to run")
|
|
|
|
|
|
2021-12-05 23:59:55 -03:00
|
|
|
if not run_only:
|
|
|
|
|
# Get current CMake cache folder and CMake generator (used when reconfiguring)
|
|
|
|
|
current_cmake_cache_folder, current_cmake_generator = search_cmake_cache(build_profile)
|
|
|
|
|
|
2022-03-02 11:51:53 -03:00
|
|
|
if args.show == "config":
|
2021-12-05 23:59:55 -03:00
|
|
|
check_config(current_cmake_cache_folder)
|
|
|
|
|
# We end things earlier if only checking the current project configuration
|
2022-07-10 18:56:31 -03:00
|
|
|
exit(0)
|
2021-12-05 23:59:55 -03:00
|
|
|
|
2022-03-19 12:19:50 -03:00
|
|
|
# Check for changes in scratch sources and trigger a reconfiguration if sources changed
|
2022-02-02 23:37:56 +00:00
|
|
|
if current_cmake_cache_folder:
|
2023-11-19 20:07:19 -03:00
|
|
|
current_scratch_sources = glob.glob(
|
|
|
|
|
append_to_ns3_path("scratch", "**", "*.cc"), recursive=True
|
|
|
|
|
)
|
2022-02-02 23:37:56 +00:00
|
|
|
scratches_file = os.path.join(current_cmake_cache_folder, "ns3scratches")
|
|
|
|
|
if os.path.exists(scratches_file):
|
|
|
|
|
with open(scratches_file, "r") as f:
|
2023-11-19 20:07:19 -03:00
|
|
|
previous_scratches_sources = f.read().split("\n")
|
2022-02-02 23:37:56 +00:00
|
|
|
if previous_scratches_sources != current_scratch_sources:
|
|
|
|
|
refresh_cmake(current_cmake_cache_folder, output)
|
|
|
|
|
|
2022-01-08 23:09:57 -03:00
|
|
|
if args.configure:
|
2023-11-19 20:07:19 -03:00
|
|
|
configuration_step(
|
|
|
|
|
current_cmake_cache_folder,
|
|
|
|
|
current_cmake_generator,
|
|
|
|
|
args,
|
|
|
|
|
output,
|
|
|
|
|
args.dry_run,
|
|
|
|
|
)
|
2021-12-10 21:08:23 -03:00
|
|
|
|
|
|
|
|
if not project_configured(current_cmake_cache_folder):
|
2022-01-10 22:36:50 -03:00
|
|
|
project_not_configured()
|
|
|
|
|
|
|
|
|
|
if ns3_modules is None:
|
|
|
|
|
project_not_configured()
|
2021-11-10 22:28:44 -03:00
|
|
|
|
2024-12-03 16:59:49 +01:00
|
|
|
# Entry point for compile-or-die
|
|
|
|
|
if args.compile_or_die[0] and args.compile_or_die[1]:
|
|
|
|
|
compile_or_die(*args.compile_or_die)
|
|
|
|
|
|
2022-01-31 20:02:10 -03:00
|
|
|
# We could also replace the "ns3-" prefix used in .lock-ns3 with the "lib" prefix currently used in cmake
|
2021-11-10 22:28:44 -03:00
|
|
|
ns3_modules = [module.replace("ns3-", "") for module in ns3_modules]
|
|
|
|
|
|
2022-01-31 20:02:10 -03:00
|
|
|
# Now that CMake is configured, we can look for c++ targets in .lock-ns3
|
2021-12-05 21:53:49 +00:00
|
|
|
ns3_programs = get_program_shortcuts(build_profile, ns3_version)
|
2021-11-10 22:28:44 -03:00
|
|
|
|
2022-06-27 17:05:29 -03:00
|
|
|
if args.show == "targets":
|
2022-06-10 16:31:21 -03:00
|
|
|
print_targets_list(ns3_modules, ns3_programs)
|
|
|
|
|
exit(0)
|
|
|
|
|
|
2022-07-10 18:56:31 -03:00
|
|
|
if args.show == "all":
|
|
|
|
|
show_profile(build_profile, exit_early=False)
|
|
|
|
|
show_build_version(build_version_string, exit_early=False)
|
|
|
|
|
check_config(current_cmake_cache_folder)
|
|
|
|
|
print("---- Summary of buildable targets:")
|
|
|
|
|
print("Buildable targets: %4.d" % len(ns3_modules))
|
2023-11-19 20:07:19 -03:00
|
|
|
print(
|
|
|
|
|
"Runnable/Buildable targets: %4.d"
|
|
|
|
|
% len(non_ambiguous_program_target_list(ns3_programs))
|
|
|
|
|
)
|
2022-07-10 18:56:31 -03:00
|
|
|
|
|
|
|
|
exit(0)
|
|
|
|
|
|
2022-02-11 22:07:09 -03:00
|
|
|
def check_ambiguous_target(target_type, target_to_check, programs):
    """Resolve a target shortcut to a single executable path.

    `programs` maps shortcut names to lists of matching executable paths.
    If the shortcut matches exactly one path, that path is returned.
    If it matches several (ambiguous), the alternatives are printed and
    the script exits with status 1.
    """
    candidates = programs[target_to_check]
    if len(candidates) <= 1:
        return candidates[0]
    # Ambiguous shortcut: tell the user the fully-qualified options and bail out.
    alternatives = '", "'.join(candidates)
    print(
        '%s target "%s" is ambiguous. Try one of these: "%s"'
        % (target_type, target_to_check, alternatives)
    )
    exit(1)
|
2022-01-26 11:51:50 -03:00
|
|
|
|
2021-11-10 22:28:44 -03:00
|
|
|
# If we have a target to run, replace shortcut with full path or raise exception
|
2021-12-10 21:08:23 -03:00
|
|
|
if run_only or build_and_run:
|
2021-11-10 22:28:44 -03:00
|
|
|
if target_to_run in ns3_programs:
|
2022-01-26 11:51:50 -03:00
|
|
|
target_to_run = check_ambiguous_target("Run", target_to_run, ns3_programs)
|
2022-04-13 22:45:44 -03:00
|
|
|
elif ".py" in target_to_run and os.path.exists(target_to_run):
|
|
|
|
|
# We let python scripts pass to be able to run ./utils/python-unit-tests.py
|
|
|
|
|
pass
|
2021-11-10 22:28:44 -03:00
|
|
|
else:
|
|
|
|
|
raise Exception("Couldn't find the specified program: %s" % target_to_run)
|
|
|
|
|
|
2021-12-05 21:53:49 +00:00
|
|
|
if "build" in args:
|
|
|
|
|
complete_targets = []
|
|
|
|
|
for target in args.build:
|
2023-11-19 20:07:19 -03:00
|
|
|
build_target = (
|
|
|
|
|
check_ambiguous_target("Build", target, ns3_programs)
|
|
|
|
|
if target in ns3_programs
|
|
|
|
|
else target
|
|
|
|
|
)
|
2022-01-26 11:51:50 -03:00
|
|
|
complete_targets.append(build_target)
|
2021-12-05 21:53:49 +00:00
|
|
|
args.build = complete_targets
|
|
|
|
|
del complete_targets
|
|
|
|
|
|
2021-12-05 23:59:55 -03:00
|
|
|
if not run_only:
|
2023-11-19 20:07:19 -03:00
|
|
|
build_step(
|
|
|
|
|
args,
|
|
|
|
|
build_and_run,
|
|
|
|
|
target_to_run,
|
|
|
|
|
current_cmake_cache_folder,
|
|
|
|
|
ns3_modules,
|
|
|
|
|
ns3_version,
|
|
|
|
|
build_profile,
|
|
|
|
|
output,
|
|
|
|
|
)
|
2021-11-10 22:28:44 -03:00
|
|
|
|
2022-03-02 11:51:53 -03:00
|
|
|
if not args.shell and target_to_run and ".py" not in target_to_run:
|
2022-09-23 23:37:14 -03:00
|
|
|
if sys.platform == "win32":
|
|
|
|
|
target_to_run += ".exe"
|
2022-01-08 23:09:57 -03:00
|
|
|
|
|
|
|
|
# If we're only trying to run the target, we need to check if it actually exists first
|
2023-11-19 20:07:19 -03:00
|
|
|
if (
|
|
|
|
|
(run_only or build_and_run)
|
|
|
|
|
and ".py" not in target_to_run
|
|
|
|
|
and not os.path.exists(target_to_run)
|
|
|
|
|
):
|
2022-01-08 23:09:57 -03:00
|
|
|
raise Exception("Executable has not been built yet")
|
2021-12-05 21:53:49 +00:00
|
|
|
|
2022-01-08 23:09:57 -03:00
|
|
|
# Setup program as sudo
|
|
|
|
|
if enable_sudo or (args.run and args.enable_sudo):
|
2023-11-19 20:07:19 -03:00
|
|
|
sudo_step(
|
|
|
|
|
args,
|
|
|
|
|
target_to_run,
|
|
|
|
|
set(map(lambda x: x[0], ns3_programs.values())) if enable_sudo else set(),
|
|
|
|
|
)
|
2021-11-10 22:28:44 -03:00
|
|
|
|
|
|
|
|
# Finally, we try to run it
|
2022-03-02 11:51:53 -03:00
|
|
|
if args.shell or run_only or build_and_run:
|
2021-12-10 21:08:23 -03:00
|
|
|
run_step(args, target_to_run, target_args)
|
|
|
|
|
|
2021-12-10 02:13:43 +00:00
|
|
|
return
|
2021-11-10 22:28:44 -03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
main()
|