bindings: replace pybindgen bindings support with cppyy bindings
This commit is contained in:
@@ -1,9 +0,0 @@
|
||||
# Store version information for required pybindgen version, used by
|
||||
# ns-3-allinone/download.py.
|
||||
# If specifying a released pybindgen version, specify the required pybindgen
|
||||
# version as, e.g. '0.21.0'
|
||||
# If specifying a commit on the development tree, specify it like this based
|
||||
# on 'git describe --tags' command. Example, if the latest release was 0.21.0,
|
||||
# and 'git describe --tags' reports "0.21.0-6-g8e7c0a9", then write the
|
||||
# version string below as '0.21.0.post6+ng8e7c0a9'
|
||||
__required_pybindgen_version__ = '0.22.1'
|
||||
@@ -1,13 +0,0 @@
|
||||
from pybindgen import Module, FileCodeSink, write_preamble, param, retval
|
||||
|
||||
def register_types(module):
|
||||
module.add_class('MyClass')
|
||||
|
||||
def register_methods(root_module):
|
||||
MyClass = root_module['MyClass']
|
||||
MyClass.add_constructor([], visibility='public')
|
||||
MyClass.add_constructor([param('double', 's'), param('double', 'l'), param('double', 'mean')], visibility='public')
|
||||
|
||||
def register_functions(module):
|
||||
module.add_function('SomeFunction', 'int', [param('int', 'xpto')])
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
|
||||
from _ns3 import *
|
||||
|
||||
import atexit
|
||||
atexit.register(Simulator.Destroy)
|
||||
del atexit
|
||||
|
||||
@@ -1,336 +0,0 @@
|
||||
#include "ns3module.h"
|
||||
#include "ns3/ref-count-base.h"
|
||||
|
||||
|
||||
namespace ns3 {
|
||||
|
||||
void PythonCompleteConstruct (Ptr<Object> object, TypeId typeId, const AttributeList &attributes)
|
||||
{
|
||||
object->SetTypeId (typeId);
|
||||
object->Object::Construct (attributes);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
class PythonEventImpl : public ns3::EventImpl
|
||||
{
|
||||
private:
|
||||
PyObject *m_callback;
|
||||
PyObject *m_args;
|
||||
public:
|
||||
PythonEventImpl (PyObject *callback, PyObject *args)
|
||||
{
|
||||
m_callback = callback;
|
||||
Py_INCREF (m_callback);
|
||||
m_args = args;
|
||||
Py_INCREF (m_args);
|
||||
}
|
||||
virtual ~PythonEventImpl ()
|
||||
{
|
||||
PyGILState_STATE __py_gil_state;
|
||||
__py_gil_state = (PyEval_ThreadsInitialized () ? PyGILState_Ensure () : (PyGILState_STATE) 0);
|
||||
|
||||
Py_DECREF (m_callback);
|
||||
Py_DECREF (m_args);
|
||||
|
||||
if (PyEval_ThreadsInitialized ())
|
||||
PyGILState_Release (__py_gil_state);
|
||||
}
|
||||
virtual void Notify ()
|
||||
{
|
||||
PyGILState_STATE __py_gil_state;
|
||||
__py_gil_state = (PyEval_ThreadsInitialized () ? PyGILState_Ensure () : (PyGILState_STATE) 0);
|
||||
|
||||
PyObject *retval = PyObject_CallObject (m_callback, m_args);
|
||||
if (retval) {
|
||||
if (retval != Py_None) {
|
||||
PyErr_SetString (PyExc_TypeError, "event callback should return None");
|
||||
PyErr_Print ();
|
||||
}
|
||||
Py_DECREF (retval);
|
||||
} else {
|
||||
PyErr_Print ();
|
||||
}
|
||||
|
||||
if (PyEval_ThreadsInitialized ())
|
||||
PyGILState_Release (__py_gil_state);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
PyObject *
|
||||
_wrap_Simulator_Schedule (PyNs3Simulator *PYBINDGEN_UNUSED (dummy), PyObject *args, PyObject *kwargs,
|
||||
PyObject **return_exception)
|
||||
{
|
||||
PyObject *exc_type, *traceback;
|
||||
PyObject *py_time;
|
||||
PyObject *py_callback;
|
||||
PyObject *user_args;
|
||||
ns3::Ptr<PythonEventImpl> py_event_impl;
|
||||
PyNs3EventId *py_EventId;
|
||||
|
||||
if (kwargs && PyObject_Length (kwargs) > 0) {
|
||||
PyErr_SetString (PyExc_TypeError, "keyword arguments not supported");
|
||||
goto error;
|
||||
}
|
||||
|
||||
if (PyTuple_GET_SIZE (args) < 2) {
|
||||
PyErr_SetString (PyExc_TypeError, "ns3.Simulator.Schedule needs at least 2 arguments");
|
||||
goto error;
|
||||
}
|
||||
py_time = PyTuple_GET_ITEM (args, 0);
|
||||
py_callback = PyTuple_GET_ITEM (args, 1);
|
||||
|
||||
if (!PyObject_IsInstance (py_time, (PyObject*) &PyNs3Time_Type)) {
|
||||
PyErr_SetString (PyExc_TypeError, "Parameter 1 should be a ns3.Time instance");
|
||||
goto error;
|
||||
}
|
||||
if (!PyCallable_Check (py_callback)) {
|
||||
PyErr_SetString (PyExc_TypeError, "Parameter 2 should be callable");
|
||||
goto error;
|
||||
}
|
||||
user_args = PyTuple_GetSlice (args, 2, PyTuple_GET_SIZE (args));
|
||||
py_event_impl = ns3::Create<PythonEventImpl>(py_callback, user_args);
|
||||
Py_DECREF (user_args);
|
||||
|
||||
py_EventId = PyObject_New (PyNs3EventId, &PyNs3EventId_Type);
|
||||
py_EventId->obj = new ns3::EventId (
|
||||
ns3::Simulator::Schedule (*((PyNs3Time *) py_time)->obj, py_event_impl));
|
||||
return (PyObject *) py_EventId;
|
||||
|
||||
error:
|
||||
PyErr_Fetch (&exc_type, return_exception, &traceback);
|
||||
Py_XDECREF (exc_type);
|
||||
Py_XDECREF (traceback);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
|
||||
PyObject *
|
||||
_wrap_Simulator_ScheduleNow (PyNs3Simulator *PYBINDGEN_UNUSED (dummy), PyObject *args, PyObject *kwargs,
|
||||
PyObject **return_exception)
|
||||
{
|
||||
PyObject *exc_type, *traceback;
|
||||
PyObject *py_callback;
|
||||
PyObject *user_args;
|
||||
ns3::Ptr<PythonEventImpl> py_event_impl;
|
||||
PyNs3EventId *py_EventId;
|
||||
|
||||
if (kwargs && PyObject_Length (kwargs) > 0) {
|
||||
PyErr_SetString (PyExc_TypeError, "keyword arguments not supported");
|
||||
goto error;
|
||||
}
|
||||
|
||||
if (PyTuple_GET_SIZE (args) < 1) {
|
||||
PyErr_SetString (PyExc_TypeError, "ns3.Simulator.Schedule needs at least 1 argument");
|
||||
goto error;
|
||||
}
|
||||
py_callback = PyTuple_GET_ITEM (args, 0);
|
||||
|
||||
if (!PyCallable_Check (py_callback)) {
|
||||
PyErr_SetString (PyExc_TypeError, "Parameter 2 should be callable");
|
||||
goto error;
|
||||
}
|
||||
user_args = PyTuple_GetSlice (args, 1, PyTuple_GET_SIZE (args));
|
||||
py_event_impl = ns3::Create<PythonEventImpl>(py_callback, user_args);
|
||||
Py_DECREF (user_args);
|
||||
|
||||
py_EventId = PyObject_New (PyNs3EventId, &PyNs3EventId_Type);
|
||||
py_EventId->obj = new ns3::EventId (ns3::Simulator::ScheduleNow (py_event_impl));
|
||||
return (PyObject *) py_EventId;
|
||||
|
||||
error:
|
||||
PyErr_Fetch (&exc_type, return_exception, &traceback);
|
||||
Py_XDECREF (exc_type);
|
||||
Py_XDECREF (traceback);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
|
||||
PyObject *
|
||||
_wrap_Simulator_ScheduleDestroy (PyNs3Simulator *PYBINDGEN_UNUSED (dummy), PyObject *args, PyObject *kwargs,
|
||||
PyObject **return_exception)
|
||||
{
|
||||
PyObject *exc_type, *traceback;
|
||||
PyObject *py_callback;
|
||||
PyObject *user_args;
|
||||
ns3::Ptr<PythonEventImpl> py_event_impl;
|
||||
PyNs3EventId *py_EventId;
|
||||
|
||||
if (kwargs && PyObject_Length (kwargs) > 0) {
|
||||
PyErr_SetString (PyExc_TypeError, "keyword arguments not supported");
|
||||
goto error;
|
||||
}
|
||||
|
||||
if (PyTuple_GET_SIZE (args) < 1) {
|
||||
PyErr_SetString (PyExc_TypeError, "ns3.Simulator.Schedule needs at least 1 argument");
|
||||
goto error;
|
||||
}
|
||||
py_callback = PyTuple_GET_ITEM (args, 0);
|
||||
|
||||
if (!PyCallable_Check (py_callback)) {
|
||||
PyErr_SetString (PyExc_TypeError, "Parameter 2 should be callable");
|
||||
goto error;
|
||||
}
|
||||
user_args = PyTuple_GetSlice (args, 1, PyTuple_GET_SIZE (args));
|
||||
py_event_impl = ns3::Create<PythonEventImpl>(py_callback, user_args);
|
||||
Py_DECREF (user_args);
|
||||
|
||||
py_EventId = PyObject_New (PyNs3EventId, &PyNs3EventId_Type);
|
||||
py_EventId->obj = new ns3::EventId (ns3::Simulator::ScheduleDestroy (py_event_impl));
|
||||
return (PyObject *) py_EventId;
|
||||
|
||||
error:
|
||||
PyErr_Fetch (&exc_type, return_exception, &traceback);
|
||||
Py_XDECREF (exc_type);
|
||||
Py_XDECREF (traceback);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
|
||||
PyObject *
|
||||
_wrap_TypeId_LookupByNameFailSafe (PyNs3TypeId *PYBINDGEN_UNUSED (dummy), PyObject *args, PyObject *kwargs,
|
||||
PyObject **return_exception)
|
||||
{
|
||||
bool ok;
|
||||
const char *name;
|
||||
Py_ssize_t name_len;
|
||||
ns3::TypeId tid;
|
||||
PyNs3TypeId *py_tid;
|
||||
const char *keywords[] = {"name", NULL};
|
||||
|
||||
if (!PyArg_ParseTupleAndKeywords (args, kwargs, (char *) "s#", (char **) keywords, &name, &name_len)) {
|
||||
PyObject *exc_type, *traceback;
|
||||
PyErr_Fetch (&exc_type, return_exception, &traceback);
|
||||
Py_XDECREF (exc_type);
|
||||
Py_XDECREF (traceback);
|
||||
return NULL;
|
||||
}
|
||||
ok = ns3::TypeId::LookupByNameFailSafe (std::string (name, name_len), &tid);
|
||||
if (!ok)
|
||||
{
|
||||
PyErr_Format (PyExc_KeyError, "The ns3 type with name `%s' is not registered", name);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
py_tid = PyObject_New (PyNs3TypeId, &PyNs3TypeId_Type);
|
||||
py_tid->obj = new ns3::TypeId (tid);
|
||||
PyNs3TypeId_wrapper_registry[(void *) py_tid->obj] = (PyObject *) py_tid;
|
||||
|
||||
return (PyObject *) py_tid;
|
||||
}
|
||||
|
||||
|
||||
class CommandLinePythonValueSetter : public ns3::RefCountBase
|
||||
{
|
||||
PyObject *m_namespace;
|
||||
std::string m_variable;
|
||||
public:
|
||||
CommandLinePythonValueSetter (PyObject *ns, std::string const &variable) {
|
||||
Py_INCREF (ns);
|
||||
m_namespace = ns;
|
||||
m_variable = variable;
|
||||
}
|
||||
bool Parse (std::string value) {
|
||||
PyObject *pyvalue = PyString_FromStringAndSize (value.data (), value.size ());
|
||||
PyObject_SetAttrString (m_namespace, (char *) m_variable.c_str (), pyvalue);
|
||||
if (PyErr_Occurred ()) {
|
||||
PyErr_Print ();
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
virtual ~CommandLinePythonValueSetter () {
|
||||
Py_DECREF (m_namespace);
|
||||
m_namespace = NULL;
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
PyObject *
|
||||
_wrap_CommandLine_AddValue (PyNs3CommandLine *self, PyObject *args, PyObject *kwargs,
|
||||
PyObject **return_exception)
|
||||
{
|
||||
const char *name, *help, *variable = NULL;
|
||||
PyObject *py_namespace = NULL;
|
||||
const char *keywords[] = {"name", "help", "variable", "namespace", NULL};
|
||||
|
||||
if (!PyArg_ParseTupleAndKeywords (args, kwargs, (char *) "ss|sO", (char **) keywords, &name, &help, &variable, &py_namespace)) {
|
||||
PyObject *exc_type, *traceback;
|
||||
PyErr_Fetch (&exc_type, return_exception, &traceback);
|
||||
Py_XDECREF (exc_type);
|
||||
Py_XDECREF (traceback);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (variable == NULL) {
|
||||
variable = name;
|
||||
}
|
||||
if (py_namespace == NULL) {
|
||||
py_namespace = (PyObject *) self;
|
||||
}
|
||||
|
||||
ns3::Ptr<CommandLinePythonValueSetter> setter = ns3::Create<CommandLinePythonValueSetter> (py_namespace, variable);
|
||||
self->obj->AddValue (name, help, ns3::MakeCallback (&CommandLinePythonValueSetter::Parse, setter));
|
||||
|
||||
Py_INCREF (Py_None);
|
||||
return Py_None;
|
||||
}
|
||||
|
||||
|
||||
PyObject *
|
||||
_wrap_Simulator_Run (PyNs3Simulator *PYBINDGEN_UNUSED (dummy), PyObject *args, PyObject *kwargs,
|
||||
PyObject **return_exception)
|
||||
{
|
||||
const char *keywords[] = {"signal_check_frequency", NULL};
|
||||
int signal_check_frequency;
|
||||
|
||||
ns3::Ptr<ns3::DefaultSimulatorImpl> defaultSim =
|
||||
ns3::DynamicCast<ns3::DefaultSimulatorImpl> (ns3::Simulator::GetImplementation ());
|
||||
if (defaultSim) {
|
||||
signal_check_frequency = 100;
|
||||
} else {
|
||||
signal_check_frequency = -1;
|
||||
}
|
||||
|
||||
if (!PyArg_ParseTupleAndKeywords (args, kwargs, (char *) "|i", (char **) keywords, &signal_check_frequency)) {
|
||||
PyObject *exc_type, *traceback;
|
||||
PyErr_Fetch (&exc_type, return_exception, &traceback);
|
||||
Py_XDECREF (exc_type);
|
||||
Py_XDECREF (traceback);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
PyThreadState *py_thread_state = NULL;
|
||||
|
||||
if (signal_check_frequency == -1)
|
||||
{
|
||||
if (PyEval_ThreadsInitialized ())
|
||||
py_thread_state = PyEval_SaveThread ();
|
||||
ns3::Simulator::Run ();
|
||||
if (py_thread_state)
|
||||
PyEval_RestoreThread (py_thread_state);
|
||||
} else {
|
||||
while (!ns3::Simulator::IsFinished ())
|
||||
{
|
||||
if (PyEval_ThreadsInitialized ())
|
||||
py_thread_state = PyEval_SaveThread ();
|
||||
|
||||
for (int n = signal_check_frequency; n > 0 && !ns3::Simulator::IsFinished (); --n)
|
||||
{
|
||||
ns3::Simulator::RunOneEvent ();
|
||||
}
|
||||
|
||||
if (py_thread_state)
|
||||
PyEval_RestoreThread (py_thread_state);
|
||||
PyErr_CheckSignals ();
|
||||
if (PyErr_Occurred ())
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
Py_INCREF (Py_None);
|
||||
return Py_None;
|
||||
}
|
||||
|
||||
@@ -1,131 +0,0 @@
|
||||
from __future__ import print_function
|
||||
import warnings
|
||||
import sys
|
||||
import os
|
||||
import pybindgen.settings
|
||||
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
|
||||
from pybindgen.module import MultiSectionFactory
|
||||
import ns3modulegen_core_customizations
|
||||
|
||||
import logging
|
||||
|
||||
pybindgen.settings.wrapper_registry = pybindgen.settings.StdMapWrapperRegistry
|
||||
|
||||
import traceback
|
||||
|
||||
class ErrorHandler(pybindgen.settings.ErrorHandler):
|
||||
|
||||
def __init__(self, apidefs_file):
|
||||
self.apidefs_file = apidefs_file
|
||||
|
||||
def handle_error(self, wrapper, exception, traceback_):
|
||||
stack = getattr(wrapper, 'stack_where_defined', [])
|
||||
stack.reverse()
|
||||
for l in stack:
|
||||
if l[0] == self.apidefs_file:
|
||||
warnings.warn_explicit("exception %r in wrapper %s" % (exception, wrapper),
|
||||
Warning, l[0], l[1])
|
||||
break
|
||||
else:
|
||||
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
|
||||
return True
|
||||
|
||||
|
||||
#print >> sys.stderr, ">>>>>>>>>>>>>>>>>>>>>>>>>>>> ", bool(eval(os.environ["GCC_RTTI_ABI_COMPLETE"]))
|
||||
pybindgen.settings.gcc_rtti_abi_complete = bool(eval(os.environ["GCC_RTTI_ABI_COMPLETE"]))
|
||||
|
||||
class MyMultiSectionFactory(MultiSectionFactory):
|
||||
def __init__(self, main_file_name):
|
||||
super(MyMultiSectionFactory, self).__init__()
|
||||
self.main_file_name = main_file_name
|
||||
self.main_sink = FileCodeSink(open(main_file_name, "wt"))
|
||||
self.header_name = "ns3module.h"
|
||||
header_file_name = os.path.join(os.path.dirname(self.main_file_name), self.header_name)
|
||||
#print >> sys.stderr, ">>>>>>>>>>>>>>>>>", header_file_name, main_file_name
|
||||
self.header_sink = FileCodeSink(open(header_file_name, "wt"))
|
||||
def get_section_code_sink(self, section_name):
|
||||
return self.main_sink
|
||||
def get_main_code_sink(self):
|
||||
return self.main_sink
|
||||
def get_common_header_code_sink(self):
|
||||
return self.header_sink
|
||||
def get_common_header_include(self):
|
||||
return '"%s"' % self.header_name
|
||||
def close(self):
|
||||
self.header_sink.file.close()
|
||||
self.main_sink.file.close()
|
||||
|
||||
|
||||
|
||||
def main(argv):
|
||||
logging.basicConfig()
|
||||
logging.getLogger("pybindgen.typehandlers").setLevel(logging.DEBUG)
|
||||
|
||||
module_abs_src_path, target, extension_name, output_cc_file_name = argv[1:]
|
||||
module_name = os.path.basename(module_abs_src_path)
|
||||
out = MyMultiSectionFactory(output_cc_file_name)
|
||||
|
||||
sys.path.insert(0, os.path.join(module_abs_src_path, "bindings"))
|
||||
try:
|
||||
module_apidefs = __import__("modulegen__%s" % target)
|
||||
del sys.modules["modulegen__%s" % target]
|
||||
try:
|
||||
module_customization = __import__("modulegen_customizations")
|
||||
del sys.modules["modulegen_customizations"]
|
||||
except ImportError:
|
||||
module_customization = object()
|
||||
|
||||
try:
|
||||
from callbacks_list import callback_classes
|
||||
except ImportError as ex:
|
||||
print("***************", repr(ex), file=sys.stderr)
|
||||
callback_classes = []
|
||||
else:
|
||||
print(">>>>>>>>>>>>>>>>", repr(callback_classes), file=sys.stderr)
|
||||
|
||||
finally:
|
||||
sys.path.pop(0)
|
||||
|
||||
apidefs_file, dummy = os.path.splitext(module_apidefs.__file__)
|
||||
apidefs_file += '.py'
|
||||
pybindgen.settings.error_handler = ErrorHandler(apidefs_file)
|
||||
|
||||
root_module = module_apidefs.module_init()
|
||||
root_module.set_name(extension_name)
|
||||
root_module.add_include('"ns3/%s-module.h"' % module_name)
|
||||
|
||||
ns3modulegen_core_customizations.add_std_ios_openmode(root_module)
|
||||
|
||||
# -----------
|
||||
module_apidefs.register_types(root_module)
|
||||
|
||||
if hasattr(module_customization, 'post_register_types'):
|
||||
module_customization.post_register_types(root_module)
|
||||
|
||||
# register Callback<...> type handlers
|
||||
ns3modulegen_core_customizations.register_callback_classes(root_module.after_forward_declarations,
|
||||
callback_classes)
|
||||
|
||||
# -----------
|
||||
module_apidefs.register_methods(root_module)
|
||||
|
||||
if hasattr(module_customization, 'post_register_methods'):
|
||||
module_customization.post_register_methods(root_module)
|
||||
|
||||
ns3modulegen_core_customizations.Object_customizations(root_module)
|
||||
ns3modulegen_core_customizations.Attribute_customizations(root_module)
|
||||
ns3modulegen_core_customizations.generate_callback_classes(root_module,
|
||||
callback_classes)
|
||||
|
||||
# -----------
|
||||
module_apidefs.register_functions(root_module)
|
||||
|
||||
if hasattr(module_customization, 'post_register_functions'):
|
||||
module_customization.post_register_functions(root_module)
|
||||
|
||||
# -----------
|
||||
root_module.generate(out)
|
||||
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
main(sys.argv)
|
||||
@@ -1,446 +0,0 @@
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
import re
|
||||
|
||||
from pybindgen.typehandlers import base as typehandlers
|
||||
from pybindgen import ReturnValue, Parameter
|
||||
from pybindgen.cppmethod import CustomCppMethodWrapper, CustomCppConstructorWrapper
|
||||
from pybindgen.typehandlers.codesink import MemoryCodeSink
|
||||
from pybindgen.typehandlers import ctypeparser
|
||||
from pybindgen.typehandlers.base import ForwardWrapperBase
|
||||
from pybindgen import cppclass
|
||||
import warnings
|
||||
|
||||
from pybindgen.typehandlers.base import CodeGenerationError
|
||||
|
||||
import sys
|
||||
|
||||
class SmartPointerTransformation(typehandlers.TypeTransformation):
|
||||
"""
|
||||
This class provides a "type transformation" that tends to support
|
||||
NS-3 smart pointers. Parameters such as "Ptr<Foo> foo" are
|
||||
transformed into something like Parameter.new("Foo*", "foo",
|
||||
transfer_ownership=False). Return values such as Ptr<Foo> are
|
||||
transformed into ReturnValue.new("Foo*",
|
||||
caller_owns_return=False). Since the underlying objects have
|
||||
reference counting, PyBindGen does the right thing.
|
||||
"""
|
||||
def __init__(self):
|
||||
super(SmartPointerTransformation, self).__init__()
|
||||
self.rx = re.compile(r'(ns3::|::ns3::|)Ptr<([^>]+)>\s*$')
|
||||
print("{0!r}".format(self), file=sys.stderr)
|
||||
|
||||
def _get_untransformed_type_traits(self, name):
|
||||
m = self.rx.match(name)
|
||||
is_const = False
|
||||
if m is None:
|
||||
print("{0!r} did not match".format(name), file=sys.stderr)
|
||||
return None, False
|
||||
else:
|
||||
name1 = m.group(2).strip()
|
||||
if name1.startswith('const '):
|
||||
name1 = name1[len('const '):]
|
||||
is_const = True
|
||||
if name1.endswith(' const'):
|
||||
name1 = name1[:-len(' const')]
|
||||
is_const = True
|
||||
new_name = name1+' *'
|
||||
|
||||
if new_name.startswith('::'):
|
||||
new_name = new_name[2:]
|
||||
return new_name, is_const
|
||||
|
||||
def get_untransformed_name(self, name):
|
||||
new_name, dummy_is_const = self._get_untransformed_type_traits(name)
|
||||
return new_name
|
||||
|
||||
def create_type_handler(self, type_handler, *args, **kwargs):
|
||||
if issubclass(type_handler, Parameter):
|
||||
kwargs['transfer_ownership'] = False
|
||||
elif issubclass(type_handler, ReturnValue):
|
||||
kwargs['caller_owns_return'] = False
|
||||
else:
|
||||
raise AssertionError
|
||||
|
||||
## fix the ctype, add ns3:: namespace
|
||||
orig_ctype, is_const = self._get_untransformed_type_traits(args[0])
|
||||
if is_const:
|
||||
correct_ctype = 'ns3::Ptr< {0} const >'.format(orig_ctype[:-2])
|
||||
else:
|
||||
correct_ctype = 'ns3::Ptr< {0} >'.format(orig_ctype[:-2])
|
||||
args = tuple([correct_ctype] + list(args[1:]))
|
||||
|
||||
handler = type_handler(*args, **kwargs)
|
||||
handler.set_transformation(self, orig_ctype)
|
||||
return handler
|
||||
|
||||
def untransform(self, type_handler, declarations, code_block, expression):
|
||||
return 'const_cast<%s> (ns3::PeekPointer (%s))' % (type_handler.untransformed_ctype, expression)
|
||||
|
||||
def transform(self, type_handler, declarations, code_block, expression):
|
||||
assert type_handler.untransformed_ctype[-1] == '*'
|
||||
return 'ns3::Ptr< %s > (%s)' % (type_handler.untransformed_ctype[:-1], expression)
|
||||
|
||||
## register the type transformation
|
||||
transf = SmartPointerTransformation()
|
||||
typehandlers.return_type_matcher.register_transformation(transf)
|
||||
typehandlers.param_type_matcher.register_transformation(transf)
|
||||
del transf
|
||||
|
||||
|
||||
class CallbackImplProxyMethod(typehandlers.ReverseWrapperBase):
|
||||
"""
|
||||
Class that generates a proxy virtual method that calls a similarly named python method.
|
||||
"""
|
||||
|
||||
def __init__(self, return_value, parameters):
|
||||
super(CallbackImplProxyMethod, self).__init__(return_value, parameters)
|
||||
|
||||
def generate_python_call(self):
|
||||
"""code to call the python method"""
|
||||
build_params = self.build_params.get_parameters(force_tuple_creation=True)
|
||||
if build_params[0][0] == '"':
|
||||
build_params[0] = '(char *) ' + build_params[0]
|
||||
args = self.before_call.declare_variable('PyObject*', 'args')
|
||||
self.before_call.write_code('%s = Py_BuildValue(%s);'
|
||||
% (args, ', '.join(build_params)))
|
||||
self.before_call.add_cleanup_code('Py_DECREF(%s);' % args)
|
||||
self.before_call.write_code('py_retval = PyObject_CallObject(m_callback, %s);' % args)
|
||||
self.before_call.write_error_check('py_retval == NULL')
|
||||
self.before_call.add_cleanup_code('Py_DECREF(py_retval);')
|
||||
|
||||
|
||||
|
||||
|
||||
def register_callback_classes(out, callbacks):
|
||||
for callback_impl_num, template_parameters in enumerate(callbacks):
|
||||
cls_name = "ns3::Callback< %s >" % ', '.join(template_parameters)
|
||||
#print >> sys.stderr, "***** trying to register callback: %r" % cls_name
|
||||
class_name = "PythonCallbackImpl%i" % callback_impl_num
|
||||
|
||||
class PythonCallbackParameter(Parameter):
|
||||
"Class handlers"
|
||||
CTYPES = [cls_name]
|
||||
print("***** registering callback handler: %r (%r)" % (ctypeparser.normalize_type_string(cls_name), cls_name), file=sys.stderr)
|
||||
DIRECTIONS = [Parameter.DIRECTION_IN]
|
||||
PYTHON_CALLBACK_IMPL_NAME = class_name
|
||||
TEMPLATE_ARGS = template_parameters
|
||||
DISABLED = False
|
||||
|
||||
def convert_python_to_c(self, wrapper):
|
||||
"parses python args to get C++ value"
|
||||
assert isinstance(wrapper, typehandlers.ForwardWrapperBase)
|
||||
|
||||
if self.DISABLED:
|
||||
raise CodeGenerationError("wrapper could not be generated")
|
||||
|
||||
if self.default_value is None:
|
||||
py_callback = wrapper.declarations.declare_variable('PyObject*', self.name)
|
||||
wrapper.parse_params.add_parameter('O', ['&'+py_callback], self.name)
|
||||
wrapper.before_call.write_error_check(
|
||||
'!PyCallable_Check(%s)' % py_callback,
|
||||
'PyErr_SetString(PyExc_TypeError, "parameter \'%s\' must be callbale");' % self.name)
|
||||
callback_impl = wrapper.declarations.declare_variable(
|
||||
'ns3::Ptr<%s>' % self.PYTHON_CALLBACK_IMPL_NAME,
|
||||
'%s_cb_impl' % self.name)
|
||||
wrapper.before_call.write_code("%s = ns3::Create<%s> (%s);"
|
||||
% (callback_impl, self.PYTHON_CALLBACK_IMPL_NAME, py_callback))
|
||||
wrapper.call_params.append(
|
||||
'ns3::Callback<%s> (%s)' % (', '.join(self.TEMPLATE_ARGS), callback_impl))
|
||||
else:
|
||||
py_callback = wrapper.declarations.declare_variable('PyObject*', self.name, 'NULL')
|
||||
wrapper.parse_params.add_parameter('O', ['&'+py_callback], self.name, optional=True)
|
||||
value = wrapper.declarations.declare_variable(
|
||||
'ns3::Callback<%s>' % ', '.join(self.TEMPLATE_ARGS),
|
||||
self.name+'_value',
|
||||
self.default_value)
|
||||
|
||||
wrapper.before_call.write_code("if (%s) {" % (py_callback,))
|
||||
wrapper.before_call.indent()
|
||||
|
||||
wrapper.before_call.write_error_check(
|
||||
'!PyCallable_Check(%s)' % py_callback,
|
||||
'PyErr_SetString(PyExc_TypeError, "parameter \'%s\' must be callbale");' % self.name)
|
||||
|
||||
wrapper.before_call.write_code("%s = ns3::Callback<%s> (ns3::Create<%s> (%s));"
|
||||
% (value, ', '.join(self.TEMPLATE_ARGS),
|
||||
self.PYTHON_CALLBACK_IMPL_NAME, py_callback))
|
||||
|
||||
wrapper.before_call.unindent()
|
||||
wrapper.before_call.write_code("}") # closes: if (py_callback) {
|
||||
|
||||
wrapper.call_params.append(value)
|
||||
|
||||
|
||||
def convert_c_to_python(self, wrapper):
|
||||
raise typehandlers.NotSupportedError("Reverse wrappers for ns3::Callback<...> types "
|
||||
"(python using callbacks defined in C++) not implemented.")
|
||||
|
||||
|
||||
def generate_callback_classes(module, callbacks):
|
||||
out = module.after_forward_declarations
|
||||
for callback_impl_num, template_parameters in enumerate(callbacks):
|
||||
sink = MemoryCodeSink()
|
||||
cls_name = "ns3::Callback< %s >" % ', '.join(template_parameters)
|
||||
#print >> sys.stderr, "***** trying to register callback: %r" % cls_name
|
||||
class_name = "PythonCallbackImpl%i" % callback_impl_num
|
||||
sink.writeln('''
|
||||
class %s : public ns3::CallbackImpl<%s>
|
||||
{
|
||||
public:
|
||||
PyObject *m_callback;
|
||||
%s(PyObject *callback)
|
||||
{
|
||||
Py_INCREF(callback);
|
||||
m_callback = callback;
|
||||
}
|
||||
virtual ~%s()
|
||||
{
|
||||
PyGILState_STATE __py_gil_state;
|
||||
__py_gil_state = (PyEval_ThreadsInitialized() ? PyGILState_Ensure() : (PyGILState_STATE) 0);
|
||||
Py_DECREF(m_callback);
|
||||
m_callback = NULL;
|
||||
PyGILState_Release(__py_gil_state);
|
||||
}
|
||||
|
||||
virtual bool IsEqual(ns3::Ptr<const ns3::CallbackImplBase> other_base) const
|
||||
{
|
||||
const %s *other = dynamic_cast<const %s*> (ns3::PeekPointer (other_base));
|
||||
if (other != NULL)
|
||||
return (other->m_callback == m_callback);
|
||||
else
|
||||
return false;
|
||||
}
|
||||
|
||||
''' % (class_name, ', '.join(template_parameters), class_name, class_name, class_name, class_name))
|
||||
sink.indent()
|
||||
callback_return = template_parameters[0]
|
||||
return_ctype = ctypeparser.parse_type(callback_return)
|
||||
if ('const' in return_ctype.remove_modifiers()):
|
||||
kwargs = {'is_const': True}
|
||||
else:
|
||||
kwargs = {}
|
||||
try:
|
||||
return_type = ReturnValue.new(str(return_ctype), **kwargs)
|
||||
except (typehandlers.TypeLookupError, typehandlers.TypeConfigurationError) as ex:
|
||||
warnings.warn("***** Unable to register callback; Return value '%s' error (used in %s): %r"
|
||||
% (callback_return, cls_name, ex),
|
||||
Warning)
|
||||
continue
|
||||
|
||||
arguments = []
|
||||
ok = True
|
||||
callback_parameters = [arg for arg in template_parameters[1:] if arg != 'ns3::empty']
|
||||
for arg_num, arg_type in enumerate(callback_parameters):
|
||||
arg_name = 'arg%i' % (arg_num+1)
|
||||
|
||||
param_ctype = ctypeparser.parse_type(arg_type)
|
||||
if ('const' in param_ctype.remove_modifiers()):
|
||||
kwargs = {'is_const': True}
|
||||
else:
|
||||
kwargs = {}
|
||||
try:
|
||||
param = Parameter.new(str(param_ctype), arg_name, **kwargs)
|
||||
cpp_class = getattr(param, "cpp_class", None)
|
||||
if isinstance(cpp_class, cppclass.CppClass):
|
||||
# check if the "helper class" can be constructed
|
||||
if cpp_class.helper_class is not None:
|
||||
cpp_class.helper_class.generate_forward_declarations(
|
||||
MemoryCodeSink())
|
||||
if cpp_class.helper_class.cannot_be_constructed:
|
||||
cpp_class.helper_class = None
|
||||
cpp_class.helper_class_disabled = True
|
||||
arguments.append(param)
|
||||
except (typehandlers.TypeLookupError, typehandlers.TypeConfigurationError) as ex:
|
||||
warnings.warn("***** Unable to register callback; parameter '%s %s' error (used in %s): %r"
|
||||
% (arg_type, arg_name, cls_name, ex),
|
||||
Warning)
|
||||
ok = False
|
||||
if not ok:
|
||||
try:
|
||||
typehandlers.return_type_matcher.lookup(cls_name)[0].DISABLED = True
|
||||
except typehandlers.TypeLookupError:
|
||||
pass
|
||||
try:
|
||||
typehandlers.param_type_matcher.lookup(cls_name)[0].DISABLED = True
|
||||
except typehandlers.TypeLookupError:
|
||||
pass
|
||||
continue
|
||||
|
||||
wrapper = CallbackImplProxyMethod(return_type, arguments)
|
||||
wrapper.generate(sink, 'operator()', decl_modifiers=[])
|
||||
|
||||
sink.unindent()
|
||||
sink.writeln('};\n')
|
||||
print("Flushing to ", out, file=sys.stderr)
|
||||
sink.flush_to(out)
|
||||
|
||||
|
||||
# def write_preamble(out):
|
||||
# pybindgen.write_preamble(out)
|
||||
# out.writeln("#include \"ns3/everything.h\"")
|
||||
|
||||
|
||||
|
||||
def Simulator_customizations(module):
|
||||
Simulator = module['ns3::Simulator']
|
||||
|
||||
## Simulator::Schedule(delay, callback, ...user..args...)
|
||||
Simulator.add_custom_method_wrapper("Schedule", "_wrap_Simulator_Schedule",
|
||||
flags=["METH_VARARGS", "METH_KEYWORDS", "METH_STATIC"])
|
||||
|
||||
|
||||
## Simulator::ScheduleNow(callback, ...user..args...)
|
||||
Simulator.add_custom_method_wrapper("ScheduleNow", "_wrap_Simulator_ScheduleNow",
|
||||
flags=["METH_VARARGS", "METH_KEYWORDS", "METH_STATIC"])
|
||||
|
||||
|
||||
## Simulator::ScheduleDestroy(callback, ...user..args...)
|
||||
Simulator.add_custom_method_wrapper("ScheduleDestroy", "_wrap_Simulator_ScheduleDestroy",
|
||||
flags=["METH_VARARGS", "METH_KEYWORDS", "METH_STATIC"])
|
||||
|
||||
Simulator.add_custom_method_wrapper("Run", "_wrap_Simulator_Run",
|
||||
flags=["METH_VARARGS", "METH_KEYWORDS", "METH_STATIC"])
|
||||
|
||||
|
||||
def CommandLine_customizations(module):
|
||||
CommandLine = module['ns3::CommandLine']
|
||||
CommandLine.add_method('Parse', None, [ArgvParam(None, 'argv')],
|
||||
is_static=False)
|
||||
CommandLine.add_custom_method_wrapper("AddValue", "_wrap_CommandLine_AddValue",
|
||||
flags=["METH_VARARGS", "METH_KEYWORDS"])
|
||||
|
||||
|
||||
def Object_customizations(module):
|
||||
## ---------------------------------------------------------------------
|
||||
## Here we generate custom constructor code for all classes that
|
||||
## derive from ns3::Object. The custom constructors are needed in
|
||||
## order to support kwargs only and to translate kwargs into ns3
|
||||
## attributes, etc.
|
||||
## ---------------------------------------------------------------------
|
||||
try:
|
||||
Object = module['ns3::Object']
|
||||
except KeyError:
|
||||
return
|
||||
|
||||
## add a GetTypeId method to all generatd helper classes
|
||||
def helper_class_hook(helper_class):
|
||||
decl = """
|
||||
static ns3::TypeId GetTypeId (void)
|
||||
{
|
||||
static ns3::TypeId tid = ns3::TypeId ("%s")
|
||||
.SetParent< %s > ()
|
||||
;
|
||||
return tid;
|
||||
}""" % (helper_class.name, helper_class.class_.full_name)
|
||||
|
||||
helper_class.add_custom_method(decl)
|
||||
helper_class.add_post_generation_code(
|
||||
"NS_OBJECT_ENSURE_REGISTERED (%s);" % helper_class.name)
|
||||
Object.add_helper_class_hook(helper_class_hook)
|
||||
|
||||
def ns3_object_instance_creation_function(cpp_class, code_block, lvalue,
|
||||
parameters, construct_type_name):
|
||||
assert lvalue
|
||||
assert not lvalue.startswith('None')
|
||||
if cpp_class.cannot_be_constructed:
|
||||
raise CodeGenerationError("%s cannot be constructed (%s)"
|
||||
% cpp_class.full_name)
|
||||
if cpp_class.incomplete_type:
|
||||
raise CodeGenerationError("%s cannot be constructed (incomplete type)"
|
||||
% cpp_class.full_name)
|
||||
code_block.write_code("%s = new %s(%s);" % (lvalue, construct_type_name, parameters))
|
||||
code_block.write_code("%s->Ref ();" % (lvalue))
|
||||
|
||||
def ns3_object_post_instance_creation_function(cpp_class, code_block, lvalue,
|
||||
parameters, construct_type_name):
|
||||
code_block.write_code("ns3::CompleteConstruct(%s);" % (lvalue, ))
|
||||
|
||||
Object.set_instance_creation_function(ns3_object_instance_creation_function)
|
||||
Object.set_post_instance_creation_function(ns3_object_post_instance_creation_function)
|
||||
|
||||
|
||||
def Attribute_customizations(module):
|
||||
# Fix up for the "const AttributeValue &v = EmptyAttribute()"
|
||||
# case, as used extensively by helper classes.
|
||||
|
||||
# Here's why we need to do this: pybindgen.gccxmlscanner, when
|
||||
# scanning parameter default values, is only provided with the
|
||||
# value as a simple C expression string. (py)gccxml does not
|
||||
# report the type of the default value.
|
||||
|
||||
# As a workaround, here we iterate over all parameters of all
|
||||
# methods of all classes and tell pybindgen what is the type of
|
||||
# the default value for attributes.
|
||||
|
||||
for cls in module.classes:
|
||||
for meth in cls.get_all_methods():
|
||||
for param in meth.parameters:
|
||||
if isinstance(param, cppclass.CppClassRefParameter):
|
||||
if param.cpp_class.name == 'AttributeValue' \
|
||||
and param.default_value is not None \
|
||||
and param.default_value_type is None:
|
||||
param.default_value_type = 'ns3::EmptyAttributeValue'
|
||||
|
||||
|
||||
def TypeId_customizations(module):
|
||||
TypeId = module['ns3::TypeId']
|
||||
TypeId.add_custom_method_wrapper("LookupByNameFailSafe", "_wrap_TypeId_LookupByNameFailSafe",
|
||||
flags=["METH_VARARGS", "METH_KEYWORDS", "METH_STATIC"])
|
||||
|
||||
|
||||
def add_std_ofstream(module):
|
||||
module.add_include('<fstream>')
|
||||
ostream = module.add_class('ostream', foreign_cpp_namespace='::std')
|
||||
ostream.set_cannot_be_constructed("abstract base class")
|
||||
ofstream = module.add_class('ofstream', foreign_cpp_namespace='::std', parent=ostream)
|
||||
ofstream.add_enum('openmode', [
|
||||
('app', 'std::ios_base::app'),
|
||||
('ate', 'std::ios_base::ate'),
|
||||
('binary', 'std::ios_base::binary'),
|
||||
('in', 'std::ios_base::in'),
|
||||
('out', 'std::ios_base::out'),
|
||||
('trunc', 'std::ios_base::trunc'),
|
||||
])
|
||||
ofstream.add_constructor([Parameter.new("const char *", 'filename'),
|
||||
Parameter.new("::std::ofstream::openmode", 'mode', default_value="std::ios_base::out")])
|
||||
ofstream.add_method('close', None, [])
|
||||
|
||||
add_std_ios_openmode(module)
|
||||
|
||||
|
||||
class IosOpenmodeParam(Parameter):
|
||||
|
||||
DIRECTIONS = [Parameter.DIRECTION_IN]
|
||||
CTYPES = ['std::ios_base::openmode', 'std::_Ios_Openmode']
|
||||
|
||||
def convert_c_to_python(self, wrapper):
|
||||
assert isinstance(wrapper, ReverseWrapperBase)
|
||||
wrapper.build_params.add_parameter('i', [self.value])
|
||||
|
||||
def convert_python_to_c(self, wrapper):
|
||||
assert isinstance(wrapper, ForwardWrapperBase)
|
||||
name = wrapper.declarations.declare_variable("std::ios_base::openmode", self.name, self.default_value)
|
||||
wrapper.parse_params.add_parameter('i', ['&'+name], self.name, optional=bool(self.default_value))
|
||||
wrapper.call_params.append(name)
|
||||
|
||||
|
||||
|
||||
def add_std_ios_openmode(module):
|
||||
for flag in 'in', 'out', 'ate', 'app', 'trunc', 'binary':
|
||||
module.after_init.write_code('PyModule_AddIntConstant(m, (char *) "STD_IOS_%s", std::ios::%s);'
|
||||
% (flag.upper(), flag))
|
||||
|
||||
|
||||
|
||||
def add_ipv4_address_tp_hash(module):
|
||||
module.body.writeln('''
|
||||
long
|
||||
_ns3_Ipv4Address_tp_hash (PyObject *obj)
|
||||
{
|
||||
PyNs3Ipv4Address *addr = reinterpret_cast<PyNs3Ipv4Address *> (obj);
|
||||
return static_cast<long> (ns3::Ipv4AddressHash () (*addr->obj));
|
||||
}
|
||||
''')
|
||||
module.header.writeln('long _ns3_Ipv4Address_tp_hash (PyObject *obj);')
|
||||
module['Ipv4Address'].pytype.slots['tp_hash'] = "_ns3_Ipv4Address_tp_hash"
|
||||
@@ -1,292 +0,0 @@
|
||||
#! /usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import os.path
|
||||
|
||||
import pybindgen.settings
|
||||
from pybindgen.castxmlparser import ModuleParser, PygenClassifier, PygenSection, WrapperWarning, find_declaration_from_name
|
||||
from pybindgen.typehandlers.codesink import FileCodeSink
|
||||
from pygccxml.declarations import templates
|
||||
from pygccxml.declarations.enumeration import enumeration_t
|
||||
from pygccxml.declarations.class_declaration import class_t
|
||||
from pygccxml.declarations.free_calldef import free_function_t
|
||||
from pygccxml.declarations.calldef_members import constructor_t, member_function_t
|
||||
from pygccxml.declarations.calldef import calldef_t
|
||||
|
||||
## we need the smart pointer type transformation to be active even
|
||||
## during castxml scanning.
|
||||
import ns3modulegen_core_customizations
|
||||
|
||||
|
||||
## silence castxmlparser errors; we only want error handling in the
|
||||
## generated python script, not while scanning.
|
||||
class ErrorHandler(pybindgen.settings.ErrorHandler):
|
||||
def handle_error(self, dummy_wrapper, dummy_exception, dummy_traceback_):
|
||||
return True
|
||||
pybindgen.settings.error_handler = ErrorHandler()
|
||||
import warnings
|
||||
warnings.filterwarnings(category=WrapperWarning, action='ignore')
|
||||
|
||||
|
||||
import ns3modulescan
|
||||
type_annotations = ns3modulescan.type_annotations
|
||||
|
||||
|
||||
def get_ns3_relative_path(path):
|
||||
l = []
|
||||
head = path
|
||||
if not path:
|
||||
return
|
||||
while head:
|
||||
new_head, tail = os.path.split(head)
|
||||
if new_head == head:
|
||||
raise ValueError
|
||||
head = new_head
|
||||
if tail == 'ns3':
|
||||
return os.path.join(*l)
|
||||
l.insert(0, tail)
|
||||
raise AssertionError("is the path %r inside ns3?!" % path)
|
||||
|
||||
class PreScanHook:
|
||||
|
||||
def __init__(self, headers_map, module):
|
||||
self.headers_map = headers_map
|
||||
self.module = module
|
||||
|
||||
def __call__(self, module_parser,
|
||||
pygccxml_definition,
|
||||
global_annotations,
|
||||
parameter_annotations):
|
||||
try:
|
||||
ns3_header = get_ns3_relative_path(pygccxml_definition.location.file_name)
|
||||
except ValueError: # the header is not from ns3
|
||||
return # ignore the definition, it's not ns-3 def.
|
||||
if not ns3_header:
|
||||
return
|
||||
definition_module = self.headers_map[ns3_header]
|
||||
|
||||
## Note: we don't include line numbers in the comments because
|
||||
## those numbers are very likely to change frequently, which would
|
||||
## cause needless changes, since the generated python files are
|
||||
## kept under version control.
|
||||
|
||||
#global_annotations['pygen_comment'] = "%s:%i: %s" % \
|
||||
# (ns3_header, pygccxml_definition.location.line, pygccxml_definition)
|
||||
global_annotations['pygen_comment'] = "%s (module %r): %s" % \
|
||||
(ns3_header, definition_module, pygccxml_definition)
|
||||
|
||||
|
||||
## handle ns3::Object::GetObject (left to its own devices,
|
||||
## pybindgen will generate a mangled name containing the template
|
||||
## argument type name).
|
||||
if isinstance(pygccxml_definition, member_function_t) \
|
||||
and pygccxml_definition.parent.name == 'Object' \
|
||||
and pygccxml_definition.name == 'GetObject':
|
||||
template_args = templates.args(str(pygccxml_definition))
|
||||
if template_args == ['ns3::Object']:
|
||||
global_annotations['template_instance_names'] = 'ns3::Object=>GetObject'
|
||||
|
||||
## Don't wrap Simulator::Schedule* (manually wrapped)
|
||||
if isinstance(pygccxml_definition, member_function_t) \
|
||||
and pygccxml_definition.parent.name == 'Simulator' \
|
||||
and pygccxml_definition.name.startswith('Schedule'):
|
||||
global_annotations['ignore'] = None
|
||||
|
||||
# manually wrapped
|
||||
if isinstance(pygccxml_definition, member_function_t) \
|
||||
and pygccxml_definition.parent.name == 'Simulator' \
|
||||
and pygccxml_definition.name == 'Run':
|
||||
global_annotations['ignore'] = True
|
||||
|
||||
## http://www.gccxml.org/Bug/view.php?id=9915
|
||||
if isinstance(pygccxml_definition, calldef_t):
|
||||
for arg in pygccxml_definition.arguments:
|
||||
if arg.default_value is None:
|
||||
continue
|
||||
elif arg.default_value == "ns3::MilliSeconds( )":
|
||||
arg.default_value = "ns3::MilliSeconds(0)"
|
||||
elif arg.default_value == "ns3::Seconds( )":
|
||||
arg.default_value = "ns3::Seconds(0)"
|
||||
|
||||
## classes
|
||||
if isinstance(pygccxml_definition, class_t):
|
||||
print(pygccxml_definition, file=sys.stderr)
|
||||
# no need for helper classes to allow subclassing in Python, I think...
|
||||
#if pygccxml_definition.name.endswith('Helper'):
|
||||
# global_annotations['allow_subclassing'] = 'false'
|
||||
|
||||
#
|
||||
# If a class is template instantiation, even if the
|
||||
# template was defined in some other module, if a template
|
||||
# argument belongs to this module then the template
|
||||
# instantiation will belong to this module.
|
||||
#
|
||||
if templates.is_instantiation(pygccxml_definition.decl_string):
|
||||
cls_name, template_parameters = templates.split(pygccxml_definition.name)
|
||||
template_parameters_decls = [find_declaration_from_name(module_parser.global_ns, templ_param)
|
||||
for templ_param in template_parameters]
|
||||
#print >> sys.stderr, "********************", cls_name, repr(template_parameters_decls)
|
||||
|
||||
template_parameters_modules = []
|
||||
for templ in template_parameters_decls:
|
||||
if not hasattr(templ, 'location'):
|
||||
continue
|
||||
try:
|
||||
h = get_ns3_relative_path(templ.location.file_name)
|
||||
except ValueError:
|
||||
continue
|
||||
template_parameters_modules.append(self.headers_map[h])
|
||||
|
||||
for templ_mod in template_parameters_modules:
|
||||
if templ_mod == self.module:
|
||||
definition_module = templ_mod
|
||||
break
|
||||
#print >> sys.stderr, "********************", cls_name, repr(template_parameters_modules)
|
||||
|
||||
|
||||
if definition_module != self.module:
|
||||
global_annotations['import_from_module'] = 'ns.%s' % (definition_module.replace('-', '_'),)
|
||||
|
||||
if pygccxml_definition.decl_string.startswith('::ns3::SimpleRefCount<'):
|
||||
global_annotations['incref_method'] = 'Ref'
|
||||
global_annotations['decref_method'] = 'Unref'
|
||||
global_annotations['peekref_method'] = 'GetReferenceCount'
|
||||
global_annotations['automatic_type_narrowing'] = 'true'
|
||||
return
|
||||
|
||||
if pygccxml_definition.decl_string.startswith('::ns3::Callback<'):
|
||||
# manually handled in ns3modulegen_core_customizations.py
|
||||
global_annotations['ignore'] = None
|
||||
return
|
||||
|
||||
if pygccxml_definition.decl_string.startswith('::ns3::TracedCallback<'):
|
||||
global_annotations['ignore'] = None
|
||||
return
|
||||
|
||||
if pygccxml_definition.decl_string.startswith('::ns3::Ptr<'):
|
||||
# handled by pybindgen "type transformation"
|
||||
global_annotations['ignore'] = None
|
||||
return
|
||||
|
||||
# table driven class customization
|
||||
try:
|
||||
annotations = type_annotations[pygccxml_definition.decl_string]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
global_annotations.update(annotations)
|
||||
|
||||
## enums
|
||||
if isinstance(pygccxml_definition, enumeration_t):
|
||||
if definition_module != self.module:
|
||||
global_annotations['import_from_module'] = 'ns.%s' % definition_module
|
||||
|
||||
## free functions
|
||||
if isinstance(pygccxml_definition, free_function_t):
|
||||
|
||||
if definition_module != self.module:
|
||||
global_annotations['ignore'] = None
|
||||
return
|
||||
|
||||
if pygccxml_definition.name == 'PeekPointer':
|
||||
global_annotations['ignore'] = None
|
||||
return
|
||||
|
||||
## table driven methods/constructors/functions customization
|
||||
if isinstance(pygccxml_definition, (free_function_t, member_function_t, constructor_t)):
|
||||
try:
|
||||
annotations = type_annotations[str(pygccxml_definition)]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
for key,value in list(annotations.items()):
|
||||
if key == 'params':
|
||||
parameter_annotations.update (value)
|
||||
del annotations['params']
|
||||
global_annotations.update(annotations)
|
||||
|
||||
|
||||
# def post_scan_hook(dummy_module_parser, dummy_pygccxml_definition, pybindgen_wrapper):
|
||||
# ## classes
|
||||
# if isinstance(pybindgen_wrapper, CppClass):
|
||||
# if pybindgen_wrapper.name.endswith('Checker'):
|
||||
# print >> sys.stderr, "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!", pybindgen_wrapper
|
||||
# #pybindgen_wrapper.set_instance_creation_function(AttributeChecker_instance_creation_function)
|
||||
|
||||
|
||||
def scan_callback_classes(module_parser, callback_classes_file):
|
||||
callback_classes_file.write("callback_classes = [\n")
|
||||
for cls in module_parser.module_namespace.classes(function=module_parser.location_filter,
|
||||
recursive=False):
|
||||
if not cls.name.startswith("Callback<"):
|
||||
continue
|
||||
assert templates.is_instantiation(cls.decl_string), "%s is not a template instantiation" % cls
|
||||
dummy_cls_name, template_parameters = templates.split(cls.decl_string)
|
||||
callback_classes_file.write(" %r,\n" % template_parameters)
|
||||
callback_classes_file.write("]\n")
|
||||
|
||||
|
||||
def ns3_module_scan(top_builddir, module_name, headers_map, output_file_name, cflags):
|
||||
module_parser = ModuleParser('ns.%s' % module_name.replace('-', '_'), 'ns3')
|
||||
module_parser.add_pre_scan_hook(PreScanHook(headers_map, module_name))
|
||||
#module_parser.add_post_scan_hook(post_scan_hook)
|
||||
|
||||
castxml_options = dict(
|
||||
include_paths=[top_builddir, os.path.join(top_builddir, "include")],
|
||||
define_symbols={
|
||||
#'NS3_ASSERT_ENABLE': None,
|
||||
#'NS3_LOG_ENABLE': None,
|
||||
},
|
||||
cflags=('-std=c++17 %s' % cflags)
|
||||
)
|
||||
|
||||
try:
|
||||
os.unlink(output_file_name)
|
||||
except OSError:
|
||||
pass
|
||||
try:
|
||||
os.makedirs(os.path.dirname(output_file_name))
|
||||
except OSError:
|
||||
pass
|
||||
output_file = open(output_file_name, "wt")
|
||||
output_sink = FileCodeSink(output_file)
|
||||
|
||||
# if there exists a scan-header.h file in src/<module>/bindings or contrib/<module>/bindings,
|
||||
# scan it, otherwise scan ns3/xxxx-module.h.
|
||||
scan_header = os.path.join(os.path.dirname(output_file_name), "scan-header.h")
|
||||
if not os.path.exists(scan_header):
|
||||
scan_header = os.path.join(top_builddir, "ns3", "%s-module.h" % module_name)
|
||||
if not os.path.exists(scan_header):
|
||||
scan_header = os.path.join(top_builddir, "include", "ns3", "%s-module.h" % module_name)
|
||||
|
||||
module_parser.parse_init([scan_header],
|
||||
None, whitelist_paths=[top_builddir],
|
||||
pygen_sink=output_sink,
|
||||
castxml_options=castxml_options)
|
||||
module_parser.scan_types()
|
||||
|
||||
callback_classes_file = open(os.path.join(os.path.dirname(output_file_name), "callbacks_list.py"), "wt")
|
||||
scan_callback_classes(module_parser, callback_classes_file)
|
||||
callback_classes_file.close()
|
||||
|
||||
|
||||
module_parser.scan_methods()
|
||||
module_parser.scan_functions()
|
||||
module_parser.parse_finalize()
|
||||
|
||||
output_file.close()
|
||||
os.chmod(output_file_name, 0o400)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if len(sys.argv) != 6:
|
||||
print("ns3modulescan-modular.py top_builddir module_path module_headers output_file_name cflags")
|
||||
sys.exit(1)
|
||||
if os.path.exists(sys.argv[3]):
|
||||
import json
|
||||
with open(sys.argv[3], "r") as f:
|
||||
module_headers = json.load(f)
|
||||
else:
|
||||
module_headers = eval(sys.argv[3])
|
||||
ns3_module_scan(sys.argv[1], sys.argv[2], module_headers, sys.argv[4], sys.argv[5])
|
||||
sys.exit(0)
|
||||
@@ -1,335 +0,0 @@
|
||||
#! /usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import os.path
|
||||
|
||||
import pybindgen.settings
|
||||
from pybindgen.castxmlparser import ModuleParser, PygenClassifier, PygenSection, WrapperWarning
|
||||
from pybindgen.typehandlers.codesink import FileCodeSink
|
||||
from pygccxml.declarations import templates
|
||||
from pygccxml.declarations.class_declaration import class_t
|
||||
from pygccxml.declarations.free_calldef import free_function_t
|
||||
from pygccxml.declarations.calldef_members import constructor_t, member_function_t
|
||||
from pygccxml.declarations.calldef import calldef_t
|
||||
|
||||
|
||||
## we need the smart pointer type transformation to be active even
|
||||
## during gccxml scanning.
|
||||
import ns3modulegen_core_customizations
|
||||
|
||||
|
||||
## silence castxmlparser errors; we only want error handling in the
|
||||
## generated python script, not while scanning.
|
||||
class ErrorHandler(pybindgen.settings.ErrorHandler):
|
||||
def handle_error(self, dummy_wrapper, dummy_exception, dummy_traceback_):
|
||||
return True
|
||||
pybindgen.settings.error_handler = ErrorHandler()
|
||||
import warnings
|
||||
warnings.filterwarnings(category=WrapperWarning, action='ignore')
|
||||
|
||||
type_annotations = {
|
||||
'::ns3::AttributeChecker': {
|
||||
'automatic_type_narrowing': 'true',
|
||||
'allow_subclassing': 'false',
|
||||
},
|
||||
'::ns3::AttributeValue': {
|
||||
'automatic_type_narrowing': 'true',
|
||||
'allow_subclassing': 'false',
|
||||
},
|
||||
|
||||
'::ns3::CommandLine': {
|
||||
'allow_subclassing': 'true', # needed so that AddValue is able to set attributes on the object
|
||||
},
|
||||
|
||||
'::ns3::NscTcpL4Protocol': {
|
||||
'ignore': 'true', # this class is implementation detail
|
||||
},
|
||||
|
||||
|
||||
'ns3::RandomVariable::RandomVariable(ns3::RandomVariableBase const & variable) [constructor]': {
|
||||
'ignore': None,
|
||||
},
|
||||
'ns3::RandomVariableBase * ns3::RandomVariable::Peek() const [member function]': {
|
||||
'ignore': None,
|
||||
},
|
||||
'void ns3::RandomVariable::GetSeed(uint32_t * seed) const [member function]': {
|
||||
'params': {'seed':{'direction':'out',
|
||||
'array_length':'6'}}
|
||||
},
|
||||
'bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]': {
|
||||
'params': {'info':{'transfer_ownership': 'false'}}
|
||||
},
|
||||
'static bool ns3::TypeId::LookupByNameFailSafe(std::string name, ns3::TypeId * tid) [member function]': {
|
||||
'ignore': None, # manually wrapped in
|
||||
},
|
||||
'bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]': {
|
||||
'params': {'obj': {'transfer_ownership':'false'}}
|
||||
},
|
||||
'bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]': {
|
||||
'params': {'obj': {'transfer_ownership':'false'}}
|
||||
},
|
||||
'bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]': {
|
||||
'params': {'obj': {'transfer_ownership':'false'}}
|
||||
},
|
||||
'bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]': {
|
||||
'params': {'obj': {'transfer_ownership':'false'}}
|
||||
},
|
||||
'bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]': {
|
||||
'params': {'object': {'transfer_ownership':'false'}}
|
||||
},
|
||||
'ns3::EmpiricalVariable::EmpiricalVariable(ns3::RandomVariableBase const & variable) [constructor]': {
|
||||
'ignore': None
|
||||
},
|
||||
'static ns3::AttributeList * ns3::AttributeList::GetGlobal() [member function]': {
|
||||
'caller_owns_return': 'false'
|
||||
},
|
||||
'void ns3::CommandLine::Parse(int argc, char * * argv) const [member function]': {
|
||||
'ignore': None # manually wrapped
|
||||
},
|
||||
'extern void ns3::PythonCompleteConstruct(ns3::Ptr<ns3::Object> object, ns3::TypeId typeId, ns3::AttributeList const & attributes) [free function]': {
|
||||
'ignore': None # used transparently by, should not be wrapped
|
||||
},
|
||||
|
||||
'ns3::Ptr<ns3::Ipv4RoutingProtocol> ns3::Ipv4ListRouting::GetRoutingProtocol(uint32_t index, int16_t & priority) const [member function]': {
|
||||
'params': {'priority':{'direction':'out'}}
|
||||
},
|
||||
'ns3::Ipv4RoutingTableEntry * ns3::GlobalRouter::GetInjectedRoute(uint32_t i) [member function]': {
|
||||
'params': {'return': { 'caller_owns_return': 'false',}},
|
||||
},
|
||||
'ns3::Ipv4RoutingTableEntry * ns3::Ipv4GlobalRouting::GetRoute(uint32_t i) const [member function]': {
|
||||
'params': {'return': { 'caller_owns_return': 'false',}},
|
||||
},
|
||||
|
||||
'::ns3::TestCase': {
|
||||
'ignore': 'true', # we don't need to write test cases in Python
|
||||
},
|
||||
'::ns3::TestRunner': {
|
||||
'ignore': 'true', # we don't need to write test cases in Python
|
||||
},
|
||||
'::ns3::TestSuite': {
|
||||
'ignore': 'true', # we don't need to write test cases in Python
|
||||
},
|
||||
|
||||
}
|
||||
|
||||
def get_ns3_relative_path(path):
|
||||
l = []
|
||||
head = path
|
||||
while head:
|
||||
head, tail = os.path.split(head)
|
||||
if tail == 'ns3':
|
||||
return os.path.join(*l)
|
||||
l.insert(0, tail)
|
||||
raise AssertionError("is the path %r inside ns3?!" % path)
|
||||
|
||||
|
||||
def pre_scan_hook(dummy_module_parser,
|
||||
pygccxml_definition,
|
||||
global_annotations,
|
||||
parameter_annotations):
|
||||
ns3_header = get_ns3_relative_path(pygccxml_definition.location.file_name)
|
||||
|
||||
## Note: we don't include line numbers in the comments because
|
||||
## those numbers are very likely to change frequently, which would
|
||||
## cause needless changes, since the generated python files are
|
||||
## kept under version control.
|
||||
|
||||
#global_annotations['pygen_comment'] = "%s:%i: %s" % \
|
||||
# (ns3_header, pygccxml_definition.location.line, pygccxml_definition)
|
||||
global_annotations['pygen_comment'] = "%s: %s" % \
|
||||
(ns3_header, pygccxml_definition)
|
||||
|
||||
|
||||
## handle ns3::Object::GetObject (left to its own devices,
|
||||
## pybindgen will generate a mangled name containing the template
|
||||
## argument type name).
|
||||
if isinstance(pygccxml_definition, member_function_t) \
|
||||
and pygccxml_definition.parent.name == 'Object' \
|
||||
and pygccxml_definition.name == 'GetObject':
|
||||
template_args = templates.args(pygccxml_definition.demangled_name)
|
||||
if template_args == ['ns3::Object']:
|
||||
global_annotations['template_instance_names'] = 'ns3::Object=>GetObject'
|
||||
|
||||
## Don't wrap Simulator::Schedule* (manually wrapped)
|
||||
if isinstance(pygccxml_definition, member_function_t) \
|
||||
and pygccxml_definition.parent.name == 'Simulator' \
|
||||
and pygccxml_definition.name.startswith('Schedule'):
|
||||
global_annotations['ignore'] = None
|
||||
|
||||
# manually wrapped
|
||||
if isinstance(pygccxml_definition, member_function_t) \
|
||||
and pygccxml_definition.parent.name == 'Simulator' \
|
||||
and pygccxml_definition.name == 'Run':
|
||||
global_annotations['ignore'] = True
|
||||
|
||||
## http://www.gccxml.org/Bug/view.php?id=9915
|
||||
if isinstance(pygccxml_definition, calldef_t):
|
||||
for arg in pygccxml_definition.arguments:
|
||||
if arg.default_value is None:
|
||||
continue
|
||||
if "ns3::MilliSeconds( )" == arg.default_value:
|
||||
arg.default_value = "ns3::MilliSeconds(0)"
|
||||
if "ns3::Seconds( )" == arg.default_value:
|
||||
arg.default_value = "ns3::Seconds(0)"
|
||||
|
||||
## classes
|
||||
if isinstance(pygccxml_definition, class_t):
|
||||
# no need for helper classes to allow subclassing in Python, I think...
|
||||
#if pygccxml_definition.name.endswith('Helper'):
|
||||
# global_annotations['allow_subclassing'] = 'false'
|
||||
|
||||
if pygccxml_definition.decl_string.startswith('::ns3::SimpleRefCount<'):
|
||||
global_annotations['incref_method'] = 'Ref'
|
||||
global_annotations['decref_method'] = 'Unref'
|
||||
global_annotations['peekref_method'] = 'GetReferenceCount'
|
||||
global_annotations['automatic_type_narrowing'] = 'true'
|
||||
return
|
||||
|
||||
if pygccxml_definition.decl_string.startswith('::ns3::Callback<'):
|
||||
# manually handled in ns3modulegen_core_customizations.py
|
||||
global_annotations['ignore'] = None
|
||||
return
|
||||
|
||||
if pygccxml_definition.decl_string.startswith('::ns3::TracedCallback<'):
|
||||
global_annotations['ignore'] = None
|
||||
return
|
||||
|
||||
if pygccxml_definition.decl_string.startswith('::ns3::Ptr<'):
|
||||
# handled by pybindgen "type transformation"
|
||||
global_annotations['ignore'] = None
|
||||
return
|
||||
|
||||
# table driven class customization
|
||||
try:
|
||||
annotations = type_annotations[pygccxml_definition.decl_string]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
global_annotations.update(annotations)
|
||||
|
||||
## free functions
|
||||
if isinstance(pygccxml_definition, free_function_t):
|
||||
if pygccxml_definition.name == 'PeekPointer':
|
||||
global_annotations['ignore'] = None
|
||||
return
|
||||
|
||||
## table driven methods/constructors/functions customization
|
||||
if isinstance(pygccxml_definition, (free_function_t, member_function_t, constructor_t)):
|
||||
try:
|
||||
annotations = type_annotations[str(pygccxml_definition)]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
for key,value in annotations.items():
|
||||
if key == 'params':
|
||||
parameter_annotations.update (value)
|
||||
del annotations['params']
|
||||
global_annotations.update(annotations)
|
||||
|
||||
|
||||
# def post_scan_hook(dummy_module_parser, dummy_pygccxml_definition, pybindgen_wrapper):
|
||||
# ## classes
|
||||
# if isinstance(pybindgen_wrapper, CppClass):
|
||||
# if pybindgen_wrapper.name.endswith('Checker'):
|
||||
# print >> sys.stderr, "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!", pybindgen_wrapper
|
||||
# #pybindgen_wrapper.set_instance_creation_function(AttributeChecker_instance_creation_function)
|
||||
|
||||
|
||||
def scan_callback_classes(module_parser, callback_classes_file):
|
||||
callback_classes_file.write("callback_classes = [\n")
|
||||
for cls in module_parser.module_namespace.classes(function=module_parser.location_filter,
|
||||
recursive=False):
|
||||
if not cls.name.startswith("Callback<"):
|
||||
continue
|
||||
assert templates.is_instantiation(cls.decl_string), "%s is not a template instantiation" % cls
|
||||
dummy_cls_name, template_parameters = templates.split(cls.decl_string)
|
||||
callback_classes_file.write(" %r,\n" % template_parameters)
|
||||
callback_classes_file.write("]\n")
|
||||
|
||||
|
||||
class MyPygenClassifier(PygenClassifier):
|
||||
def __init__(self, headers_map, section_precendences):
|
||||
self.headers_map = headers_map
|
||||
self.section_precendences = section_precendences
|
||||
|
||||
def classify(self, pygccxml_definition):
|
||||
name = os.path.basename(pygccxml_definition.location.file_name)
|
||||
try:
|
||||
return self.headers_map[name]
|
||||
except KeyError:
|
||||
return '__main__'
|
||||
|
||||
def get_section_precedence(self, section_name):
|
||||
if section_name == '__main__':
|
||||
return -1
|
||||
return self.section_precendences[section_name]
|
||||
|
||||
|
||||
def ns3_module_scan(top_builddir, pygen_file_name, everything_h, cflags):
|
||||
|
||||
ns3_modules = eval(sys.stdin.readline())
|
||||
|
||||
## do a topological sort on the modules graph
|
||||
from topsort import topsort
|
||||
graph = []
|
||||
module_names = ns3_modules.keys()
|
||||
module_names.sort()
|
||||
for ns3_module_name in module_names:
|
||||
ns3_module_deps = list(ns3_modules[ns3_module_name][0])
|
||||
ns3_module_deps.sort()
|
||||
for dep in ns3_module_deps:
|
||||
graph.append((dep, ns3_module_name))
|
||||
sorted_ns3_modules = topsort(graph)
|
||||
#print >> sys.stderr, "******* topological sort: ", sorted_ns3_modules
|
||||
|
||||
sections = [PygenSection('__main__', FileCodeSink(open(pygen_file_name, "wt")))]
|
||||
headers_map = {} # header_name -> section_name
|
||||
section_precendences = {} # section_name -> precedence
|
||||
for prec, ns3_module in enumerate(sorted_ns3_modules):
|
||||
section_name = "ns3_module_%s" % ns3_module.replace('-', '_')
|
||||
file_name = os.path.join(os.path.dirname(pygen_file_name), "%s.py" % section_name)
|
||||
sections.append(PygenSection(section_name, FileCodeSink(open(file_name, "wt")),
|
||||
section_name + "__local"))
|
||||
for header in ns3_modules[ns3_module][1]:
|
||||
headers_map[header] = section_name
|
||||
section_precendences[section_name] = prec
|
||||
|
||||
module_parser = ModuleParser('ns3', 'ns3')
|
||||
|
||||
module_parser.add_pre_scan_hook(pre_scan_hook)
|
||||
#module_parser.add_post_scan_hook(post_scan_hook)
|
||||
|
||||
gccxml_options = dict(
|
||||
include_paths=[top_builddir],
|
||||
define_symbols={
|
||||
#'NS3_ASSERT_ENABLE': None,
|
||||
#'NS3_LOG_ENABLE': None,
|
||||
},
|
||||
cflags=('--gccxml-cxxflags "%s -DPYTHON_SCAN"' % cflags)
|
||||
)
|
||||
|
||||
module_parser.parse_init([everything_h],
|
||||
None, whitelist_paths=[top_builddir, os.path.dirname(everything_h)],
|
||||
#includes=['"ns3/everything.h"'],
|
||||
pygen_sink=sections,
|
||||
pygen_classifier=MyPygenClassifier(headers_map, section_precendences),
|
||||
gccxml_options=gccxml_options)
|
||||
module_parser.scan_types()
|
||||
|
||||
callback_classes_file = open(os.path.join(os.path.dirname(pygen_file_name), "callbacks_list.py"), "wt")
|
||||
scan_callback_classes(module_parser, callback_classes_file)
|
||||
callback_classes_file.close()
|
||||
|
||||
|
||||
module_parser.scan_methods()
|
||||
module_parser.scan_functions()
|
||||
module_parser.parse_finalize()
|
||||
|
||||
for section in sections:
|
||||
section.code_sink.file.close()
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
ns3_module_scan(sys.argv[1], sys.argv[3], sys.argv[2], sys.argv[4])
|
||||
|
||||
@@ -1 +1,218 @@
|
||||
import builtins
|
||||
import os.path
|
||||
import sys
|
||||
import re
|
||||
|
||||
def find_ns3_directory():
|
||||
# Get the absolute path to this file
|
||||
path_to_this_init_file = os.path.dirname(os.path.abspath(__file__))
|
||||
path_to_lock = path_to_this_init_file
|
||||
lock_file = (".lock-ns3_%s_build" % sys.platform)
|
||||
|
||||
# Go back and scan each folder until the c4che folder is found
|
||||
found = False
|
||||
while not found:
|
||||
for f in os.listdir(path_to_lock):
|
||||
# Skip files
|
||||
if not os.path.isfile(f):
|
||||
continue
|
||||
# Search for lock
|
||||
if lock_file in f:
|
||||
path_to_lock += os.sep + f
|
||||
found = True
|
||||
break
|
||||
if found:
|
||||
break
|
||||
|
||||
# Move to the directory above if we haven't found it yet
|
||||
old_path = path_to_lock
|
||||
path_to_lock = os.path.dirname(path_to_lock)
|
||||
|
||||
# We give up if we can't move to the directory above
|
||||
if path_to_lock == old_path:
|
||||
raise Exception("ns-3 lock file firectory was not found.\n"
|
||||
"Are you sure %s is inside your ns-3 directory?" % path_to_this_init_file)
|
||||
return path_to_lock
|
||||
|
||||
|
||||
def load_modules(ns3_directory):
    """Load every enabled ns-3 module through cppyy and return the ns3 namespace.

    Reads the build cache written next to the lock file, loads each enabled
    module's header and shared library into cppyy, and installs a number of
    Python-side conveniences (Time comparison operators, CreateObject /
    GetObject helpers, address converters) on cppyy.gbl.ns3.

    NOTE(review): the ns3_directory parameter is never used — the lock file
    is re-resolved via find_ns3_directory() below; confirm which is intended.
    """
    # Load NS3_ENABLED_MODULES from _cache.py file inside the build directory
    values = {}

    exec(open(find_ns3_directory()).read(), {}, values)
    # Non-release profiles are encoded in the library names (e.g. "-debug").
    suffix = "-" + values["BUILD_PROFILE"] if values["BUILD_PROFILE"] != "release" else ""
    required_modules = [module.replace("ns3-", "") for module in values["NS3_ENABLED_MODULES"]]
    ns3_output_directory = values["out_dir"]
    # Map library base name (no extension) -> actual file name on disk.
    libraries = {x.split(".")[0]: x for x in os.listdir(os.path.join(ns3_output_directory, "lib"))}

    import cppyy

    # Enable full logs for debugging
    # cppyy.set_debug(True)

    # Register Ptr<> as a smart pointer
    import libcppyy
    libcppyy.AddSmartPtrType('Ptr')

    # Import ns-3 libraries
    cppyy.add_library_path("%s/lib" % ns3_output_directory)
    cppyy.add_include_path("%s/include" % ns3_output_directory)

    # Pull in every module's umbrella header before loading libraries.
    for module in required_modules:
        cppyy.include("ns3/%s-module.h" % module)

    for module in required_modules:
        library_name = "libns{version}-{module}{suffix}".format(
            version=values["VERSION"],
            module=module,
            suffix=suffix
        )
        if library_name not in libraries:
            raise Exception("Missing library %s\n" % library_name,
                            "Build all modules with './ns3 build'"
                            )
        cppyy.load_library(libraries[library_name])

    # We expose cppyy to consumers of this module as ns.cppyy
    setattr(cppyy.gbl.ns3, "cppyy", cppyy)

    # To maintain compatibility with pybindgen scripts,
    # we set an attribute per module that just redirects to the upper object
    for module in required_modules:
        setattr(cppyy.gbl.ns3, module.replace("-", "_"), cppyy.gbl.ns3)

    # Setup a few tricks: rich comparisons on Time are routed through small
    # C++ shims because cppyy does not map the operators automatically here.
    cppyy.cppdef("""
        using namespace ns3;
        bool Time_ge(Time& a, Time& b){ return a >= b;}
        bool Time_eq(Time& a, Time& b){ return a == b;}
        bool Time_ne(Time& a, Time& b){ return a != b;}
        bool Time_le(Time& a, Time& b){ return a <= b;}
        bool Time_gt(Time& a, Time& b){ return a > b;}
        bool Time_lt(Time& a, Time& b){ return a < b;}
    """)
    cppyy.gbl.ns3.Time.__ge__ = cppyy.gbl.Time_ge
    cppyy.gbl.ns3.Time.__eq__ = cppyy.gbl.Time_eq
    cppyy.gbl.ns3.Time.__ne__ = cppyy.gbl.Time_ne
    cppyy.gbl.ns3.Time.__le__ = cppyy.gbl.Time_le
    cppyy.gbl.ns3.Time.__gt__ = cppyy.gbl.Time_gt
    cppyy.gbl.ns3.Time.__lt__ = cppyy.gbl.Time_lt

    # Node::~Node isn't supposed to destroy the object,
    # since it gets destroyed at the end of the simulation
    # we need to hold the reference until it gets destroyed by C++
    #
    # Search for NodeList::Add (this)
    cppyy.gbl.ns3.__nodes_pending_deletion = []

    def Nodedel(self: cppyy.gbl.ns3.Node) -> None:
        # Park the Python reference so C++ keeps sole control of lifetime.
        cppyy.gbl.ns3.__nodes_pending_deletion.append(self)
        return None
    cppyy.gbl.ns3.Node.__del__ = Nodedel

    # Define ns.cppyy.gbl.addressFromIpv4Address and others
    cppyy.cppdef("""using namespace ns3;
        Address addressFromIpv4Address(Ipv4Address ip){ return Address(ip); };
        Address addressFromInetSocketAddress(InetSocketAddress addr){ return Address(addr); };
        Address addressFromPacketSocketAddress(PacketSocketAddress addr){ return Address(addr); };
    """)
    # Expose addressFromIpv4Address as a member of the ns3 namespace (equivalent to ns)
    setattr(cppyy.gbl.ns3, "addressFromIpv4Address", cppyy.gbl.addressFromIpv4Address)
    setattr(cppyy.gbl.ns3, "addressFromInetSocketAddress", cppyy.gbl.addressFromInetSocketAddress)
    setattr(cppyy.gbl.ns3, "addressFromPacketSocketAddress", cppyy.gbl.addressFromPacketSocketAddress)
    # Static CommandLine keeps one instance alive for the whole process.
    cppyy.cppdef(
        """using namespace ns3; CommandLine& getCommandLine(std::string filename){ static CommandLine g_cmd = CommandLine(filename); return g_cmd; };""")
    setattr(cppyy.gbl.ns3, "getCommandLine", cppyy.gbl.getCommandLine)
    # Explicit instantiation so null_callback() below can link against it.
    cppyy.cppdef(
        """using namespace ns3; template Callback<bool, std::string> ns3::MakeNullCallback<bool, std::string>(void);""")
    cppyy.cppdef(
        """using namespace ns3; Callback<bool, std::string> null_callback(){ return MakeNullCallback<bool, std::string>(); };""")
    setattr(cppyy.gbl.ns3, "null_callback", cppyy.gbl.null_callback)
    #cppyy.cppdef(
    #    """using namespace ns3; template <typename T> Ptr<T> getAggregatedObject(Ptr<Object> parentPtr, T param))
    #    {
    #        return parentPtr->GetObject<T>();
    #    }
    #    """
    #)
    # std::tuple return maps to a Python (ok, TypeId) pair.
    cppyy.cppdef("""
        using namespace ns3;
        std::tuple<bool, TypeId> LookupByNameFailSafe(std::string name)
        {
            TypeId id;
            bool ok = TypeId::LookupByNameFailSafe(name, &id);
            return std::make_tuple(ok, id);
        }
    """)
    setattr(cppyy.gbl.ns3, "LookupByNameFailSafe", cppyy.gbl.LookupByNameFailSafe)

    def CreateObject(className):
        # Factory shim: look for an existing CreateObject_/Create_ helper
        # for this class name; if neither exists yet, JIT-define one and
        # retry. Template markers <> are mangled to underscores.
        try:
            try:
                func = "CreateObject%s" % re.sub('[<|>]', '_', className)
                return getattr(cppyy.gbl, func)()
            except AttributeError:
                pass
            try:
                func = "Create%s" % re.sub('[<|>]', '_', className)
                return getattr(cppyy.gbl, func)()
            except AttributeError:
                pass
            raise AttributeError
        except AttributeError:
            try:
                # First attempt: ns-3 Object subclasses (Ptr<> + CreateObject<>).
                func = "CreateObject%s" % re.sub('[<|>]', '_', className)
                cppyy.cppdef("""
                    using namespace ns3;
                    Ptr<%s> %s(){
                        Ptr<%s> object = CreateObject<%s>();
                        return object;
                    }
                    """ % (className, func, className, className)
                             )
            except Exception as e:
                try:
                    # Fallback: plain value types constructed directly.
                    func = "Create%s" % re.sub('[<|>]', '_', className)
                    cppyy.cppdef("""
                        using namespace ns3;
                        %s %s(){
                            %s object = %s();
                            return object;
                        }
                        """ % (className, func, className, className)
                                 )
                except Exception as e:
                    # NOTE(review): aborts the interpreter on failure instead
                    # of raising — confirm this is the desired behavior.
                    exit(-1)
            return getattr(cppyy.gbl, func)()
    setattr(cppyy.gbl.ns3, "CreateObject", CreateObject)

    def GetObject(parentObject, aggregatedObject):
        # Objects have __cpp_name__ attributes, so parentObject
        # should not have it while aggregatedObject can
        if hasattr(parentObject, "__cpp_name__"):
            raise Exception("Class was passed instead of an instance in parentObject")

        # aggregatedObject may be a cppyy class, an instance, or a type name.
        aggregatedIsClass = hasattr(aggregatedObject, "__cpp_name__")
        aggregatedIsString = type(aggregatedObject) == str
        aggregatedIsInstance = not aggregatedIsClass and not aggregatedIsString

        if aggregatedIsClass:
            aggregatedType = aggregatedObject.__cpp_name__
        if aggregatedIsInstance:
            aggregatedType = aggregatedObject.__class__.__cpp_name__
        if aggregatedIsString:
            aggregatedType = aggregatedObject

        # JIT-specialize getAggregatedObject<> for this aggregated type.
        cppyy.cppdef(
            """using namespace ns3; template <> Ptr<%s> getAggregatedObject<%s>(Ptr<Object> parentPtr, %s param)
            {
                return parentPtr->GetObject<%s>();
            }
            """ % (aggregatedType, aggregatedType, aggregatedType, aggregatedType)
        )
        return cppyy.gbl.getAggregatedObject(parentObject, aggregatedObject if aggregatedIsClass else aggregatedObject.__class__)
    setattr(cppyy.gbl.ns3, "GetObject", GetObject)

    return cppyy.gbl.ns3
|
||||
|
||||
|
||||
# Load all modules and make them available via a built-in.
# Import of this package therefore has heavy side effects: it resolves the
# build directory, loads every enabled ns-3 shared library through cppyy,
# and registers 'ns' as a global builtin.
ns = load_modules(find_ns3_directory()) # can be imported via 'from ns import ns'
builtins.__dict__['ns'] = ns # or be made widely available with 'from ns import *'
|
||||
|
||||
@@ -1,909 +0,0 @@
|
||||
# Copyright (c) 2007 RADLogic
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
"""Provide various handy Python functions.
|
||||
|
||||
Running this script directly will execute the doctests.
|
||||
|
||||
Functions:
|
||||
int2bin(i, n) -- Convert integer to binary string.
|
||||
bin2int(bin_string) -- Convert binary string to integer.
|
||||
reverse(input_string) -- Reverse a string.
|
||||
transpose(matrix) -- Transpose a list of lists.
|
||||
polygon_area(points_list) -- Calculate the area of an arbitrary polygon.
|
||||
timestamp() -- Return string containing current time stamp.
|
||||
pt2str(point) -- Return prettier string version of point tuple.
|
||||
gcf(a, b) -- Return the greatest common factor of two numbers.
|
||||
lcm(a, b) -- Return the least common multiple of two numbers.
|
||||
permutations(input_list) -- Generate all permutations of a list of items.
|
||||
reduce_fraction(fraction) -- Reduce fraction (num, denom) to simplest form.
|
||||
quantile(l, p) -- Return p quantile of list l. E.g. p=0.25 for q1.
|
||||
trim(l) -- Discard values in list more than 1.5*IQR outside IQR.
|
||||
nice_units(value) -- Return value converted to human readable units.
|
||||
uniquify(seq) -- Return sequence with duplicate items in sequence seq removed.
|
||||
reverse_dict(d) -- Return the dictionary with the items as keys and vice-versa.
|
||||
lsb(x, n) -- Return the n least significant bits of x.
|
||||
gray_encode(i) -- Gray encode the given integer.
|
||||
random_vec(bits, max_value=None) -- Return a random binary vector.
|
||||
binary_range(bits) -- Return list of all possible binary numbers width=bits.
|
||||
float_range([start], stop, [step]) -- Return range of floats.
|
||||
find_common_fixes(s1, s2) -- Find common (prefix, suffix) of two strings.
|
||||
is_rotated(seq1, seq2) -- Return true if the list is a rotation of other list.
|
||||
getmodule(obj) -- Return the module that contains the object definition of obj.
|
||||
(use inspect.getmodule instead, though)
|
||||
get_args(argv) -- Store command-line args in a dictionary.
|
||||
|
||||
This module requires Python >= 2.2
|
||||
|
||||
"""
|
||||
__author__ = 'Tim Wegener <twegener@radlogic.com.au>'
|
||||
__date__ = '$Date: 2007/03/27 03:15:06 $'
|
||||
__version__ = '$Revision: 0.45 $'
|
||||
__credits__ = """
|
||||
David Chandler, for polygon area algorithm.
|
||||
(https://web.archive.org/web/20091104151838/http://www.davidchandler.com/AreaOfAGeneralPolygon.pdf)
|
||||
"""
|
||||
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import random
|
||||
|
||||
# Pre-Python-2.2 compatibility shim removed: True/False are always built in
# on every supported interpreter, and the old fallback
# ("True, False = (1==1, 0==1)") assigns to keywords, which is a
# SyntaxError on Python 3 and would prevent this module from importing.
|
||||
|
||||
|
||||
def int2bin(i, n):
    """Convert non-negative integer i to an n-bit binary number (string).

    >>> int2bin(0, 8)
    '00000000'

    >>> int2bin(123, 8)
    '01111011'

    >>> int2bin(15, 2)
    Traceback (most recent call last):
    ValueError: Value too large for given number of bits.

    Raises ValueError for negative input or when truncating to n bits
    would drop a significant '1' bit.
    """
    if i < 0:
        # The old hex-table implementation raised an opaque KeyError here.
        raise ValueError("int2bin requires a non-negative integer.")
    # format() replaces the hand-rolled hex-digit lookup table.
    bits = format(i, 'b')
    # Raise an error if truncation to n bits would change the value.
    if '1' in bits[:-n]:
        raise ValueError("Value too large for given number of bits.")
    # Truncate, then zero-pad to exactly n characters.
    return bits[-n:].rjust(n, '0')
|
||||
|
||||
|
||||
def bin2int(bin_string):
    """Parse a string of binary digits as a decimal integer.

    A ValueError propagates from int() for non-binary input.

    >>> bin2int('1111')
    15

    >>> bin2int('0101')
    5

    """
    # int() with an explicit base does all validation and conversion.
    return int(bin_string, 2)
|
||||
|
||||
|
||||
def reverse(input_string):
    """Return input_string reversed. Handy for binary-number strings.

    >>> reverse('abc')
    'cba'

    """
    # Negative-stride slicing reverses without an intermediate list.
    return input_string[::-1]
|
||||
|
||||
|
||||
def transpose(matrix):
    """Transpose a list of lists, returning a list of lists.

    >>> transpose([['a', 'b', 'c'], ['d', 'e', 'f'], ['g', 'h', 'i']])
    [['a', 'd', 'g'], ['b', 'e', 'h'], ['c', 'f', 'i']]

    >>> transpose([['a', 'b', 'c'], ['d', 'e', 'f']])
    [['a', 'd'], ['b', 'e'], ['c', 'f']]

    """
    # Bug fix: on Python 3 the original returned the lazy `map` object
    # itself, not the list of lists its docstring and doctests promise.
    return [list(row) for row in zip(*matrix)]
|
||||
|
||||
|
||||
def polygon_area(points_list, precision=100):
    """Calculate the area of an arbitrary polygon (shoelace formula).

    Return the area of the polygon as a positive float.

    Arguments:
    points_list -- list of point tuples [(x0, y0), (x1, y1), ...]
                   (Unclosed polygons will be closed automatically;
                   the input list is NOT modified.)
    precision -- internal scaling factor for integer arithmetic.

    >>> polygon_area([(0, 0), (0, 1), (1, 1), (1, 2), (2, 2), (2, 0), (0, 0)])
    3.0

    Credits: Area of a General Polygon by David Chandler.
    """
    # Robustness: an empty polygon has zero area (the original raised
    # IndexError indexing points_list[-1]).
    if not points_list:
        return 0.0
    # Scale up co-ordinates into integers on a COPY, so the caller's list
    # is not mutated (the original scaled and closed it in place).
    points = [(int(x * precision), int(y * precision)) for (x, y) in points_list]
    # Close polygon if not closed.
    if points[-1] != points[0]:
        points.append(points[0])
    # Accumulate the signed doubled area edge by edge.
    area = 0
    for (x_i, y_i), (x_next, y_next) in zip(points, points[1:]):
        area += (x_next * y_i) - (y_next * x_i)
    area = abs(area / 2)
    # Undo the precision scaling.
    return float(area) / (precision ** 2)
|
||||
|
||||
|
||||
def timestamp():
    """Return the current local time as an asctime-style string."""
    # time.asctime() with no argument formats the current local time.
    return time.asctime()
|
||||
|
||||
|
||||
def pt2str(point):
    """Return a prettier string form of a 2-tuple point.

    >>> pt2str((1.8, 1.9))
    '(1.8, 1.9)'

    """
    x, y = point[0], point[1]
    # str() on each coordinate mirrors %s formatting exactly.
    return '(' + str(x) + ', ' + str(y) + ')'
|
||||
|
||||
|
||||
def gcf(a, b, epsilon=1e-16):
    """Return the greatest common factor of a and b (Euclidean algorithm).

    Arguments:
    a, b -- two numbers; integer inputs yield an integer result,
            float inputs a float result.
    epsilon -- remainders smaller than this magnitude count as zero
               (default: 1e-16)

    >>> gcf(12, 34)
    2
    >>> gcf(13.5, 4)
    0.5
    >>> gcf(-2, 4)
    2
    >>> gcf(5, 0)
    5

    By (a convenient) definition:
    >>> gcf(0, 0)
    0
    """
    larger, smaller = max(a, b), min(a, b)
    # Classic Euclid: replace (larger, smaller) with (smaller, remainder)
    # until the remainder vanishes (within epsilon for floats).
    while smaller and abs(smaller) > epsilon:
        larger, smaller = smaller, larger % smaller
    return abs(larger)
|
||||
|
||||
def lcm(a, b, precision=None):
    """Return the least common multiple of a and b, using the gcf function.

    Arguments:
    a, b -- two numbers. If both are integers the result is an integer,
            otherwise a float.
    precision -- dummy argument kept for backwards compatibility.

    >>> lcm(21, 6)
    42

    >>> lcm(2.5, 3.5)
    17.5

    By (an arbitrary) definition:
    >>> lcm(0, 0)
    0

    """
    # Divide first to limit intermediate magnitude
    # (see https://en.wikipedia.org/wiki/Least_common_multiple ).
    denom = gcf(a, b)
    if denom == 0:
        return 0
    # Bug fix: Python 3 true division made integer inputs return a float
    # (lcm(21, 6) == 42.0); use floor division for exact integer results.
    if isinstance(a, int) and isinstance(b, int):
        return a * (b // denom)
    return a * (b / denom)
|
||||
|
||||
|
||||
def permutations(input_list):
    """Return a list containing all permutations of the input list.

    Recursive: permutations of the tail are expanded by inserting the
    head item at every possible position.

    >>> perms = permutations(['a', 'b', 'c'])
    >>> perms.sort()
    >>> for perm in perms:
    ...     print(perm)
    ['a', 'b', 'c']
    ['a', 'c', 'b']
    ['b', 'a', 'c']
    ['b', 'c', 'a']
    ['c', 'a', 'b']
    ['c', 'b', 'a']

    """
    # Base case: an empty or single-item list has one permutation — itself.
    if len(input_list) <= 1:
        return [input_list]
    head = input_list[0]
    results = []
    # For each permutation of the tail, weave the head into every slot.
    for tail_perm in permutations(input_list[1:]):
        for position in range(len(input_list)):
            candidate = list(tail_perm)
            candidate.insert(position, head)
            results.append(candidate)
    return results
|
||||
|
||||
|
||||
def reduce_fraction(fraction):
    """Reduce fraction tuple to simplest form. fraction=(num, denom)

    >>> reduce_fraction((14, 7))
    (2, 1)

    >>> reduce_fraction((-2, 4))
    (-1, 2)

    >>> reduce_fraction((0, 4))
    (0, 1)

    >>> reduce_fraction((4, 0))
    (1, 0)

    """
    (numerator, denominator) = fraction
    common_factor = abs(gcf(numerator, denominator))
    # Bug fix: Python 3 true division turned integer fractions into floats
    # ((2.0, 1.0) where the doctest promises (2, 1)); floor-divide ints.
    if isinstance(numerator, int) and isinstance(denominator, int):
        return (numerator // common_factor, denominator // common_factor)
    return (numerator / common_factor, denominator / common_factor)
|
||||
|
||||
|
||||
def quantile(l, p):
    """Return p quantile of list l. E.g. p=0.25 for q1.

    Uses linear interpolation between order statistics (R type-7), see:
    https://www.rdocumentation.org/packages/stats/versions/3.6.2/topics/quantile

    """
    ordered = sorted(l)
    count = len(l)
    # Fractional rank within the sorted data (1-based).
    rank = 1 + (count - 1) * p
    lower = int(rank)
    frac = rank - lower
    if lower < count:
        # Interpolate between the two bracketing order statistics.
        return (1 - frac) * ordered[lower - 1] + frac * ordered[lower]
    return ordered[lower - 1]
|
||||
|
||||
|
||||
def trim(l):
    """Discard values in list more than 1.5*IQR outside IQR.

    (IQR is inter-quartile-range.) Returns the surviving values sorted.

    This function uses rad_util.quantile.

    1.5*IQR -- mild outlier
    3*IQR -- extreme outlier

    """
    ordered = sorted(l)
    # Calculate IQR from the first and third quartiles.
    # (Dead code removed: the original also computed the median and
    # never used it.)
    q1 = quantile(ordered, 0.25)
    q3 = quantile(ordered, 0.75)
    iqr_extra = (q3 - q1) * 1.5
    # Keep only values within [q1 - 1.5*IQR, q3 + 1.5*IQR].
    return [x for x in ordered if q1 - iqr_extra <= x <= q3 + iqr_extra]
|
||||
|
||||
|
||||
def nice_units(value, dp=0, sigfigs=None, suffix='', space=' ',
               use_extra_prefixes=False, use_full_name=False, mode='si'):
    """Return value converted to human readable units eg milli, micro, etc.

    Arguments:
    value -- number in base units
    dp -- number of decimal places to display (rounded)
    sigfigs -- number of significant figures to display (rounded);
               overrides dp if set
    suffix -- optional unit suffix to append to unit multiplier
    space -- separator between value and unit multiplier (default: ' ')
    use_extra_prefixes -- also use hecto, deka, deci and centi (default: False)
    use_full_name -- use full multiplier name, e.g. milli instead of m
    mode -- 'si' for SI prefixes, 'bin' for binary multipliers (default: 'si')

    >>> nice_units(2e-11)
    '20 p'

    >>> nice_units(2e-11, space='')
    '20p'

    """
    si_prefixes = {1e24: ('Y', 'yotta'),
                   1e21: ('Z', 'zetta'),
                   1e18: ('E', 'exa'),
                   1e15: ('P', 'peta'),
                   1e12: ('T', 'tera'),
                   1e9: ('G', 'giga'),
                   1e6: ('M', 'mega'),
                   1e3: ('k', 'kilo'),
                   1e-3: ('m', 'milli'),
                   1e-6: ('u', 'micro'),
                   1e-9: ('n', 'nano'),
                   1e-12: ('p', 'pico'),
                   1e-15: ('f', 'femto'),
                   1e-18: ('a', 'atto'),
                   1e-21: ('z', 'zepto'),
                   1e-24: ('y', 'yocto')
                   }
    if use_extra_prefixes:
        si_prefixes.update({1e2: ('h', 'hecto'),
                            1e1: ('da', 'deka'),
                            1e-1: ('d', 'deci'),
                            1e-2: ('c', 'centi')
                            })
    bin_prefixes = {2**10: ('K', 'kilo'),
                    2**20: ('M', 'mega'),
                    2**30: ('G', 'giga'),   # typo fix: was ('G', 'mega')
                    2**40: ('T', 'tera'),
                    2**50: ('P', 'peta'),
                    2**60: ('E', 'exa')
                    }
    prefixes = bin_prefixes if mode == 'bin' else si_prefixes
    prefixes[1] = ('', '')  # Unity.
    # Determine the appropriate multiplier.
    # Bug fix: dict.keys() is a view on Python 3 and has no .sort();
    # sorted() returns the indexable list the logic below needs.
    multipliers = sorted(prefixes)
    mult_i = len(multipliers) - 1  # default: value beyond the largest prefix
    for i in range(len(multipliers) - 1):
        if multipliers[i] <= value < multipliers[i + 1]:
            mult_i = i
            break
    if value < multipliers[0]:
        mult_i = 0
    mult = multipliers[mult_i]
    # Convert value for this multiplier.
    new_value = value / mult
    # Special case: if rounding would display exactly the next multiplier
    # (e.g. '1000 m'), bump up to that multiplier instead.
    if sigfigs is None:
        if mult_i < (len(multipliers) - 1) and \
               round(new_value, dp) == round(multipliers[mult_i + 1] / mult, dp):
            mult = multipliers[mult_i + 1]
            new_value = value / mult
    # Choose symbol vs full name.
    label_type = 1 if use_full_name else 0
    # Round and truncate to the requested precision.
    # (eval()-built format strings replaced with dynamic '*' precision.)
    if sigfigs is None:
        str_value = '%.*f' % (dp, new_value)
    else:
        str_value = '%.*g' % (sigfigs, new_value)
    return str_value + space + prefixes[mult][label_type] + suffix
|
||||
|
||||
|
||||
def uniquify(seq, preserve_order=False):
    """Return a list with duplicate items in sequence seq removed.

    Based on a usenet post by Tim Peters; O(N) for hashable items,
    O(N**2) otherwise.

    Arguments:
    seq -- sequence
    preserve_order -- if not set the order will be arbitrary
                      (default: False)

    >>> uniquify(['a', 'aa', 'b', 'b', 'ccc', 'ccc', 'd'], preserve_order=True)
    ['a', 'aa', 'b', 'ccc', 'd']

    >>> uniquify([['z'], ['x'], ['y'], ['z']], preserve_order=True)
    [['z'], ['x'], ['y']]

    """
    try:
        # Fast path: items are hashable.
        d = {}
        if preserve_order:
            # Dave Kirby's method (f8):
            # http://www.peterbe.com/plog/uniqifiers-benchmark
            return [x for x in seq if (x not in d) and not d.__setitem__(x, 0)]
        else:
            for x in seq:
                d[x] = 0
            # Bug fix: on Python 3 dict.keys() is a view, not the list the
            # docstring promises (callers .sort() the result).
            return list(d)
    except TypeError:
        # Have an unhashable object, so use the slow O(N**2) algorithm.
        result = []
        for x in seq:
            if x not in result:
                result.append(x)
        return result
|
||||
|
||||
# Alias to noun form for backward compatibility.
# New code should call uniquify() directly; 'unique' is kept only so that
# existing callers keep working.
unique = uniquify
|
||||
|
||||
|
||||
def reverse_dict(d):
    """Return a dict with d's values as keys and d's keys as values.

    Note: the result is arbitrary if the values are not unique
    (later entries win).

    >>> sorted(reverse_dict({'a': 1, 'b': 2}).items())
    [(1, 'a'), (2, 'b')]

    """
    # Comprehension form; like the original loop, a duplicated value keeps
    # the key seen last.
    return {value: key for key, value in d.items()}
|
||||
|
||||
|
||||
def lsb(x, n):
    """Return the n least significant bits of x.

    >>> lsb(13, 3)
    5

    """
    # Build an n-bit mask of ones and apply it.
    mask = (1 << n) - 1
    return x & mask
|
||||
|
||||
|
||||
def gray_encode(i):
    """Gray encode the given integer (binary-reflected Gray code)."""
    # XOR the value with itself shifted right one bit.
    shifted = i >> 1
    return i ^ shifted
|
||||
|
||||
|
||||
def random_vec(bits, max_value=None):
    """Generate a random binary vector of length bits and given max value.

    Arguments:
    bits -- length of the returned bit string
    max_value -- if set and smaller than 2**bits - 1, the random value is
                 rescaled into [0, max_value] before encoding
    """
    vector = ""
    # Assemble the vector ten random bits at a time.
    for _ in range(int(bits / 10) + 1):
        chunk = int((2 ** 10) * random.random())
        vector += int2bin(chunk, 10)

    if max_value and (max_value < 2 ** bits - 1):
        # Rescale into [0, max_value] with integer arithmetic.
        # Bug fix: the original's true division produced a float, which
        # int2bin cannot encode on Python 3 (and under Python 2's floor
        # division the scaled value collapsed to 0 or max_value).
        scaled = int(vector, 2) * max_value // (2 ** bits - 1)
        vector = int2bin(scaled, bits)

    return vector[0:bits]
|
||||
|
||||
|
||||
def binary_range(bits):
    """Return a list of all binary numbers in ascending order with width=bits.

    >>> binary_range(2)
    ['00', '01', '10', '11']

    Returns [] for bits < 1 (matching the original's zero-width behavior).
    """
    if bits < 1:
        return []
    # format() with a zero-padded width spec replaces the original
    # hand-rolled ripple-carry counter over a digit list.
    width = '0%db' % bits
    return [format(value, width) for value in range(2 ** bits)]
|
||||
|
||||
|
||||
def float_range(start, stop=None, step=None):
    """Return a list containing an arithmetic progression of floats.

    Mirrors range(): floats from 0.0 (or start) up to but excluding stop,
    incrementing by step (default 1.0), except that float increments are
    accepted.

    """
    # Single-argument form: range from zero up to the given value.
    if stop is None:
        start, stop = 0.0, float(start)
    if step is None:
        step = 1.0

    values = []
    current = float(start)
    while current < stop:
        values.append(current)
        current += step
    return values
|
||||
|
||||
|
||||
def find_common_fixes(s1, s2):
    """Find common (prefix, suffix) of two strings.

    >>> find_common_fixes('abc', 'def')
    ('', '')

    >>> find_common_fixes('abcelephantdef', 'abccowdef')
    ('abc', 'def')

    >>> find_common_fixes('abcelephantdef', 'abccow')
    ('abc', '')

    >>> find_common_fixes('elephantdef', 'abccowdef')
    ('', 'def')

    """
    limit = min(len(s1), len(s2))

    # Length of the shared prefix, scanning from the front.
    n_pre = 0
    while n_pre < limit and s1[n_pre] == s2[n_pre]:
        n_pre += 1

    # Length of the shared suffix, scanning from the back.
    n_suf = 0
    while n_suf < limit and s1[-(n_suf + 1)] == s2[-(n_suf + 1)]:
        n_suf += 1

    return (s1[:n_pre], s1[len(s1) - n_suf:])
|
||||
|
||||
|
||||
def is_rotated(seq1, seq2):
    """Return true if the first sequence is a rotation of the second.

    >>> int(is_rotated(['A', 'B', 'C', 'D'], ['C', 'D', 'A', 'B']))
    1

    >>> int(is_rotated(['A', 'B', 'C', 'D'], ['C', 'D', 'B', 'A']))
    0

    """
    # Sanity check: rotations must have equal length.
    if len(seq1) != len(seq2):
        return False
    # Candidate rotation points are wherever seq2's head item occurs.
    head = seq2[0]
    # A rotation of seq1 is a length-len(seq1) window of seq1 doubled.
    doubled = seq1 + seq1
    for offset, item in enumerate(seq1):
        if item == head and doubled[offset:offset + len(seq1)] == seq2:
            return True
    return False
|
||||
|
||||
def getmodule(obj):
    """Return the module that contains the object definition of obj.

    Note: Use inspect.getmodule instead.

    Arguments:
    obj -- python obj, generally a class or a function

    Works for plain functions and for classes.  A class is located via the
    globals of one of the functions in its __dict__, so the class must
    define at least one function.

    Raises ValueError if obj is a class with no functions attached.

    Examples:

    >>> module = getmodule(random.choice)
    >>> module.__name__
    'random'
    >>> module is random
    True

    >>> module = getmodule(random.Random)
    >>> module is random
    True

    Discussion:
    This approach is slightly hackish, and won't work in various situations.
    However, this was the approach recommended by GvR.

    See GvR's post in this thread:
    http://groups.google.com.au/group/comp.lang.python/browse_thread/thread/966a7bdee07e3b34/c3cab3f41ea84236?lnk=st&q=python+determine+class+module&rnum=4&hl=en#c3cab3f41ea84236

    """
    def function_globals(candidate):
        # Python 2 spells a function's globals 'func_globals'; Python 3
        # renamed the attribute to '__globals__'.  Support both so the
        # helper keeps working across versions.
        for attr_name in ('func_globals', '__globals__'):
            if hasattr(candidate, attr_name):
                return getattr(candidate, attr_name)
        return None

    globals_dict = function_globals(obj)
    if globals_dict is None:
        # Handle classes: find any function defined in the class body and
        # borrow its globals.
        for item in obj.__dict__.values():
            globals_dict = function_globals(item)
            if globals_dict is not None:
                break
        if globals_dict is None:
            raise ValueError("No functions attached to object: %r" % obj)
    module_name = globals_dict['__name__']
    # Get module.
    module = sys.modules[module_name]
    return module
|
||||
|
||||
|
||||
def round_grid(value, grid, mode=0):
    """Round off the given value to the given grid size.

    Arguments:
    value -- value to be rounded
    grid -- result must be a multiple of this
    mode -- 0 nearest, 1 up, -1 down

    Examples:

    >>> round_grid(7.5, 5)
    10

    >>> round_grid(7.5, 5, mode=-1)
    5

    >>> round_grid(7.3, 5, mode=1)
    10

    >>> round_grid(7.3, 5.0, mode=1)
    10.0

    >>> round_grid(10, 5, mode=1)
    10

    """
    off_grid = value % grid
    # Default to keeping the grid multiple at or below the value.  This
    # covers mode=-1 as well as values already on the grid, which
    # previously left add_one undefined and raised NameError for
    # mode=1/-1 (e.g. round_grid(10, 5, mode=1)).
    add_one = 0
    if mode == 0:
        # Round to nearest: bump up when at or past the halfway point.
        add_one = int(off_grid >= (grid / 2.0))
    elif mode == 1 and off_grid:
        # Round up, but only if the value is actually off-grid.
        add_one = 1
    result = ((int(value / grid) + add_one) * grid)
    return result
|
||||
|
||||
|
||||
def get_args(argv):
    """Store command-line args in a dictionary.

    -, -- prefixes are removed
    Items not prefixed with - or -- are stored as a list, indexed by 'args'

    For options that take a value use --option=value

    Consider using optparse or getopt (in Python standard library) instead.

    """
    d = {}
    positional = []

    for arg in argv:

        if arg.startswith('-'):
            # Strip the leading dash(es), then split on the FIRST '=' only,
            # so option values may themselves contain '=' characters
            # (previously --filter=a=b silently lost its value).
            parts = re.sub(r'^-+', '', arg).split('=', 1)
            if len(parts) == 2:
                d[parts[0]] = parts[1]
            else:
                # Flag-style option with no value.
                d[parts[0]] = None
        else:
            positional.append(arg)

    d['args'] = positional

    return d
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # When executed as a script, run every doctest embedded in this
    # module's docstrings and report any failures.
    import doctest
    doctest.testmod(sys.modules['__main__'])
|
||||
|
||||
@@ -1,392 +0,0 @@
|
||||
# topsort - dependency (topological) sorting and cycle finding functions
|
||||
# Copyright (C) 2007 RADLogic
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation;
|
||||
# version 2.1 of the License.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# See https://www.fsf.org/licensing/licenses/lgpl.txt for full license text.
|
||||
"""Provide toplogical sorting (i.e. dependency sorting) functions.
|
||||
|
||||
The topsort function is based on code posted on Usenet by Tim Peters.
|
||||
|
||||
Modifications:
|
||||
- added doctests
|
||||
- changed some bits to use current Python idioms
|
||||
(listcomp instead of filter, +=/-=, inherit from Exception)
|
||||
- added a topsort_levels version that ports items in each dependency level
|
||||
into a sub-list
|
||||
- added find_cycles to aid in cycle debugging
|
||||
|
||||
Run this module directly to run the doctests (unittests).
|
||||
Make sure they all pass before checking in any modifications.
|
||||
|
||||
Requires Python >= 2.2
|
||||
(For Python 2.2 also requires separate sets.py module)
|
||||
|
||||
This requires the rad_util.py module.
|
||||
|
||||
"""
|
||||
|
||||
# Provide support for Python 2.2*
|
||||
from __future__ import generators
|
||||
|
||||
__version__ = '$Revision: 0.9 $'
|
||||
__date__ = '$Date: 2007/03/27 04:15:26 $'
|
||||
__credits__ = '''Tim Peters -- original topsort code
|
||||
Tim Wegener -- doctesting, updating to current idioms, topsort_levels,
|
||||
find_cycles
|
||||
'''
|
||||
|
||||
# Make Python 2.3 sets look like Python 2.4 sets.
|
||||
try:
|
||||
set
|
||||
except NameError:
|
||||
from sets import Set as set
|
||||
|
||||
from rad_util import is_rotated
|
||||
|
||||
|
||||
class CycleError(Exception):
    """Cycle Error"""
|
||||
|
||||
|
||||
def topsort(pairlist):
    """Topologically sort a list of (parent, child) pairs.

    Return a list of the elements in dependency order (parent to child
    order).

    Raises CycleError(answer, num_parents, children) if the pairs contain
    a dependency cycle; 'answer' holds the partial ordering found before
    the cycle was detected.

    >>> topsort( [(1,2), (3,4), (5,6), (1,3), (1,5), (1,6), (2,5)] )
    [1, 2, 3, 5, 4, 6]

    >>> topsort( [(1,2), (1,3), (2,4), (3,4), (5,6), (4,5)] )
    [1, 2, 3, 4, 5, 6]

    """
    num_parents = {}  # element -> # of predecessors
    children = {}     # element -> list of successors
    for parent, child in pairlist:
        # Make sure every element is a key in num_parents.
        # (dict.has_key was removed in Python 3; 'in' works everywhere.)
        if parent not in num_parents:
            num_parents[parent] = 0
        if child not in num_parents:
            num_parents[child] = 0

        # Since child has a parent, increment child's num_parents count.
        num_parents[child] += 1

        # ... and parent gains a child.
        children.setdefault(parent, []).append(child)

    # Suck up everything without a parent.
    answer = [x for x in num_parents.keys() if num_parents[x] == 0]

    # For everything in answer, knock down the parent count on its children.
    # Note that answer grows *in* the loop.
    for parent in answer:
        del num_parents[parent]
        if parent in children:
            for child in children[parent]:
                num_parents[child] -= 1
                if num_parents[child] == 0:
                    answer.append(child)
            # Following "del" isn't needed; just makes
            # CycleError details easier to grasp.
            del children[parent]

    if num_parents:
        # Everything in num_parents has at least one child ->
        # there's a cycle.
        raise CycleError(answer, num_parents, children)
    return answer
|
||||
|
||||
def topsort_levels(pairlist):
    """Topologically sort a list of (parent, child) pairs into depth levels.

    This returns a generator.
    Each generated element is a list of items at that dependency level.
    A CycleError is raised during iteration if the pairs contain a
    dependency cycle (after all acyclic levels have been yielded).

    >>> dependency_pairs = [(1,2), (3,4), (5,6), (1,3), (1,5), (1,6), (2,5)]
    >>> for level in topsort_levels( dependency_pairs ):
    ...     print level
    [1]
    [2, 3]
    [4, 5]
    [6]

    """
    num_parents = {}  # element -> # of predecessors
    children = {}     # element -> list of successors
    for parent, child in pairlist:
        # Make sure every element is a key in num_parents.
        # (dict.has_key was removed in Python 3; 'in' works everywhere.)
        if parent not in num_parents:
            num_parents[parent] = 0
        if child not in num_parents:
            num_parents[child] = 0

        # Since child has a parent, increment child's num_parents count.
        num_parents[child] += 1

        # ... and parent gains a child.
        children.setdefault(parent, []).append(child)

    # Delegate the level-by-level traversal to the core generator.
    return topsort_levels_core(num_parents, children)
|
||||
|
||||
def topsort_levels_core(num_parents, children):
    """Topologically sort a bunch of interdependent items based on dependency.

    This returns a generator.
    Each generated element is a list of items at that dependency level.

    Note: both argument dicts are consumed (mutated) during iteration.

    >>> list(topsort_levels_core(
    ...     {1: 0, 2: 1, 3: 1, 4: 1, 5: 2, 6: 2},
    ...     {1: [2, 3, 5, 6], 2: [5], 3: [4], 4: [], 5: [6]}))
    [[1], [2, 3], [4, 5], [6]]

    This function has a more complicated interface than topsort_levels,
    but is useful if the data is easier to generate in this form.

    Arguments:
    num_parents -- key: item, value: number of parents (predecessors)
    children -- key: item, value: list of children (successors)

    Raises CycleError(num_parents, children) if a dependency cycle remains
    after all acyclic levels have been yielded.

    """
    while True:
        # Suck up everything without a predecessor.
        level_parents = [x for x in num_parents.keys() if num_parents[x] == 0]

        if not level_parents:
            break

        # Offer the next generated item,
        # which is a list of the items at this dependency level.
        yield level_parents

        # For every item in this level, decrement the parent count,
        # since we have accounted for its parent.
        for level_parent in level_parents:

            del num_parents[level_parent]

            # (dict.has_key was removed in Python 3; 'in' works everywhere.)
            if level_parent in children:
                for level_parent_child in children[level_parent]:
                    num_parents[level_parent_child] -= 1
                del children[level_parent]

    if num_parents:
        # Everything in num_parents has at least one child ->
        # there's a cycle.
        raise CycleError(num_parents, children)
    # Falling off the end terminates the generator.  An explicit
    # 'raise StopIteration' here would be converted into a RuntimeError
    # under PEP 479 (Python 3.7+), so simply return.
|
||||
|
||||
|
||||
def find_cycles(parent_children):
    """Yield cycles. Each result is a list of items comprising a cycle.

    Use a 'stack' based approach to find all the cycles.
    This is a generator, so yields each cycle as it finds it.

    It is implicit that the last item in each cycle list is a parent of the
    first item (thereby forming a cycle).

    Arguments:
    parent_children -- parent -> collection of children

    Simplest cycle:
    >>> cycles = list(find_cycles({'A': ['B'], 'B': ['A']}))
    >>> len(cycles)
    1
    >>> cycle = cycles[0]
    >>> cycle.sort()
    >>> print cycle
    ['A', 'B']

    Simplest cycle with extra baggage at the start and the end:
    >>> cycles = list(find_cycles(parent_children={'A': ['B'],
    ...                                            'B': ['C'],
    ...                                            'C': ['B', 'D'],
    ...                                            'D': [],
    ...                                            }))
    >>> len(cycles)
    1
    >>> cycle = cycles[0]
    >>> cycle.sort()
    >>> print cycle
    ['B', 'C']

    Double cycle:
    >>> cycles = list(find_cycles(parent_children={'A': ['B'],
    ...                                            'B': ['C1', 'C2'],
    ...                                            'C1': ['D1'],
    ...                                            'D1': ['E1'],
    ...                                            'E1': ['D1'],
    ...                                            'C2': ['D2'],
    ...                                            'D2': ['E2'],
    ...                                            'E2': ['D2'],
    ...                                            }))
    >>> len(cycles)
    2
    >>> for cycle in cycles:
    ...     cycle.sort()
    >>> cycles.sort()
    >>> cycle1 = cycles[0]
    >>> cycle1.sort()
    >>> print cycle1
    ['D1', 'E1']
    >>> cycle2 = cycles[1]
    >>> cycle2.sort()
    >>> print cycle2
    ['D2', 'E2']

    Simple cycle with children not specified for one item:
    # todo: Should this barf instead?
    >>> cycles = list(find_cycles(parent_children={'A': ['B'],
    ...                                            'B': ['A'],
    ...                                            'C': ['D']}))
    >>> len(cycles)
    1
    >>> cycle = cycles[0]
    >>> cycle.sort()
    >>> print cycle
    ['A', 'B']

    Diamond cycle
    >>> cycles = list(find_cycles(parent_children={'A': ['B1', 'B2'],
    ...                                            'B1': ['C'],
    ...                                            'B2': ['C'],
    ...                                            'C': ['A', 'B1']}))
    >>> len(cycles)
    3
    >>> sorted_cycles = []
    >>> for cycle in cycles:
    ...     cycle = list(cycle)
    ...     cycle.sort()
    ...     sorted_cycles.append(cycle)
    >>> sorted_cycles.sort()
    >>> for cycle in sorted_cycles:
    ...     print cycle
    ['A', 'B1', 'C']
    ['A', 'B2', 'C']
    ['B1', 'C']

    Hairy case (order can matter if something is wrong):
    (Note order of B and C in the list.)
    >>> cycles = list(find_cycles(parent_children={
    ...     'TD': ['DD'],
    ...     'TC': ['DC'],
    ...     'DC': ['DQ'],
    ...     'C': ['DQ'],
    ...     'DQ': ['IA', 'TO'],
    ...     'IA': ['A'],
    ...     'A': ['B', 'C'],
    ...     }))
    >>> len(cycles)
    1
    >>> cycle = cycles[0]
    >>> cycle.sort()
    >>> print cycle
    ['A', 'C', 'DQ', 'IA']

    """
    # Cycles found so far, used for duplicate suppression via is_rotated.
    cycles = []
    # Nodes already pushed onto some path; starting a fresh search from
    # them would only re-discover cycles already reported.
    visited_nodes = set()

    for parent in parent_children:
        if parent in visited_nodes:
            # This node is part of a path that has already been traversed.
            continue

        # Depth-first search using an explicit stack of paths; each stack
        # entry is the full root-to-node path so cycles can be extracted.
        paths = [[parent]]
        while paths:
            path = paths.pop()

            # The node currently being expanded is the path's tail.
            parent = path[-1]

            try:
                children = parent_children[parent]
            except KeyError:
                # Node has no child list at all; treat as a leaf.
                continue

            for child in children:
                # Keeping a set of the path nodes, for O(1) lookups at the
                # expense of more memory and complexity, actually makes speed
                # worse. (Due to construction of sets.)
                # This is O(N).
                if child in path:
                    # This is a cycle.
                    cycle = path[path.index(child):]
                    # Check that this is not a dup cycle.
                    is_dup = False
                    for other_cycle in cycles:
                        if is_rotated(other_cycle, cycle):
                            is_dup = True
                            break
                    if not is_dup:
                        cycles.append(cycle)
                        yield cycle
                else:
                    # Push this new path onto the 'stack'.
                    # This is probably the most expensive part of the algorithm
                    # (a list copy).
                    paths.append(path + [child])
                    # Mark the node as visited.
                    visited_nodes.add(child)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Run the doctest tests.
    # Note: several doctests above use Python 2 syntax ('print level',
    # 'except CycleError, exc'), so this self-test is Python 2 only.
    import sys
    import doctest
    doctest.testmod(sys.modules['__main__'])
|
||||
Reference in New Issue
Block a user