Add support for --update-data to test.py, move the test-runner code into a library for modular builds, and add support for hierarchical tests beyond the previous two-level hierarchy.

This commit is contained in:
Mathieu Lacage
2011-07-29 03:38:59 -04:00
parent 6937841b85
commit 8a38290eb8
34 changed files with 1620 additions and 2050 deletions

View File

@@ -41,11 +41,9 @@
#include "ns3/v4ping-helper.h"
#include "ns3/nqos-wifi-mac-helper.h"
#include "ns3/config.h"
#include "ns3/pcap-test.h"
#include <sstream>
/// Set to true to rewrite reference traces, leave false to run regression tests
const bool WRITE_VECTORS = false;
namespace ns3 {
namespace aodv {
//-----------------------------------------------------------------------------
@@ -56,6 +54,7 @@ class AodvRegressionTestSuite : public TestSuite
public:
AodvRegressionTestSuite () : TestSuite ("routing-aodv-regression", SYSTEM)
{
SetDataDir (NS_TEST_SOURCEDIR);
// General RREQ-RREP-RRER test case
AddTestCase (new ChainRegressionTest ("aodv-chain-regression-test"));
// Bug 606 test case, should crash if bug is not fixed
@@ -107,7 +106,7 @@ ChainRegressionTest::DoRun ()
Simulator::Run ();
Simulator::Destroy ();
if (!WRITE_VECTORS) CheckResults ();
CheckResults ();
delete m_nodes, m_nodes = 0;
}
@@ -161,8 +160,7 @@ ChainRegressionTest::CreateDevices ()
p.Stop (m_time);
// 4. write PCAP
std::string prefix = (WRITE_VECTORS ? NS_TEST_SOURCEDIR : GetTempDir ()) + m_prefix;
wifiPhy.EnablePcapAll (prefix);
wifiPhy.EnablePcapAll (CreateTempDirFilename (m_prefix));
}
void
@@ -170,15 +168,7 @@ ChainRegressionTest::CheckResults ()
{
for (uint32_t i = 0; i < m_size; ++i)
{
std::ostringstream os1, os2;
// File naming conventions are hard-coded here.
os1 << NS_TEST_SOURCEDIR << m_prefix << "-" << i << "-0.pcap";
os2 << GetTempDir () << m_prefix << "-" << i << "-0.pcap";
uint32_t sec (0), usec (0);
bool diff = PcapFile::Diff (os1.str (), os2.str (), sec, usec);
NS_TEST_EXPECT_MSG_EQ (diff, false, "PCAP traces " << os1.str () << " and " << os2.str ()
<< " differ starting from " << sec << " s " << usec << " us");
NS_PCAP_TEST_EXPECT_EQ (m_prefix << "-" << i << "-0.pcap");
}
}

View File

@@ -43,11 +43,9 @@
#include "ns3/inet-socket-address.h"
#include "ns3/data-rate.h"
#include "ns3/packet-sink-helper.h"
#include "ns3/pcap-test.h"
#include <sstream>
/// Set to true to rewrite reference traces, leave false to run regression tests
const bool WRITE_VECTORS = false;
namespace ns3 {
namespace aodv {
@@ -55,7 +53,7 @@ namespace aodv {
// UdpChainTest
//-----------------------------------------------------------------------------
Bug772ChainTest::Bug772ChainTest (const char * const prefix, const char * const proto, Time t, uint32_t size) :
TestCase ("Bug 772 UDP/TCP chain regression test"),
TestCase ("Bug 772 UDP and TCP chain regression test"),
m_nodes (0),
m_prefix (prefix),
m_proto (proto),
@@ -82,7 +80,7 @@ Bug772ChainTest::DoRun ()
Simulator::Run ();
Simulator::Destroy ();
if (!WRITE_VECTORS) CheckResults ();
CheckResults ();
delete m_nodes, m_nodes = 0;
}
@@ -143,9 +141,8 @@ Bug772ChainTest::CreateDevices ()
app.Start (Seconds (0.0));
// 4. write PCAP on the first and last nodes only
std::string prefix = (WRITE_VECTORS ? NS_TEST_SOURCEDIR : GetTempDir ()) + m_prefix;
wifiPhy.EnablePcap (prefix, devices.Get (0));
wifiPhy.EnablePcap (prefix, devices.Get (m_size-1));
wifiPhy.EnablePcap (CreateTempDirFilename (m_prefix), devices.Get (0));
wifiPhy.EnablePcap (CreateTempDirFilename (m_prefix), devices.Get (m_size-1));
}
void
@@ -153,15 +150,7 @@ Bug772ChainTest::CheckResults ()
{
for (uint32_t i = 0; i < m_size; i += (m_size - 1) /*first and last nodes only*/)
{
std::ostringstream os1, os2;
// File naming conventions are hard-coded here.
os1 << NS_TEST_SOURCEDIR << m_prefix << "-" << i << "-0.pcap";
os2 << GetTempDir () << m_prefix << "-" << i << "-0.pcap";
uint32_t sec (0), usec (0);
bool diff = PcapFile::Diff (os1.str (), os2.str (), sec, usec);
NS_TEST_EXPECT_MSG_EQ (diff, false, "PCAP traces " << os1.str () << " and " << os2.str ()
<< " differ starting from " << sec << " s " << usec << " us");
NS_PCAP_TEST_EXPECT_EQ(m_prefix << "-" << i << "-0.pcap");
}
}

View File

@@ -0,0 +1,270 @@
#include "system-path.h"
#include "fatal-error.h"
#include "assert.h"
#include "ns3/core-config.h"
#include <stdlib.h>
#include <errno.h>
#include <string.h>
#include <time.h>
#include <list>
#include <string>
#include <sstream>
#if defined (HAVE_DIRENT_H) and defined (HAVE_SYS_TYPES_H)
#define HAVE_OPENDIR
#include <sys/types.h>
#include <dirent.h>
#endif
#if defined (HAVE_SYS_STAT_H) and defined (HAVE_SYS_TYPES_H)
#define HAVE_MKDIR_H
#include <sys/types.h>
#include <sys/stat.h>
#endif
#if defined (__linux__)
#include <unistd.h>
#endif
#ifdef __APPLE__
#include <mach-o/dyld.h>
#endif /* __APPLE__ */
#if defined (__win32__)
#define SYSTEM_PATH_SEP "\\"
#else
#define SYSTEM_PATH_SEP "/"
#endif
namespace ns3 {
namespace SystemPath {
std::string Dirname (std::string path)
{
  // Return everything up to, but excluding, the last path component:
  // split the path into components, then join all but the final one.
  std::list<std::string> components = Split (path);
  std::list<std::string>::const_iterator beforeLast = components.end ();
  --beforeLast;
  return Join (components.begin (), beforeLast);
}
std::string FindSelfDirectory (void)
{
  /**
   * This function returns the path to the directory which holds the
   * currently-executing binary ($PREFIX), using one OS-specific
   * mechanism per platform:
   * Mac OS X: _NSGetExecutablePath() (man 3 dyld)
   * Linux: readlink /proc/self/exe
   * Solaris: getexecname()
   * FreeBSD: sysctl CTL_KERN KERN_PROC KERN_PROC_PATHNAME -1
   * BSD with procfs: readlink /proc/curproc/file
   * Windows: GetModuleFileName() with hModule = NULL
   */
  std::string filename;
#if defined(__linux__)
  {
    // readlink(2) does not null-terminate its output and silently
    // truncates it when the buffer is too small.  The previous test
    // (status != 1) broke out of the loop on virtually every return
    // value and never detected truncation; instead, retry with a
    // doubled buffer whenever the result exactly fills the buffer.
    ssize_t size = 1024;
    char *buffer = (char*)malloc (size);
    memset (buffer, 0, size);
    ssize_t status;
    while (true)
      {
        status = readlink ("/proc/self/exe", buffer, size);
        if (status == -1 || status < size)
          {
            // hard error, or the link target fit with room for the
            // terminating zero provided by memset.
            break;
          }
        // status == size: the result may have been truncated.
        size *= 2;
        free (buffer);
        buffer = (char*)malloc (size);
        memset (buffer, 0, size);
      }
    if (status == -1)
      {
        NS_FATAL_ERROR ("Oops, could not find self directory.");
      }
    filename = buffer;
    free (buffer);
  }
#elif defined (__win32__)
  {
    // XXX: untested. it should work if code is compiled with
    // LPTSTR = char *
    DWORD size = 1024;
    LPTSTR lpFilename = (LPTSTR) malloc (sizeof(TCHAR) * size);
    // The Win32 API is GetModuleFileName (previous code spelled it
    // GetModuleFilename, which does not exist).
    DWORD status = GetModuleFileName (0, lpFilename, size);
    while (status == size)
      {
        // a return value equal to the buffer size indicates truncation.
        size = size * 2;
        free (lpFilename);
        lpFilename = (LPTSTR) malloc (sizeof(TCHAR) * size);
        status = GetModuleFileName (0, lpFilename, size);
      }
    NS_ASSERT (status != 0);
    filename = lpFilename;
    free (lpFilename);
  }
#elif defined (__APPLE__)
  {
    // _NSGetExecutablePath stores the required size in bufsize when the
    // first attempt fails, so a single retry is sufficient.
    uint32_t bufsize = 1024;
    char *buffer = (char *) malloc (bufsize);
    NS_ASSERT (buffer != 0);
    int status = _NSGetExecutablePath (buffer, &bufsize);
    if (status == -1)
      {
        free (buffer);
        buffer = (char *) malloc (bufsize);
        status = _NSGetExecutablePath (buffer, &bufsize);
      }
    NS_ASSERT (status == 0);
    filename = buffer;
    free (buffer);
  }
#endif
  return Dirname (filename);
}
std::string Append (std::string left, std::string right)
{
  // Remove trailing separators from 'left' so the result contains
  // exactly one separator between the two parts.
  //
  // The empty-string guard fixes an infinite loop in the previous
  // version: for an empty 'left', rfind returns npos and
  // left.size () - 1 underflows to the same npos value, so the loop
  // condition never triggered a break.
  while (!left.empty ())
    {
      std::string::size_type lastSep = left.rfind (SYSTEM_PATH_SEP);
      if (lastSep != left.size () - 1)
        {
          // no trailing separator left: done stripping.
          break;
        }
      left = left.substr (0, left.size () - 1);
    }
  std::string retval = left + SYSTEM_PATH_SEP + right;
  return retval;
}
std::list<std::string> Split (std::string path)
{
  // Cut 'path' at every separator.  Empty components (from a leading
  // separator or doubled separators) are preserved, so that Join can
  // reconstruct the original string exactly.
  std::list<std::string> components;
  std::string::size_type start = 0;
  for (std::string::size_type sep = path.find (SYSTEM_PATH_SEP, start);
       sep != std::string::npos;
       sep = path.find (SYSTEM_PATH_SEP, start))
    {
      components.push_back (path.substr (start, sep - start));
      start = sep + 1;
    }
  // the final component: everything after the last separator (or the
  // whole string when no separator was found).
  components.push_back (path.substr (start, std::string::npos));
  return components;
}
std::string Join (std::list<std::string>::const_iterator begin,
                  std::list<std::string>::const_iterator end)
{
  // Concatenate the elements of [begin, end) with the platform path
  // separator between consecutive elements.  An empty range yields "".
  std::string joined;
  bool isFirst = true;
  for (std::list<std::string>::const_iterator i = begin; i != end; ++i)
    {
      if (isFirst)
        {
          joined = *i;
          isFirst = false;
        }
      else
        {
          joined += SYSTEM_PATH_SEP;
          joined += *i;
        }
    }
  return joined;
}
std::list<std::string> ReadFiles (std::string path)
{
  // Return the names of all directory entries found in 'path'.  On
  // POSIX systems this includes the "." and ".." entries; callers must
  // filter them if needed.  Aborts via NS_FATAL_ERROR if the directory
  // cannot be opened.
  std::list<std::string> files;
#if defined HAVE_OPENDIR
  DIR *dp = opendir (path.c_str ());
  if (dp == NULL)
    {
      NS_FATAL_ERROR ("Could not open directory=" << path);
    }
  struct dirent *de = readdir (dp);
  while (de != 0)
    {
      files.push_back (de->d_name);
      de = readdir (dp);
    }
  closedir (dp);
#elif defined (HAVE_FIND_FIRST_FILE)
  // XXX: untested
  HANDLE hFind;
  WIN32_FIND_DATA fileData;
  // fix: the previous code passed &FindFileData, a name that was never
  // declared; the output struct is 'fileData'.
  hFind = FindFirstFile (path.c_str (), &fileData);
  if (hFind == INVALID_HANDLE_VALUE)
    {
      NS_FATAL_ERROR ("Could not open directory=" << path);
    }
  do
    {
      // FindFirstFile already filled fileData with the first entry, so
      // record it before asking for the next one.
      files.push_back (fileData.cFileName);
    } while (FindNextFile (hFind, &fileData));
  FindClose (hFind);
#else
#error "No support for reading a directory on this platform"
#endif
  return files;
}
std::string
MakeTemporaryDirectoryName (void)
{
  // Pick the base temporary directory from the environment (TMP, then
  // TEMP), falling back to /tmp when neither is set.
  char *path = NULL;
  path = getenv ("TMP");
  if (path == NULL)
    {
      path = getenv ("TEMP");
      if (path == NULL)
        {
          path = const_cast<char *> ("/tmp");
        }
    }
  //
  // Just in case the user wants to go back and find the output, we give
  // a hint as to which dir we created by including a time hint.
  //
  time_t now = time (NULL);
  struct tm *tm_now = localtime (&now);
  //
  // But we also randomize the name in case there are multiple users doing
  // this at the same time.  Seed the generator only once per process:
  // the previous code called srand (time (0)) on every invocation, so
  // two calls within the same second produced the same "random" number
  // and therefore the same directory name.
  //
  static bool seeded = false;
  if (!seeded)
    {
      srand (time (0));
      seeded = true;
    }
  long int n = rand ();
  //
  // The final path to the directory is going to look something like
  //
  // /tmp/ns-3.14.30.29.32767
  //
  // The first segment comes from one of the temporary directory env
  // variables or /tmp if not found. The directory name starts with an
  // identifier telling folks who is making all of the temp directories
  // and then the local time (in this case 14.30.29 -- which is 2:30 and
  // 29 seconds PM), followed by the random number.
  //
  std::ostringstream oss;
  oss << path << SYSTEM_PATH_SEP << "ns-3." << tm_now->tm_hour << "."
      << tm_now->tm_min << "." << tm_now->tm_sec << "." << n;
  return oss.str ();
}
void
MakeDirectories (std::string path)
{
  // Create every directory along 'path', like "mkdir -p".  mkdir
  // failures are deliberately ignored: intermediate components usually
  // already exist (EEXIST).
  std::list<std::string> elements = Split (path);
  for (std::list<std::string>::const_iterator i = elements.begin (); i != elements.end (); ++i)
    {
      std::string tmp = Join (elements.begin (), i);
      if (tmp.empty ())
        {
          // Join over an empty prefix (first component of an absolute
          // path) yields "": nothing to create, skip the syscall.
          continue;
        }
#if defined(HAVE_MKDIR_H)
      mkdir (tmp.c_str (), S_IRWXU);
#endif
    }
#if defined(HAVE_MKDIR_H)
  mkdir (path.c_str (), S_IRWXU);
#endif
}
} // namespace SystemPath
} // namespace ns3

View File

@@ -0,0 +1,72 @@
#ifndef SYSTEM_PATH
#define SYSTEM_PATH
#include <string>
#include <list>
namespace ns3 {
/**
 * \brief Encapsulate OS-specific functions to manipulate file and directory paths.
 *
 * The functions provided here are used mostly to implement the ns-3 test framework.
 */
namespace SystemPath {
/**
 * \return the directory in which the currently-executing binary is located
 *
 * Implemented with a platform-specific mechanism (e.g. /proc/self/exe
 * on Linux, _NSGetExecutablePath on Mac OS X).
 */
std::string FindSelfDirectory (void);
/**
 * \param left a path element
 * \param right a path element
 * \return a concatenation of the two input paths
 *
 * A single path separator is inserted between the two elements;
 * trailing separators on 'left' are not duplicated.
 */
std::string Append (std::string left, std::string right);
/**
 * \param path a path
 * \return a list of path elements that can be joined together again with
 * the Join function.
 * \sa ns3::SystemPath::Join
 */
std::list<std::string> Split (std::string path);
/**
 * \param begin iterator to first element to join
 * \param end iterator to last element to join
 * \return a path that is a concatenation of all the input elements.
 *
 * Elements are separated by the platform path separator.
 */
std::string Join (std::list<std::string>::const_iterator begin,
std::list<std::string>::const_iterator end);
/**
 * \param path a path which identifies a directory
 * \return a list of the filenames which are located in the input directory
 */
std::list<std::string> ReadFiles (std::string path);
/**
 * \return a path which identifies a temporary directory.
 *
 * The returned path identifies a directory which does not exist yet
 * Call ns3::SystemPath::MakeDirectories to create it. Yes, there is a
 * well-known security race in this API but we don't care in ns-3.
 */
std::string MakeTemporaryDirectoryName (void);
/**
 * \param path a path to a directory
 *
 * Create all the directories leading to path.
 */
void MakeDirectories (std::string path);
} // namespace SystemPath
} // namespace ns3
#endif /* SYSTEM_PATH */

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -36,8 +36,10 @@ def configure(conf):
conf.check(header_name='stdint.h', define_name='HAVE_STDINT_H')
conf.check(header_name='inttypes.h', define_name='HAVE_INTTYPES_H')
conf.check(header_name='sys/inttypes.h', define_name='HAVE_SYS_INT_TYPES_H')
conf.check(header_name='sys/types.h', define_name='HAVE_SYS_TYPES_H')
conf.check(header_name='sys/stat.h', define_name='HAVE_SYS_STAT_H')
conf.check(header_name='dirent.h', define_name='HAVE_DIRENT_H')
if conf.check(header_name='stdlib.h'):
conf.define('HAVE_STDLIB_H', 1)
@@ -144,6 +146,7 @@ def build(bld):
'model/names.cc',
'model/vector.cc',
'model/fatal-impl.cc',
'model/system-path.cc',
]
core_test = bld.create_ns3_module_test_library('core')

View File

@@ -32,13 +32,12 @@
#include "ns3/abort.h"
#include "ns3/udp-echo-helper.h"
#include "ns3/mobility-model.h"
#include "ns3/pcap-test.h"
#include <sstream>
#include "hwmp-proactive-regression.h"
using namespace ns3;
/// Set to true to rewrite reference traces, leave false to run regression test
const bool WRITE_VECTORS = false;
/// Unique PCAP file name prefix
const char * const PREFIX = "hwmp-proactive-regression-test";
@@ -65,7 +64,7 @@ HwmpProactiveRegressionTest::DoRun ()
Simulator::Run ();
Simulator::Destroy ();
if (!WRITE_VECTORS) CheckResults ();
CheckResults ();
delete m_nodes, m_nodes = 0;
}
@@ -122,8 +121,7 @@ HwmpProactiveRegressionTest::CreateDevices ()
address.SetBase ("10.1.1.0", "255.255.255.0");
m_interfaces = address.Assign (meshDevices);
// 4. write PCAP if needed
std::string prefix = (WRITE_VECTORS ? NS_TEST_SOURCEDIR : std::string (GetTempDir ())) + PREFIX;
wifiPhy.EnablePcapAll (prefix);
wifiPhy.EnablePcapAll (CreateTempDirFilename (PREFIX));
}
void
@@ -131,15 +129,7 @@ HwmpProactiveRegressionTest::CheckResults ()
{
for (int i = 0; i < 5; ++i)
{
std::ostringstream os1, os2;
// File naming conventions are hard-coded here.
os1 << NS_TEST_SOURCEDIR << PREFIX << "-" << i << "-1.pcap";
os2 << GetTempDir () << PREFIX << "-" << i << "-1.pcap";
uint32_t sec (0), usec (0);
bool diff = PcapFile::Diff (os1.str (), os2.str (), sec, usec); // TODO support default PcapWriter snap length here
NS_TEST_EXPECT_MSG_EQ (diff, false, "PCAP traces " << os1.str () << " and " << os2.str ()
<< " differ starting from " << sec << " s " << usec << " us");
NS_PCAP_TEST_EXPECT_EQ (PREFIX << "-" << i << "-1.pcap");
}
}

View File

@@ -31,12 +31,11 @@
#include "ns3/abort.h"
#include "ns3/udp-echo-helper.h"
#include "ns3/mobility-model.h"
#include "ns3/pcap-test.h"
#include <sstream>
#include "hwmp-reactive-regression.h"
/// Set to true to rewrite reference traces, leave false to run regression test
const bool WRITE_VECTORS = false;
/// Unique PCAP file name prefix
const char * const PREFIX = "hwmp-reactive-regression-test";
@@ -61,7 +60,7 @@ HwmpReactiveRegressionTest::DoRun ()
Simulator::Run ();
Simulator::Destroy ();
if (!WRITE_VECTORS) CheckResults ();
CheckResults ();
delete m_nodes, m_nodes = 0;
}
void
@@ -119,9 +118,7 @@ HwmpReactiveRegressionTest::CreateDevices ()
address.SetBase ("10.1.1.0", "255.255.255.0");
m_interfaces = address.Assign (meshDevices);
// 4. write PCAP if needed
std::string prefix = (WRITE_VECTORS ? NS_TEST_SOURCEDIR : std::string (GetTempDir ())) + PREFIX;
wifiPhy.EnablePcapAll (prefix);
wifiPhy.EnablePcapAll (CreateTempDirFilename (PREFIX));
}
void
@@ -129,15 +126,7 @@ HwmpReactiveRegressionTest::CheckResults ()
{
for (int i = 0; i < 6; ++i)
{
std::ostringstream os1, os2;
// File naming conventions are hard-coded here.
os1 << NS_TEST_SOURCEDIR << PREFIX << "-" << i << "-1.pcap";
os2 << GetTempDir () << PREFIX << "-" << i << "-1.pcap";
uint32_t sec (0), usec (0);
bool diff = PcapFile::Diff (os1.str (), os2.str (), sec, usec); // TODO support default PcapWriter snap length here
NS_TEST_EXPECT_MSG_EQ (diff, false, "PCAP traces " << os1.str () << " and " << os2.str ()
<< " differ starting from " << sec << " s " << usec << " us");
NS_PCAP_TEST_EXPECT_EQ (PREFIX << "-" << i << "-1.pcap");
}
}

View File

@@ -31,13 +31,12 @@
#include "ns3/abort.h"
#include "ns3/udp-echo-helper.h"
#include "ns3/mobility-model.h"
#include "ns3/pcap-test.h"
#include <sstream>
#include "hwmp-simplest-regression.h"
using namespace ns3;
/// Set to true to rewrite reference traces, leave false to run regression test
const bool WRITE_VECTORS = false;
/// Unique PCAP file name prefix
const char * const PREFIX = "hwmp-simplest-regression-test";
@@ -64,7 +63,7 @@ HwmpSimplestRegressionTest::DoRun ()
Simulator::Run ();
Simulator::Destroy ();
if (!WRITE_VECTORS) CheckResults ();
CheckResults ();
delete m_nodes, m_nodes = 0;
}
@@ -131,8 +130,7 @@ HwmpSimplestRegressionTest::CreateDevices ()
address.SetBase ("10.1.1.0", "255.255.255.0");
m_interfaces = address.Assign (meshDevices);
// 4. write PCAP if needed
std::string prefix = (WRITE_VECTORS ? NS_TEST_SOURCEDIR : std::string (GetTempDir ())) + PREFIX;
wifiPhy.EnablePcapAll (prefix);
wifiPhy.EnablePcapAll (CreateTempDirFilename (PREFIX));
}
void
@@ -140,15 +138,7 @@ HwmpSimplestRegressionTest::CheckResults ()
{
for (int i = 0; i < 2; ++i)
{
std::ostringstream os1, os2;
// File naming conventions are hard-coded here.
os1 << NS_TEST_SOURCEDIR << PREFIX << "-" << i << "-1.pcap";
os2 << GetTempDir () << PREFIX << "-" << i << "-1.pcap";
uint32_t sec (0), usec (0);
bool diff = PcapFile::Diff (os1.str (), os2.str (), sec, usec); // TODO support default PcapWriter snap length here
NS_TEST_EXPECT_MSG_EQ (diff, false, "PCAP traces " << os1.str () << " and " << os2.str ()
<< " differ starting from " << sec << " s " << usec << " us");
NS_PCAP_TEST_EXPECT_EQ (PREFIX << "-" << i << "-1.pcap");
}
}

View File

@@ -31,13 +31,12 @@
#include "ns3/abort.h"
#include "ns3/udp-echo-helper.h"
#include "ns3/mobility-model.h"
#include "ns3/pcap-test.h"
#include <sstream>
#include "hwmp-target-flags-regression.h"
using namespace ns3;
/// Set to true to rewrite reference traces, leave false to run regression test
const bool WRITE_VECTORS = false;
/// Unique PCAP file name prefix
const char * const PREFIX = "hwmp-target-flags-regression-test";
@@ -64,7 +63,7 @@ HwmpDoRfRegressionTest::DoRun ()
Simulator::Run ();
Simulator::Destroy ();
if (!WRITE_VECTORS) CheckResults ();
CheckResults ();
delete m_nodes, m_nodes = 0;
}
@@ -138,8 +137,7 @@ HwmpDoRfRegressionTest::CreateDevices ()
address.SetBase ("10.1.1.0", "255.255.255.0");
m_interfaces = address.Assign (meshDevices);
// 4. write PCAP if needed
std::string prefix = (WRITE_VECTORS ? NS_TEST_SOURCEDIR : std::string (GetTempDir ())) + PREFIX;
wifiPhy.EnablePcapAll (prefix);
wifiPhy.EnablePcapAll (CreateTempDirFilename (PREFIX));
}
void
@@ -147,15 +145,7 @@ HwmpDoRfRegressionTest::CheckResults ()
{
for (int i = 0; i < 4; ++i)
{
std::ostringstream os1, os2;
// File naming conventions are hard-coded here.
os1 << NS_TEST_SOURCEDIR << PREFIX << "-" << i << "-1.pcap";
os2 << GetTempDir () << PREFIX << "-" << i << "-1.pcap";
uint32_t sec (0), usec (0);
bool diff = PcapFile::Diff (os1.str (), os2.str (), sec, usec); // TODO support default PcapWriter snap length here
NS_TEST_EXPECT_MSG_EQ (diff, false, "PCAP traces " << os1.str () << " and " << os2.str ()
<< " differ starting from " << sec << " s " << usec << " us");
NS_PCAP_TEST_EXPECT_EQ (PREFIX << "-" << i << "-1.pcap");
}
}

View File

@@ -28,14 +28,13 @@
#include "ns3/internet-stack-helper.h"
#include "ns3/mobility-model.h"
#include "ns3/pcap-file.h"
#include "ns3/pcap-test.h"
#include <sstream>
#include "pmp-regression.h"
using namespace ns3;
/// Set to true to rewrite reference traces, leave false to run regression test
const bool WRITE_VECTORS = false;
/// Unique PCAP file name prefix
const char * const PREFIX = "pmp-regression-test";
@@ -61,7 +60,7 @@ PeerManagementProtocolRegressionTest::DoRun ()
Simulator::Run ();
Simulator::Destroy ();
if (!WRITE_VECTORS) CheckResults ();
CheckResults ();
delete m_nodes, m_nodes = 0;
}
@@ -97,8 +96,7 @@ PeerManagementProtocolRegressionTest::CreateDevices ()
mesh.SetNumberOfInterfaces (1);
NetDeviceContainer meshDevices = mesh.Install (wifiPhy, *m_nodes);
// 3. write PCAP if needed
std::string prefix = (WRITE_VECTORS ? NS_TEST_SOURCEDIR : std::string (GetTempDir ())) + PREFIX;
wifiPhy.EnablePcapAll (prefix);
wifiPhy.EnablePcapAll (CreateTempDirFilename (PREFIX));
}
void
@@ -106,15 +104,7 @@ PeerManagementProtocolRegressionTest::CheckResults ()
{
for (int i = 0; i < 2; ++i)
{
std::ostringstream os1, os2;
// File naming conventions are hard-coded here.
os1 << NS_TEST_SOURCEDIR << PREFIX << "-" << i << "-1.pcap";
os2 << GetTempDir () << PREFIX << "-" << i << "-1.pcap";
uint32_t sec (0), usec (0);
bool diff = PcapFile::Diff (os1.str (), os2.str (), sec, usec); // TODO support default PcapWriter snap length here
NS_TEST_EXPECT_MSG_EQ (diff, false, "PCAP traces " << os1.str () << " and " << os2.str ()
<< " differ starting from " << sec << " s " << usec << " us");
NS_PCAP_TEST_EXPECT_EQ (PREFIX << "-" << i << "-1.pcap");
}
}

View File

@@ -31,6 +31,7 @@ class Dot11sRegressionSuite : public TestSuite
public:
Dot11sRegressionSuite () : TestSuite ("devices-mesh-dot11s-regression", SYSTEM)
{
SetDataDir (NS_TEST_SOURCEDIR);
AddTestCase (new PeerManagementProtocolRegressionTest);
AddTestCase (new HwmpSimplestRegressionTest);
AddTestCase (new HwmpReactiveRegressionTest);

View File

@@ -28,6 +28,7 @@
#include "ns3/internet-stack-helper.h"
#include "ns3/ipv4-address-helper.h"
#include "ns3/abort.h"
#include "ns3/pcap-test.h"
#include "ns3/udp-echo-helper.h"
#include "ns3/mobility-model.h"
#include <sstream>
@@ -36,8 +37,6 @@
using namespace ns3;
/// Set to true to rewrite reference traces, leave false to run regression test
const bool WRITE_VECTORS = false;
/// Unique PCAP file name prefix
const char * const PREFIX = "flame-regression-test";
@@ -65,7 +64,7 @@ FlameRegressionTest::DoRun ()
Simulator::Run ();
Simulator::Destroy ();
if (!WRITE_VECTORS) CheckResults ();
CheckResults ();
delete m_nodes, m_nodes = 0;
}
@@ -109,8 +108,7 @@ FlameRegressionTest::CreateDevices ()
address.SetBase ("10.1.1.0", "255.255.255.0");
m_interfaces = address.Assign (meshDevices);
// 4. write PCAP if needed
std::string prefix = (WRITE_VECTORS ? NS_TEST_SOURCEDIR : GetTempDir ()) + PREFIX;
wifiPhy.EnablePcapAll (prefix);
wifiPhy.EnablePcapAll (CreateTempDirFilename (PREFIX));
}
void
@@ -134,15 +132,7 @@ FlameRegressionTest::CheckResults ()
{
for (int i = 0; i < 3; ++i)
{
std::ostringstream os1, os2;
// File naming conventions are hard-coded here.
os1 << NS_TEST_SOURCEDIR << PREFIX << "-" << i << "-1.pcap";
os2 << GetTempDir () << PREFIX << "-" << i << "-1.pcap";
uint32_t sec (0), usec (0);
bool diff = PcapFile::Diff (os1.str (), os2.str (), sec, usec); // TODO support default PcapWriter snap length here
NS_TEST_EXPECT_MSG_EQ (diff, false, "PCAP traces " << os1.str () << " and " << os2.str ()
<< " differ starting from " << sec << " s " << usec << " us");
NS_PCAP_TEST_EXPECT_EQ (PREFIX << "-" << i << "-1.pcap");
}
}

View File

@@ -87,7 +87,7 @@ private:
static FlameRtableTest g_FlameRtableTest;
FlameRtableTest::FlameRtableTest () :
TestCase ("Mesh/Flame/FlameRtable"),
TestCase ("FlameRtable"),
error (false),
dst ("01:00:00:01:00:01"),
hop ("01:00:00:01:00:03"),

View File

@@ -27,6 +27,7 @@ class FlameRegressionSuite : public TestSuite
public:
FlameRegressionSuite () : TestSuite ("devices-mesh-flame-regression", SYSTEM)
{
SetDataDir (NS_TEST_SOURCEDIR);
AddTestCase (new FlameRegressionTest);
}
} g_flameRegressionSuite;

View File

@@ -140,7 +140,7 @@ private:
/// Dump NS-2 trace to tmp file
bool WriteTrace ()
{
m_traceFile = GetTempDir () + "Ns2MobilityHelperTest.tcl";
m_traceFile = CreateTempDirFilename ("Ns2MobilityHelperTest.tcl");
std::ofstream of (m_traceFile.c_str ());
NS_TEST_ASSERT_MSG_EQ_RETURNS_BOOL (of.is_open (), true, "Need to write tmp. file");
of << m_trace;
@@ -176,7 +176,7 @@ private:
m_nextRefPoint++;
}
return GetErrorStatus ();
return IsStatusFailure ();
}
/// Listen for course change events
void CourseChange (std::string context, Ptr<const MobilityModel> mobility)
@@ -243,6 +243,8 @@ class Ns2MobilityHelperTestSuite : public TestSuite
public:
Ns2MobilityHelperTestSuite () : TestSuite ("mobility-ns2-trace-helper", UNIT)
{
SetDataDir (NS_TEST_SOURCEDIR);
// to be used as temporary variable for test cases.
// Note that test suite takes care of deleting all test cases.
Ns2MobilityHelperTest * t (0);

View File

@@ -40,8 +40,8 @@ protected:
virtual void DoRun (void);
private:
bool TestSerialize (void);
bool TestDeserialize (void);
void TestSerialize (void);
void TestDeserialize (void);
Ptr<PbbPacket> m_refPacket;
Buffer m_refBuffer;
@@ -65,44 +65,38 @@ PbbTestCase::~PbbTestCase (void)
void
PbbTestCase::DoRun (void)
{
NS_TEST_ASSERT_MSG_EQ (TestSerialize (), false,
"serialization failed");
NS_TEST_ASSERT_MSG_EQ (TestDeserialize (), false,
"deserialization failed");
TestSerialize ();
TestDeserialize ();
}
bool
void
PbbTestCase::TestSerialize (void)
{
Buffer newBuffer;
newBuffer.AddAtStart (m_refPacket->GetSerializedSize ());
m_refPacket->Serialize (newBuffer.Begin ());
NS_TEST_ASSERT_MSG_EQ_RETURNS_BOOL (newBuffer.GetSize (), m_refBuffer.GetSize (),
"serialization failed, buffers have different sizes");
NS_TEST_ASSERT_MSG_EQ (newBuffer.GetSize (), m_refBuffer.GetSize (),
"serialization failed, buffers have different sizes");
int memrv = memcmp (newBuffer.PeekData (), m_refBuffer.PeekData (),
newBuffer.GetSize ());
NS_TEST_ASSERT_MSG_EQ_RETURNS_BOOL (memrv, 0,
"serialization faled, buffers differ");
return GetErrorStatus ();
NS_TEST_ASSERT_MSG_EQ (memrv, 0,
"serialization faled, buffers differ");
}
bool
void
PbbTestCase::TestDeserialize (void)
{
Ptr<PbbPacket> newPacket = Create<PbbPacket> ();
uint32_t numbytes = newPacket->Deserialize (m_refBuffer.Begin ());
NS_TEST_ASSERT_MSG_EQ_RETURNS_BOOL (numbytes, m_refBuffer.GetSize (),
NS_TEST_ASSERT_MSG_EQ (numbytes, m_refBuffer.GetSize (),
"deserialization failed, did not use all bytes");
NS_TEST_ASSERT_MSG_EQ_RETURNS_BOOL (*newPacket, *m_refPacket,
NS_TEST_ASSERT_MSG_EQ (*newPacket, *m_refPacket,
"deserialization failed, objects do not match");
return GetErrorStatus ();
}
class PbbTestSuite : public TestSuite

View File

@@ -107,7 +107,7 @@ WriteModeCreateTestCase::DoSetup (void)
std::stringstream filename;
uint32_t n = rand ();
filename << n;
m_testFilename = GetTempDir () + filename.str () + ".pcap";
m_testFilename = CreateTempDirFilename (filename.str () + ".pcap");
}
void
@@ -219,7 +219,7 @@ ReadModeCreateTestCase::DoSetup (void)
std::stringstream filename;
uint32_t n = rand ();
filename << n;
m_testFilename = GetTempDir () + filename.str () + ".pcap";
m_testFilename = CreateTempDirFilename (filename.str () + ".pcap");
}
void
@@ -325,7 +325,7 @@ AppendModeCreateTestCase::DoSetup (void)
std::stringstream filename;
uint32_t n = rand ();
filename << n;
m_testFilename = GetTempDir () + filename.str () + ".pcap";
m_testFilename = CreateTempDirFilename (filename.str () + ".pcap");
}
void
@@ -431,7 +431,7 @@ FileHeaderTestCase::DoSetup (void)
std::stringstream filename;
uint32_t n = rand ();
filename << n;
m_testFilename = GetTempDir () + filename.str () + ".pcap";
m_testFilename = CreateTempDirFilename (filename.str () + ".pcap");
}
void
@@ -668,7 +668,7 @@ RecordHeaderTestCase::DoSetup (void)
std::stringstream filename;
uint32_t n = rand ();
filename << n;
m_testFilename = GetTempDir () + filename.str () + ".pcap";
m_testFilename = CreateTempDirFilename (filename.str () + ".pcap");
}
void
@@ -1008,7 +1008,7 @@ ReadFileTestCase::DoRun (void)
//
//
std::string filename = NS_TEST_SOURCEDIR + "known.pcap";
std::string filename = CreateDataDirFilename ("known.pcap");
f.Open (filename, std::ios::in);
NS_TEST_ASSERT_MSG_EQ (f.Fail (), false, "Open (" << filename <<
", \"std::ios::in\") returns error");
@@ -1070,7 +1070,7 @@ DiffTestCase::DoRun (void)
//
// Check that PcapDiff(file, file) is false
//
std::string filename = NS_TEST_SOURCEDIR + "known.pcap";
std::string filename = CreateDataDirFilename ("known.pcap");
uint32_t sec (0), usec (0);
bool diff = PcapFile::Diff (filename, filename, sec, usec);
NS_TEST_EXPECT_MSG_EQ (diff, false, "PcapDiff(file, file) must always be false");
@@ -1110,6 +1110,7 @@ public:
PcapFileTestSuite::PcapFileTestSuite ()
: TestSuite ("pcap-file", UNIT)
{
SetDataDir (NS_TEST_SOURCEDIR);
AddTestCase (new WriteModeCreateTestCase);
AddTestCase (new ReadModeCreateTestCase);
//AddTestCase (new AppendModeCreateTestCase);

View File

@@ -0,0 +1,34 @@
#ifndef PCAP_TEST_H
#define PCAP_TEST_H
#include <sstream>
#include <string>
#include <stdint.h>
#include "pcap-file.h"
#include "ns3/test.h"
/**
 * \brief Test that a pair of reference/new pcap files are equal
 *
 * The filename is interpreted as a stream (any `<<`-able expression
 * may be passed), so callers can build names like
 * NS_PCAP_TEST_EXPECT_EQ (prefix << "-" << i << ".pcap").
 *
 * NOTE(review): this macro calls CreateDataDirFilename and
 * CreateTempDirFilename unqualified, so it is presumably intended to
 * be expanded only inside a TestCase member function that provides
 * them — confirm against ns3::TestCase.
 *
 * \param filename The name of the file to read in the reference/temporary
 * directories
 */
#define NS_PCAP_TEST_EXPECT_EQ(filename) \
do { \
std::ostringstream oss; \
oss << filename; \
std::string expected = CreateDataDirFilename (oss.str()); \
std::string got = CreateTempDirFilename (oss.str()); \
uint32_t sec(0), usec(0); \
/* TODO support default PcapWriter snap length here */ \
bool diff = PcapFile::Diff (got, expected, sec, usec); \
NS_TEST_EXPECT_MSG_EQ (diff, false, \
"PCAP traces " << got << " and " << expected \
<< " differ starting from " << sec << " s " \
<< usec << " us"); \
} while (false)
#endif /* PCAP_TEST_H */

View File

@@ -119,6 +119,7 @@ def build(bld):
'utils/sgi-hashmap.h',
'utils/simple-channel.h',
'utils/simple-net-device.h',
'utils/pcap-test.h',
'helper/application-container.h',
'helper/net-device-container.h',
'helper/node-container.h',

View File

@@ -38,16 +38,10 @@
#include "ns3/internet-stack-helper.h"
#include "ns3/v4ping-helper.h"
#include "ns3/pcap-file.h"
#include "ns3/pcap-test.h"
#include "bug780-test.h"
/// Set to true to rewrite reference traces, leave false to run regression tests
namespace
{
const bool WRITE_VECTORS = false;
}
namespace ns3
{
namespace olsr
@@ -84,10 +78,7 @@ Bug780Test::DoRun ()
Simulator::Run ();
Simulator::Destroy ();
if (!WRITE_VECTORS)
{
CheckResults ();
}
CheckResults ();
}
void
@@ -172,8 +163,7 @@ Bug780Test::CreateNodes (void)
p.Stop (Seconds (SimTime) - Seconds (0.001));
// pcap
std::string prefix = (WRITE_VECTORS ? NS_TEST_SOURCEDIR : GetTempDir ()) + PREFIX;
wifiPhy.EnablePcapAll (prefix);
wifiPhy.EnablePcapAll (CreateTempDirFilename (PREFIX));
}
void
@@ -181,15 +171,7 @@ Bug780Test::CheckResults ()
{
for (uint32_t i = 0; i < 2; ++i)
{
std::ostringstream os1, os2;
// File naming conventions are hard-coded here.
os1 << NS_TEST_SOURCEDIR << PREFIX << "-" << i << "-0.pcap";
os2 << GetTempDir () << PREFIX << "-" << i << "-0.pcap";
uint32_t sec (0), usec (0);
bool diff = PcapFile::Diff (os1.str (), os2.str (), sec, usec);
NS_TEST_EXPECT_MSG_EQ (diff, false, "PCAP traces " << os1.str () << " and " << os2.str ()
<< " differ starting from " << sec << " s " << usec << " us");
NS_PCAP_TEST_EXPECT_EQ (PREFIX << "-" << i << "-0.pcap");
}
}

View File

@@ -30,9 +30,7 @@
#include "ns3/point-to-point-helper.h"
#include "ns3/ipv4-address-helper.h"
#include "ns3/abort.h"
/// Set to true to rewrite reference traces, leave false to run regression tests
const bool WRITE_VECTORS = false;
#include "ns3/pcap-test.h"
namespace ns3
{
@@ -61,7 +59,7 @@ HelloRegressionTest::DoRun ()
Simulator::Run ();
Simulator::Destroy ();
if (!WRITE_VECTORS) CheckResults ();
CheckResults ();
}
void
@@ -85,8 +83,7 @@ HelloRegressionTest::CreateNodes ()
ipv4.SetBase ("10.1.1.0", "255.255.255.0");
ipv4.Assign (nd);
// setup PCAP traces
std::string prefix = (WRITE_VECTORS ? NS_TEST_SOURCEDIR : GetTempDir ()) + PREFIX;
p2p.EnablePcapAll (prefix);
p2p.EnablePcapAll (CreateTempDirFilename (PREFIX));
}
void
@@ -94,15 +91,7 @@ HelloRegressionTest::CheckResults ()
{
for (uint32_t i = 0; i < 2; ++i)
{
std::ostringstream os1, os2;
// File naming conventions are hard-coded here.
os1 << NS_TEST_SOURCEDIR << PREFIX << "-" << i << "-1.pcap";
os2 << GetTempDir () << PREFIX << "-" << i << "-1.pcap";
uint32_t sec (0), usec (0);
bool diff = PcapFile::Diff (os1.str (), os2.str (), sec, usec);
NS_TEST_EXPECT_MSG_EQ (diff, false, "PCAP traces " << os1.str () << " and " << os2.str ()
<< " differ starting from " << sec << " s " << usec << " us");
NS_PCAP_TEST_EXPECT_EQ (PREFIX << "-" << i << "-1.pcap");
}
}

View File

@@ -30,6 +30,7 @@ class RegressionTestSuite : public TestSuite
public:
RegressionTestSuite () : TestSuite ("routing-olsr-regression", SYSTEM)
{
SetDataDir (NS_TEST_SOURCEDIR);
AddTestCase (new HelloRegressionTest);
AddTestCase (new TcRegressionTest);
AddTestCase (new Bug780Test);

View File

@@ -25,6 +25,7 @@
#include "ns3/double.h"
#include "ns3/uinteger.h"
#include "ns3/string.h"
#include "ns3/pcap-test.h"
#include "ns3/pcap-file.h"
#include "ns3/olsr-helper.h"
#include "ns3/internet-stack-helper.h"
@@ -35,9 +36,6 @@
#include "ns3/mobility-helper.h"
#include "ns3/nqos-wifi-mac-helper.h"
/// Set to true to rewrite reference traces, leave false to run regression tests
const bool WRITE_VECTORS = false;
namespace ns3
{
namespace olsr
@@ -66,7 +64,7 @@ TcRegressionTest::DoRun ()
Simulator::Run ();
Simulator::Destroy ();
if (!WRITE_VECTORS) CheckResults ();
CheckResults ();
}
void
@@ -113,8 +111,7 @@ TcRegressionTest::CreateNodes ()
ipv4.Assign (nd);
// setup PCAP traces
std::string prefix = (WRITE_VECTORS ? NS_TEST_SOURCEDIR : GetTempDir ()) + PREFIX;
wifiPhy.EnablePcapAll (prefix);
wifiPhy.EnablePcapAll (CreateTempDirFilename(PREFIX));
}
void
@@ -122,15 +119,7 @@ TcRegressionTest::CheckResults ()
{
for (uint32_t i = 0; i < 3; ++i)
{
std::ostringstream os1, os2;
// File naming conventions are hard-coded here.
os1 << NS_TEST_SOURCEDIR << PREFIX << "-" << i << "-1.pcap";
os2 << GetTempDir () << PREFIX << "-" << i << "-1.pcap";
uint32_t sec (0), usec (0);
bool diff = PcapFile::Diff (os1.str (), os2.str (), sec, usec);
NS_TEST_EXPECT_MSG_EQ (diff, false, "PCAP traces " << os1.str () << " and " << os2.str ()
<< " differ starting from " << sec << " s " << usec << " us");
NS_PCAP_TEST_EXPECT_EQ (PREFIX << "-" << i << "-1.pcap");
}
}

View File

@@ -75,6 +75,7 @@ public:
private:
virtual void DoRun (void);
static std::string Name (std::string channelType, double snrLinear, uint64_t phyRate);
double m_snrLinear;
uint64_t m_phyRate;
@@ -82,24 +83,27 @@ private:
std::string m_channelType;
};
std::string
SpectrumIdealPhyTestCase::Name (std::string channelType, double snrLinear, uint64_t phyRate)
{
  // Compose a descriptive, human-readable test-case name out of the
  // configuration parameters so a failure report can be matched back
  // to the exact channel/SNR/rate combination that produced it.
  std::ostringstream nameStream;
  nameStream << channelType;
  nameStream << " snr = " << snrLinear << " (linear), ";
  nameStream << " phyRate = " << phyRate << " bps";
  return nameStream.str ();
}
SpectrumIdealPhyTestCase::SpectrumIdealPhyTestCase (double snrLinear,
uint64_t phyRate,
bool rateIsAchievable,
std::string channelType)
: TestCase (""),
: TestCase (Name (channelType, snrLinear, phyRate)),
m_snrLinear (snrLinear),
m_phyRate (phyRate),
m_rateIsAchievable (rateIsAchievable),
m_channelType (channelType)
{
std::ostringstream oss;
oss << channelType
<< " snr = " << snrLinear << " (linear), "
<< " phyRate = " << phyRate << " bps";
SetName (oss.str ());
}
SpectrumIdealPhyTestCase::~SpectrumIdealPhyTestCase ()

View File

@@ -56,7 +56,7 @@
|| (i->fc > j->fc + (tol)) || (i->fc < j->fc - (tol)) \
|| (i->fh > j->fh + (tol)) || (i->fh < j->fh - (tol))) \
{ \
if (gBreakOnFailure) { *(int *)0 = 0; } \
ASSERT_ON_FAILURE; \
std::ostringstream indexStream; \
indexStream << "[" << k << "]"; \
std::ostringstream msgStream; \
@@ -67,6 +67,7 @@
expectedStream << j->fl << " <-- " << j->fc << " --> " << j->fh; \
ReportTestFailure (std::string (# actual) + indexStream.str () + " == " + std::string (# expected) + indexStream.str (), \
actualStream.str (), expectedStream.str (), msgStream.str (), (file), (line)); \
CONTINUE_ON_FAILURE; \
} \
++i; \
++j; \
@@ -117,7 +118,7 @@
{ \
if ((*i) > (*j) + (tol) || (*i) < (*j) - (tol)) \
{ \
if (gBreakOnFailure) { *(int *)0 = 0; } \
ASSERT_ON_FAILURE; \
std::ostringstream indexStream; \
indexStream << "[" << k << "]"; \
std::ostringstream msgStream; \
@@ -128,6 +129,7 @@
expectedStream << expected; \
ReportTestFailure (std::string (# actual) + indexStream.str () + " == " + std::string (# expected) + indexStream.str (), \
actualStream.str (), expectedStream.str (), msgStream.str (), file, line); \
CONTINUE_ON_FAILURE; \
} \
++i; \
++j; \

View File

@@ -72,8 +72,8 @@ SpectrumValueTestCase::MoreOrLessEqual (SpectrumValue x, SpectrumValue y)
void
SpectrumValueTestCase::DoRun (void)
{
NS_TEST_ASSERT_MSG_SPECTRUM_MODEL_EQ_TOL (*m_a.GetSpectrumModel (), *m_b.GetSpectrumModel (), TOLERANCE, GetName ());
NS_TEST_ASSERT_MSG_SPECTRUM_VALUE_EQ_TOL (m_a, m_b, TOLERANCE, GetName ());
NS_TEST_ASSERT_MSG_SPECTRUM_MODEL_EQ_TOL (*m_a.GetSpectrumModel (), *m_b.GetSpectrumModel (), TOLERANCE, "");
NS_TEST_ASSERT_MSG_SPECTRUM_VALUE_EQ_TOL (m_a, m_b, TOLERANCE, "");
}

View File

@@ -184,7 +184,7 @@ Ns3TcpInteroperabilityTestCase::Ipv4L3Tx (std::string context, Ptr<const Packet>
//
// Avoid streams of errors -- only report the first.
//
if (GetErrorStatus () == false)
if (IsStatusSuccess ())
{
NS_TEST_EXPECT_MSG_EQ (result, 0, "Expected data comparison error");
}

View File

@@ -121,7 +121,7 @@ Ns3TcpLossTestCase::DoSetup (void)
//
std::ostringstream oss;
oss << "/response-vectors/ns3tcp-loss-" << m_tcpModel << m_testCase << "-response-vectors.pcap";
m_pcapFilename = NS_TEST_SOURCEDIR + oss.str ();
m_pcapFilename = CreateDataDirFilename(oss.str ());
if (m_writeVectors)
{
@@ -185,6 +185,8 @@ Ns3TcpLossTestCase::Ipv4L3Tx (std::string context, Ptr<const Packet> packet, Ptr
uint32_t tsSec, tsUsec, inclLen, origLen, readLen;
m_pcapFile.Read (expected, sizeof(expected), tsSec, tsUsec, inclLen, origLen, readLen);
NS_LOG_DEBUG ("read " << readLen);
uint8_t *actual = new uint8_t[readLen];
p->CopyData (actual, readLen);
@@ -195,7 +197,7 @@ Ns3TcpLossTestCase::Ipv4L3Tx (std::string context, Ptr<const Packet> packet, Ptr
//
// Avoid streams of errors -- only report the first.
//
if (GetErrorStatus () == false)
if (IsStatusSuccess ())
{
NS_TEST_EXPECT_MSG_EQ (result, 0, "Expected data comparison error");
}
@@ -442,6 +444,7 @@ public:
Ns3TcpLossTestSuite::Ns3TcpLossTestSuite ()
: TestSuite ("ns3-tcp-loss", SYSTEM)
{
SetDataDir (NS_TEST_SOURCEDIR);
Packet::EnablePrinting (); // Enable packet metadata for all test cases
AddTestCase (new Ns3TcpLossTestCase ("Tahoe", 0));
AddTestCase (new Ns3TcpLossTestCase ("Tahoe", 1));

View File

@@ -190,7 +190,7 @@ Ns3TcpStateTestCase::Ipv4L3Tx (std::string context, Ptr<const Packet> packet, Pt
//
// Avoid streams of errors -- only report the first.
//
if (GetErrorStatus () == false)
if (IsStatusSuccess ())
{
NS_TEST_EXPECT_MSG_EQ (result, 0, "Expected data comparison error");
}

View File

@@ -151,7 +151,6 @@ class ns3module_taskgen(TaskGen.task_gen):
pcfile = bld.new_task_gen('ns3pcfile')
pcfile.module = self
def _create_ns3_module(self, bld, name, dependencies, static):
# FIXME: env modifications are overwritten by parent caller
@@ -169,22 +168,28 @@ class ns3module_taskgen(TaskGen.task_gen):
module.features.extend(features)
module.path = self.path
module.uselib = self.uselib
module.target = 'ns3-' + name
if hasattr(self, 'includes'):
module.includes = self.includes
if hasattr(self, 'defines'):
module.defines = self.defines
else:
module.defines = []
if hasattr(self, 'add_objects'):
module.add_objects = self.add_objects
else:
module.add_objects = []
if hasattr(self, "is_ns3_module"):
module.is_ns3_module = self.is_ns3_module
if hasattr(self, 'add_objects'):
module.add_objects = self.add_objects
module.is_static = static
module.vnum = wutils.VNUM
# Add the proper path to the module's name.
module.target = '%s/ns3-%s' % (bld.srcnode.relpath_gen(self.path), name)
# Set the libraries this module depends on.
module.module_deps = list(dependencies)
linkflags = []
cxxflags = []
ccflags = []
if not static:
module.env.append_value('CXXFLAGS', module.env['shlib_CXXFLAGS'])
module.env.append_value('CCFLAGS', module.env['shlib_CXXFLAGS'])
cxxflags = module.env['shlib_CXXFLAGS']
ccflags = module.env['shlib_CXXFLAGS']
# Turn on the link flags for shared libraries if we have the
# proper compiler and platform.
if module.env['CXX_NAME'] in ['gcc', 'icc'] and module.env['WL_SONAME_SUPPORTED']:
@@ -192,17 +197,54 @@ class ns3module_taskgen(TaskGen.task_gen):
# at its beginning because all of the libraries will end
# up in the same directory.
module_library_name = os.path.basename(ccroot.get_target_name(module))
module.env.append_value('LINKFLAGS', '-Wl,--soname=%s' % module_library_name)
linkflags = '-Wl,--soname=%s' % module_library_name
elif module.env['CXX_NAME'] in ['gcc', 'icc'] and \
os.uname()[4] == 'x86_64' and \
module.env['ENABLE_PYTHON_BINDINGS']:
# enable that flag for static builds only on x86-64 platforms
# when gcc is present and only when we want python bindings
# (it's more efficient to not use this option if we can avoid it)
module.env.append_value('CXXFLAGS', '-mcmodel=large')
module.env.append_value('CCFLAGS', '-mcmodel=large')
module.env.append_value('CXXDEFINES', "NS3_MODULE_COMPILATION")
module.env.append_value('CCDEFINES', "NS3_MODULE_COMPILATION")
cxxflags = '-mcmodel=large'
ccflags = '-mcmodel=large'
cxxdefines = "NS3_MODULE_COMPILATION"
ccdefines = "NS3_MODULE_COMPILATION"
if len(module.source) > 0 and hasattr(self, 'ns3_dir_location'):
uselib_cpppath = []
for lib in module.uselib.split():
if 'CPPPATH_%s' % lib in module.env:
uselib_cpppath.extend(module.env['CPPPATH_%s' % lib])
objects = []
for src in module.source[0:-1]:
full_src = os.path.join(self.ns3_dir_location, src)
path = os.path.dirname(full_src)
target = '%s_object' % src
# XXX: calculate the features correctly here.
obj = bld (source=[full_src], target=target, features='cxx cc',
defines=['NS_TEST_SOURCEDIR="%s"' % path],
cxxflags = module.env['CXXFLAGS'] + cxxflags,
ccflags = module.env['CCFLAGS'] + ccflags,
includes=' '.join(uselib_cpppath))
objects.append(target)
last = module.source[-1]
full_src = os.path.join(self.ns3_dir_location, last)
path = os.path.dirname(full_src)
module.defines.append('NS_TEST_SOURCEDIR="%s"' % path)
module.source = [last]
module.add_objects.extend(objects)
module.is_static = static
module.vnum = wutils.VNUM
# Add the proper path to the module's name.
module.target = '%s/ns3-%s' % (bld.srcnode.relpath_gen(self.path), name)
# Set the libraries this module depends on.
module.module_deps = list(dependencies)
module.env.append_value('CXXFLAGS', cxxflags)
module.env.append_value('CCFLAGS', ccflags)
module.env.append_value('LINKFLAGS', linkflags)
module.env.append_value('CXXDEFINES', cxxdefines)
module.env.append_value('CCDEFINES', ccdefines)
module.install_path = "${LIBDIR}"
@@ -221,6 +263,7 @@ def create_ns3_module(bld, name, dependencies=(), test=False):
module.module_deps = list(dependencies)
module.test = test
module.is_ns3_module = True
module.ns3_dir_location = bld.path.relpath_gen(bld.srcnode)
return module
@@ -439,7 +482,7 @@ class ns3pcfile_task(Task.Task):
def _generate_pcfile(self, name, use, uselib_local, env, outfilename):
outfile = open(outfilename, 'w')
prefix = env.PREFIX
includedir = os.path.join(env.INCLUDEDIR, "ns3")
includedir = env.INCLUDEDIR
libdir = env.LIBDIR
libs = self._self_libs(self.env, name, '${libdir}')
for dep in use:

168
test.py
View File

@@ -165,53 +165,49 @@ def parse_examples_to_run_file(
#
TMP_OUTPUT_DIR = "testpy-output"
def get_node_text(node):
    """Return the value of the first DOM text-node child of node.

    Falls back to the literal string "None" when the node has no text
    child, so callers can format the result unconditionally.
    """
    text_values = (child.nodeValue for child in node.childNodes
                   if child.nodeType == child.TEXT_NODE)
    return next(text_values, "None")
def read_test(test):
    """Extract (result, name, real-time) from a <Test> XML element.

    The <Time> child is optional (e.g. skipped tests carry none); its
    absence yields an empty string for the elapsed-time field.
    """
    result = test.find('Result').text
    name = test.find('Name').text
    time_node = test.find('Time')
    time_real = time_node.get('real') if time_node is not None else ''
    return (result, name, time_real)
#
# A simple example of writing a text file with a test result summary. It is
# expected that this output will be fine for developers looking for problems.
#
def node_to_text (test, f):
    """Append a plain-text summary of one <Test> element to the open
    file object f, then recurse into its nested <Test> children (tests
    may now nest to arbitrary depth).

    Bug fix: the tuple returned by read_test() was unpacked into the
    name 'time' while the format string referenced the undefined name
    'time_real', raising NameError on every call; the two names now
    agree.
    """
    (result, name, time_real) = read_test(test)
    output = "%s: Test Suite \"%s\" (%s)\n" % (result, name, time_real)
    f.write(output)
    # FailureDetails children are only present for failing tests.
    for details in test.findall('FailureDetails'):
        f.write(" Details:\n")
        f.write(" Message: %s\n" % details.find('Message').text)
        f.write(" Condition: %s\n" % details.find('Condition').text)
        f.write(" Actual: %s\n" % details.find('Actual').text)
        f.write(" Limit: %s\n" % details.find('Limit').text)
        f.write(" File: %s\n" % details.find('File').text)
        f.write(" Line: %s\n" % details.find('Line').text)
    # Recurse over child tests so hierarchical suites are fully printed.
    for child in test.findall('Test'):
        node_to_text(child, f)
def translate_to_text(results_file, text_file):
f = open(text_file, 'w')
try:
dom = xml.dom.minidom.parse(results_file)
except xml.parsers.expat.error:
print "\nAn error was encountered while parsing the XML file %s." % (results_file)
sys.exit(1)
import xml.etree.ElementTree as ET
et = ET.parse (results_file)
for test in et.findall('Test'):
node_to_text (test, f)
for suite in dom.getElementsByTagName("TestSuite"):
result = get_node_text(suite.getElementsByTagName("SuiteResult")[0])
name = get_node_text(suite.getElementsByTagName("SuiteName")[0])
time = get_node_text(suite.getElementsByTagName("SuiteTime")[0])
output = "%s: Test Suite \"%s\" (%s)\n" % (result, name, time)
f.write(output)
if result != "CRASH":
for case in suite.getElementsByTagName("TestCase"):
result = get_node_text(case.getElementsByTagName("CaseResult")[0])
name = get_node_text(case.getElementsByTagName("CaseName")[0])
time = get_node_text(case.getElementsByTagName("CaseTime")[0])
output = " %s: Test Case \"%s\" (%s)\n" % (result, name, time)
f.write(output)
if result == "FAIL":
for details in case.getElementsByTagName("FailureDetails"):
f.write(" Details:\n")
f.write(" Message: %s\n" % get_node_text(details.getElementsByTagName("Message")[0]))
f.write(" Condition: %s\n" % get_node_text(details.getElementsByTagName("Condition")[0]))
f.write(" Actual: %s\n" % get_node_text(details.getElementsByTagName("Actual")[0]))
f.write(" Limit: %s\n" % get_node_text(details.getElementsByTagName("Limit")[0]))
f.write(" File: %s\n" % get_node_text(details.getElementsByTagName("File")[0]))
f.write(" Line: %s\n" % get_node_text(details.getElementsByTagName("Line")[0]))
for example in dom.getElementsByTagName("Example"):
result = get_node_text(example.getElementsByTagName("Result")[0])
name = get_node_text(example.getElementsByTagName("Name")[0])
time = get_node_text(example.getElementsByTagName("ElapsedTime")[0])
output = "%s: Example \"%s\" (%s)\n" % (result, name, time)
for example in et.findall('Example'):
result = example.find('Result').text
name = example.find('Name').text
if not example.find('Time') is None:
time_real = example.find('Time').get('real')
else:
time_real = ''
output = "%s: Example \"%s\" (%s)\n" % (result, name, time_real)
f.write(output)
f.close()
@@ -231,20 +227,18 @@ def translate_to_html(results_file, html_file):
#
# Read and parse the whole results file.
#
dom = xml.dom.minidom.parse(results_file)
import xml.etree.ElementTree as ET
et = ET.parse(results_file)
#
# Iterate through the test suites
#
f.write("<h2>Test Suites</h2>\n")
for suite in dom.getElementsByTagName("TestSuite"):
for suite in et.findall('Test'):
#
# For each test suite, get its name, result and execution time info
#
name = get_node_text(suite.getElementsByTagName("SuiteName")[0])
result = get_node_text(suite.getElementsByTagName("SuiteResult")[0])
time = get_node_text(suite.getElementsByTagName("SuiteTime")[0])
(result, name, time) = read_test (suite)
#
# Print a level three header with the result, name and time. If the
@@ -316,15 +310,13 @@ def translate_to_html(results_file, html_file):
#
# Now iterate through all of the test cases.
#
for case in suite.getElementsByTagName("TestCase"):
for case in suite.findall('Test'):
#
# Get the name, result and timing information from xml to use in
# printing table below.
#
name = get_node_text(case.getElementsByTagName("CaseName")[0])
result = get_node_text(case.getElementsByTagName("CaseResult")[0])
time = get_node_text(case.getElementsByTagName("CaseTime")[0])
(result, name, time) = read_test(case)
#
# If the test case failed, we iterate through possibly multiple
@@ -349,7 +341,7 @@ def translate_to_html(results_file, html_file):
#
first_row = True
for details in case.getElementsByTagName("FailureDetails"):
for details in case.findall('FailureDetails'):
#
# Start a new row in the table for each possible Failure Detail
@@ -367,12 +359,12 @@ def translate_to_html(results_file, html_file):
f.write("<td></td>\n")
f.write("<td>")
f.write("<b>Message: </b>%s, " % get_node_text(details.getElementsByTagName("Message")[0]))
f.write("<b>Condition: </b>%s, " % get_node_text(details.getElementsByTagName("Condition")[0]))
f.write("<b>Actual: </b>%s, " % get_node_text(details.getElementsByTagName("Actual")[0]))
f.write("<b>Limit: </b>%s, " % get_node_text(details.getElementsByTagName("Limit")[0]))
f.write("<b>File: </b>%s, " % get_node_text(details.getElementsByTagName("File")[0]))
f.write("<b>Line: </b>%s" % get_node_text(details.getElementsByTagName("Line")[0]))
f.write("<b>Message: </b>%s, " % details.find('Message').text)
f.write("<b>Condition: </b>%s, " % details.find('Condition').text)
f.write("<b>Actual: </b>%s, " % details.find('Actual').text)
f.write("<b>Limit: </b>%s, " % details.find('Limit').text)
f.write("<b>File: </b>%s, " % details.find('File').text)
f.write("<b>Line: </b>%s" % details.find('Line').text)
f.write("</td>\n")
#
@@ -428,7 +420,7 @@ def translate_to_html(results_file, html_file):
#
# Now iterate through all of the examples
#
for example in dom.getElementsByTagName("Example"):
for example in et.findall("Example"):
#
# Start a new row for each example
@@ -438,9 +430,7 @@ def translate_to_html(results_file, html_file):
#
# Get the result and name of the example in question
#
result = get_node_text(example.getElementsByTagName("Result")[0])
name = get_node_text(example.getElementsByTagName("Name")[0])
time = get_node_text(example.getElementsByTagName("ElapsedTime")[0])
(result, name, time) = read_test(example)
#
# If the example either failed or crashed, print its result status
@@ -786,7 +776,7 @@ class Job:
#
# This is the shell command that will be executed in the job. For example,
#
# "utils/test-runner --suite=some-test-suite"
# "utils/test-runner --test-name=some-test-suite"
#
def set_shell_command(self, shell_command):
self.shell_command = shell_command
@@ -919,8 +909,12 @@ class worker_thread(threading.Thread):
# to the test runner, specifically the base directory and temp
# file name
#
if options.update_data:
update_data = '--update-data'
else:
update_data = ''
(job.returncode, standard_out, standard_err, et) = run_job_synchronously(job.shell_command +
" --basedir=%s --tempdir=%s --out=%s" % (job.basedir, job.tempdir, job.tmp_file_name),
" --xml --tempdir=%s --out=%s %s" % (job.tempdir, job.tmp_file_name, update_data),
job.cwd, options.valgrind, False)
job.set_elapsed_time(et)
@@ -1063,12 +1057,12 @@ def run_tests():
# handle them without doing all of the hard work.
#
if options.kinds:
path_cmd = os.path.join("utils", "test-runner --kinds")
path_cmd = os.path.join("utils", "test-runner --print-test-type-list")
(rc, standard_out, standard_err, et) = run_job_synchronously(path_cmd, os.getcwd(), False, False)
print standard_out
if options.list:
path_cmd = os.path.join("utils", "test-runner --list")
path_cmd = os.path.join("utils", "test-runner --print-test-name-list")
(rc, standard_out, standard_err, et) = run_job_synchronously(path_cmd, os.getcwd(), False, False)
print standard_out
@@ -1113,7 +1107,7 @@ def run_tests():
xml_results_file = os.path.join(testpy_output_dir, "results.xml")
f = open(xml_results_file, 'w')
f.write('<?xml version="1.0"?>\n')
f.write('<TestResults>\n')
f.write('<Results>\n')
f.close()
#
@@ -1141,10 +1135,10 @@ def run_tests():
suites = options.suite + "\n"
elif len(options.example) == 0 and len(options.pyexample) == 0:
if len(options.constrain):
path_cmd = os.path.join("utils", "test-runner --list --constrain=%s" % options.constrain)
path_cmd = os.path.join("utils", "test-runner --print-test-name-list --test-type=%s" % options.constrain)
(rc, suites, standard_err, et) = run_job_synchronously(path_cmd, os.getcwd(), False, False)
else:
path_cmd = os.path.join("utils", "test-runner --list")
path_cmd = os.path.join("utils", "test-runner --print-test-name-list")
(rc, suites, standard_err, et) = run_job_synchronously(path_cmd, os.getcwd(), False, False)
else:
suites = ""
@@ -1222,11 +1216,11 @@ def run_tests():
job.set_basedir(os.getcwd())
job.set_tempdir(testpy_output_dir)
if (options.multiple):
multiple = " --multiple"
else:
multiple = ""
else:
multiple = " --stop-on-failure"
path_cmd = os.path.join("utils", "test-runner --suite=%s%s" % (test, multiple))
path_cmd = os.path.join("utils", "test-runner --test-name=%s%s" % (test, multiple))
job.set_shell_command(path_cmd)
if options.valgrind and test in core_valgrind_skip_tests:
@@ -1510,7 +1504,7 @@ def run_tests():
else:
f.write(' <Result>CRASH</Result>\n')
f.write(' <ElapsedTime>%.3f</ElapsedTime>\n' % job.elapsed_time)
f.write(' <Time real="%.3f"/>\n' % job.elapsed_time)
f.write('</Example>\n')
f.close()
@@ -1561,11 +1555,10 @@ def run_tests():
#
if job.is_skip:
f = open(xml_results_file, 'a')
f.write("<TestSuite>\n")
f.write(" <SuiteName>%s</SuiteName>\n" % job.display_name)
f.write(' <SuiteResult>SKIP</SuiteResult>\n')
f.write(' <SuiteTime>Execution times not available</SuiteTime>\n')
f.write("</TestSuite>\n")
f.write("<Test>\n")
f.write(" <Name>%s</Name>\n" % job.display_name)
f.write(' <Result>SKIP</Result>\n')
f.write("</Test>\n")
f.close()
else:
if job.returncode == 0 or job.returncode == 1 or job.returncode == 2:
@@ -1576,20 +1569,18 @@ def run_tests():
f_from.close()
else:
f = open(xml_results_file, 'a')
f.write("<TestSuite>\n")
f.write(" <SuiteName>%s</SuiteName>\n" % job.display_name)
f.write(' <SuiteResult>CRASH</SuiteResult>\n')
f.write(' <SuiteTime>Execution times not available</SuiteTime>\n')
f.write("</TestSuite>\n")
f.write("<Test>\n")
f.write(" <Name>%s</Name>\n" % job.display_name)
f.write(' <Result>CRASH</Suite>\n')
f.write("</Test>\n")
f.close()
if job.returncode == 2:
f = open(xml_results_file, 'a')
f.write("<TestSuite>\n")
f.write(" <SuiteName>%s</SuiteName>\n" % job.display_name)
f.write(' <SuiteResult>VALGR</SuiteResult>\n')
f.write(' <SuiteTime>Execution times not available</SuiteTime>\n')
f.write("</TestSuite>\n")
f.write("<Test>\n")
f.write(" <Name>%s</Name>\n" % job.display_name)
f.write(' <Result>VALGR</Result>\n')
f.write("</Test>\n")
f.close()
#
@@ -1607,7 +1598,7 @@ def run_tests():
# document
#
f = open(xml_results_file, 'a')
f.write('</TestResults>\n')
f.write('</Results>\n')
f.close()
#
@@ -1669,6 +1660,9 @@ def main(argv):
metavar="EXAMPLE",
help="specify a single example to run (with relative path)")
parser.add_option("-u", "--update-data", action="store_true", dest="update_data", default=False,
help="If examples use reference data files, get them to re-generate them")
parser.add_option("-g", "--grind", action="store_true", dest="valgrind", default=False,
help="run the test suites and examples using valgrind")

View File

@@ -17,455 +17,8 @@
*/
#include "ns3/test.h"
#include "ns3/assert.h"
#include "ns3/abort.h"
#include <iostream>
#include <fstream>
#include <string>
#include <stdlib.h>
#include <stdio.h>
#include <time.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <dirent.h>
#include <string.h>
extern bool gBreakOnFailure;
using namespace ns3;
//
// Create a temporary directory for use by test programs. This is not a
// foolproof thing, but a reasonably good way to get a throwaway directory
// while running tests in a debugger.
//
//
// NOTE(review): this span is part of a rendered diff hunk; the line
// "int main (int argc, char *argv[])" immediately below belongs to the
// replacement code that removes TempDir(), so the text as shown is not
// compilable as-is.  The braced body that follows is TempDir()'s body:
// it creates a throwaway directory for test programs and returns its
// path, or "" on failure.
//
std::string
TempDir (void)
int main (int argc, char *argv[])
{
// Prefer the TMP environment variable, then TEMP, then fall back to /tmp.
char *path = NULL;
path = getenv ("TMP");
if (path == NULL)
{
path = getenv ("TEMP");
if (path == NULL)
{
path = const_cast<char *> ("/tmp");
}
}
//
// Just in case the user wants to go back and find the output, we give
// a hint as to which dir we created by including a time hint.
//
time_t now = time (NULL);
struct tm *tm_now = localtime (&now);
//
// But we also randomize the name in case there are multiple users doing
// this at the same time
//
srand (time (0));
long int n = rand ();
//
// The final path to the directory is going to look something like
//
// /tmp/ns3-14.30.29.32767
//
// The first segment comes from one of the temporary directory env
// variables or /tmp if not found. The directory name starts with an
// identifier telling folks who is making all of the temp directories
// and then the local time (in this case 14.30.29 -- which is 2:30 and
// 29 seconds PM).
//
char dirname[1024];
snprintf (dirname, sizeof(dirname), "%s/ns-3.%d.%d.%d.%ld", path, tm_now->tm_hour, tm_now->tm_min, tm_now->tm_sec, n);
// Windows mkdir() takes no mode argument; POSIX requires permission bits.
#if (defined(_WIN32) || defined(_WIN64)) && !defined(__CYGWIN__)
if(mkdir(dirname) == 0)
#else
if (mkdir (dirname, S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH) == 0)
#endif
{
// Success: hand back the newly created directory path.
return dirname;
}
else
{
// Could not create the directory; callers treat "" as failure.
return "";
}
}
//
// Test suites may need to figure out where their source directory is in order
// to find test vectors. To do that they will need to know where the base
// directory of the distribution is (the directory in which "src" is found).
// It is painful for a user debugging test suites to always provide that dir
// so we try and find it in the current directory tree.
//
std::string
BaseDir (void)
{
  //
  // Walk upward from the current working directory looking for the base
  // directory of an ns-3 distribution, identified by the simultaneous
  // presence of the VERSION and LICENSE files.  Returns the absolute
  // path of that directory, or "" if the filesystem root is reached
  // without finding one.
  //
  char pathbuf[PATH_MAX];
  if (getcwd (pathbuf, sizeof(pathbuf)) == NULL)
    {
      NS_ABORT_MSG ("Basedir(): unable to getcwd()");
    }
  for (;;)
    {
      bool haveVersion = false;
      bool haveLicense = false;
      //
      // Scan the entries of the current candidate directory for the two
      // marker files.
      //
      DIR *dp = opendir (pathbuf);
      if (dp != NULL)
        {
          while (struct dirent *de = readdir (dp))
            {
              if (strcmp (de->d_name, "VERSION") == 0)
                {
                  haveVersion = true;
                }
              if (strcmp (de->d_name, "LICENSE") == 0)
                {
                  haveLicense = true;
                }
            }
          //
          // Bug fix: closedir() was previously called unconditionally,
          // so an opendir() failure led to closedir(NULL), which is
          // undefined behavior.  Only close a stream we actually opened.
          //
          closedir (dp);
        }
      //
      // If both marker files are here, this is the distribution base.
      //
      if (haveVersion && haveLicense)
        {
          return pathbuf;
        }
      //
      // Strip the last path segment and retry with the parent directory.
      //
      char *last = strrchr (pathbuf, '/');
      NS_ASSERT_MSG (last, "No \"/\" found in absolute path ???");
      *last = '\0';
      if (strlen(pathbuf) == 0)
        {
          //
          // Reached the filesystem root without finding the markers.
          //
          return "";
        }
    }
  //
  // Quiet the compiler.
  //
  return "";
}
//
// Run one of the test suites. Returns an integer with the boolean sense of
// "an error has occurred." That is, 0 == false -> no error; 1 == true -> an
// error occurred.
//
int
main (int argc, char *argv[])
{
bool doVerbose = false;
bool doList = false;
bool doMultiple = false;
bool doHelp = false;
bool doSuite = false;
bool doKinds = false;
gBreakOnFailure = false;
bool haveBasedir = false;
bool haveTempdir = false;
bool haveType = false;
std::string suiteName;
std::string basedir;
std::string tempdir;
std::string outfileName;
std::string typeName;
for (int i = 1; i < argc; ++i)
{
std::string arg(argv[i]);
if (arg.find ("--assert") != std::string::npos)
{
gBreakOnFailure = true;
}
if (arg.find ("--basedir=") != std::string::npos)
{
basedir = arg.substr (arg.find_first_of ("=") + 1, 9999);
haveBasedir = true;
}
if (arg.find ("--constrain=") != std::string::npos)
{
typeName = arg.substr (arg.find_first_of ("=") + 1, 9999);
haveType = true;
}
if (arg.compare ("--help") == 0)
{
doHelp = true;
}
if (arg.compare ("--kinds") == 0)
{
doKinds = true;
}
if (arg.compare ("--list") == 0)
{
doList = true;
}
if (arg.compare ("--multiple") == 0)
{
doMultiple = true;
}
if (arg.find ("--out=") != std::string::npos)
{
outfileName = arg.substr (arg.find_first_of ("=") + 1, 9999);
}
if (arg.find ("--suite=") != std::string::npos)
{
suiteName = arg.substr (arg.find_first_of ("=") + 1, 9999);
doSuite = true;
}
if (arg.find ("--tempdir=") != std::string::npos)
{
tempdir = arg.substr (arg.find_first_of ("=") + 1, 9999);
haveTempdir = true;
}
if (arg.compare ("--verbose") == 0)
{
doVerbose = true;
}
}
//
// A help request trumps everything else. If we have one, just print the help
// and leave.
//
if (doHelp)
{
std::cout << " --assert: Tell tests to segfault (like assert) if an error is detected" << std::endl;
std::cout << " --basedir=dir: Set the base directory (where to find src) to \"dir\"" << std::endl;
std::cout << " --tempdir=dir: Set the temporary directory (where to find data files) to \"dir\"" << std::endl;
std::cout << " --constrain=test-type: Constrain checks to test suites of type \"test-type\"" << std::endl;
std::cout << " --help: Print this message" << std::endl;
std::cout << " --kinds: List all of the available kinds of tests" << std::endl;
std::cout << " --list: List all of the test suites (optionally constrained by test-type)" << std::endl;
std::cout << " --multiple: Allow test suites and cases to produce multiple failures" << std::endl;
std::cout << " --out=file-name: Set the test status output file to \"file-name\"" << std::endl;
std::cout << " --suite=suite-name: Run the test suite named \"suite-name\"" << std::endl;
std::cout << " --verbose: Turn on messages in the run test suites" << std::endl;
return false;
}
//
// A kinds request trumps everything remaining. If we are asked, just
// print the list of types and leave.
//
if (doKinds)
{
//
// Coming up with a string to represent a test type is completely up to
// us here. We just define the types as being a string composed of the
// enum defined in test.h converted to lower case.
//
std::cout << " bvt: Build Verification Tests (to see if build completed successfully)" << std::endl;
std::cout << " core: Run all TestSuite-based tests (exclude examples)" << std::endl;
std::cout << " example: Examples (to see if example programs run successfully)" << std::endl;
std::cout << " performance: Performance Tests (check to see if the system is as fast as expected)" << std::endl;
std::cout << " system: System Tests (spans modules to check integration of modules)" << std::endl;
std::cout << " unit: Unit Tests (within modules to check basic functionality)" << std::endl;
return false;
}
//
// A list request is the first functional request. It trumps running the
// actual tests. If we get a list request, we don't run anything, we just
// do the requested list which may or may not be qualified by a typename.
//
if (doList)
{
for (uint32_t i = 0; i < TestRunner::GetNTestSuites (); ++i)
{
TestSuite *suite = TestRunner::GetTestSuite (i);
//
// Filter the tests listed by type if requested. The special typeName
// "core" means any TestSuite.
//
if (haveType && typeName != "core")
{
TestSuite::TestType type = suite->GetTestType ();
if (typeName == "bvt" && type != TestSuite::BVT)
{
continue;
}
if (typeName == "unit" && type != TestSuite::UNIT)
{
continue;
}
if (typeName == "system" && type != TestSuite::SYSTEM)
{
continue;
}
if (typeName == "example" && type != TestSuite::EXAMPLE)
{
continue;
}
if (typeName == "performance" && type != TestSuite::PERFORMANCE)
{
continue;
}
}
//
// This creates a list of test suite names that can be used by the
// high level test manager to get a list of all tests. It will then
// typically launch individual tests in parallel, calling back here
// with a specific "suite=" to run.
//
std::cout << suite->GetName () << std::endl;
}
return false;
}
  //
  // We have a lot of options possible to provide flexibility. It can become
  // painful, however, to provide all of the options when debugging, and it
  // turns out that not all tests require all options. It is really helpful
  // to try and put together some reasonable defaults if we're not provided
  // them.
  //
  // NOTE(review): in this function the return convention is inverted from
  // the usual: returning true signals an error to the caller, false success.
  //
  if (!haveTempdir)
    {
      //
      // No temporary directory was provided. We don't know if the selected
      // test or tests will need one, but we can cook something up. The
      // tmpnam function has its own set of problems, so we'll just do our
      // own thing.
      //
      tempdir = TempDir ();
      // TempDir () returns an empty string on failure -- presumably it
      // creates the directory as a side effect; verify in its definition.
      if (tempdir.size ())
        {
          std::cout << "Temporary directory not provided. Using \"" << tempdir << "\"" << std::endl;
          haveTempdir = true;
        }
      else
        {
          std::cout << "Temporary directory not provided and unable to create one." << std::endl;
          return true;
        }
    }
  if (haveBasedir == false)
    {
      //
      // No basedir was provided. If we don't have it, we can try and find it
      // in the current directory tree.
      //
      basedir = BaseDir ();
      // BaseDir () likewise reports failure with an empty string.
      if (basedir.size ())
        {
          std::cout << "Base directory not provided. Using \"" << basedir << "\"" << std::endl;
          haveBasedir = true;
        }
      else
        {
          std::cout << "Base directory not provided and unable to find one." << std::endl;
          return true;
        }
    }
  //
  // If given an output file, we just append the output of each test suite
  // we're asked to run to the end of that file. We need to append since the
  // higher level test runner may be just running a number of tests back to
  // back. We leave it up to that code to decide how to deal with possible
  // parallel operation -- we just append to a file here. If no output file
  // is specified, we don't do any output and just return the sense of error
  // given by the test.
  //
  // pofs remains null when no output file was requested; the suites below
  // receive that null stream via SetStream () and are expected to cope.
  //
  std::ofstream *pofs = 0;
  std::ofstream ofs;
  if (!outfileName.empty ())
    {
      ofs.open (outfileName.c_str (), std::fstream::out | std::fstream::app);
      pofs = &ofs;
    }
  //
  // If we have a specified test suite to run, then we only run that suite.
  // The default case is to "run everything. We don't expect this to be done
  // much since typically higher level code will be running suites in parallel
  // but we'll do it if asked.
  //
  // result accumulates failure across all suites run: it ends up true if at
  // least one suite's Run () reported an error. suiteRan tracks whether the
  // selection criteria matched anything at all.
  //
  bool result = false;
  bool suiteRan = false;
  for (uint32_t i = 0; i < TestRunner::GetNTestSuites (); ++i)
    {
      TestSuite *testSuite = TestRunner::GetTestSuite (i);
      if (doSuite == false || (doSuite == true && suiteName == testSuite->GetName ()))
        {
          // Propagate the per-invocation configuration into the suite before
          // running it; the suite forwards these to its test cases.
          testSuite->SetBaseDir (basedir);
          testSuite->SetTempDir (tempdir);
          testSuite->SetStream (pofs);
          testSuite->SetVerbose (doVerbose);
          testSuite->SetContinueOnFailure (doMultiple);
          result |= testSuite->Run ();
          suiteRan = true;
        }
    }
  // Closing a never-opened ofstream is a harmless no-op, so this is safe
  // even when no output file was requested.
  ofs.close();
//
// If we couldn't figure out how to run at least one test, then return an error
//
if (suiteRan == false)
{
std::cout << "Unable to find a test to run (constraints too severe or test not found)" << std::endl;
return true;
}
return result;
return ns3::TestRunner::Run (argc, argv);
}