Bug fix: inhibit the Error Model when receiving a data frame that carries only control messages in LteSpectrumPhy::EndRxData

This commit is contained in:
mmiozzo
2012-10-09 17:02:40 +02:00
parent 547378d4b7
commit f3ddd077d9
2 changed files with 2 additions and 1 deletion

View File

@@ -758,7 +758,7 @@ LteSpectrumPhy::EndRxData ()
while (itTb!=m_expectedTbs.end ())
{
if (m_dataErrorModelEnabled)
if ((m_dataErrorModelEnabled)&&(m_rxPacketBurstList.size ()>0)) // avoid to check for errors when there is no actual data transmitted
{
double errorRate = LteMiErrorModel::GetTbError (m_sinrPerceived, (*itTb).second.rbBitmap, (*itTb).second.size, (*itTb).second.mcs);
(*itTb).second.corrupt = m_random->GetValue () > errorRate ? false : true;

View File

@@ -643,6 +643,7 @@ LteUePhy::SubframeIndication (uint32_t frameNo, uint32_t subframeNo)
// send only PUCCH (ideal: fake null bandwidth signal)
if (ctrlMsg.size ()>0)
{
NS_LOG_LOGIC (this << " UE - start TX PUCCH (NO PUSCH)");
std::vector <int> dlRb;
SetSubChannelsForTransmission (dlRb);
m_uplinkSpectrumPhy->StartTxDataFrame (pb, ctrlMsg, UL_DATA_DURATION);