/*
 * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The OpenAirInterface Software Alliance licenses this file to You under
 * the OAI Public License, Version 1.1  (the "License"); you may not use this file
 * except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.openairinterface.org/?page_id=698
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *-------------------------------------------------------------------------------
 * For more information about the OpenAirInterface (OAI) Software Alliance:
 *      contact@openairinterface.org
 */

/*! \file pre_processor.c
 * \brief eNB scheduler preprocessing function prior to scheduling
 * \author Navid Nikaein and Ankit Bhamri
 * \date 2013 - 2014
 * \email navid.nikaein@eurecom.fr
 * \version 1.0
 * @ingroup _mac

 */

#define _GNU_SOURCE
#include <stdlib.h>

#include "assertions.h"
#include "LAYER2/MAC/mac.h"
#include "LAYER2/MAC/mac_proto.h"
#include "LAYER2/MAC/mac_extern.h"
#include "common/utils/LOG/log.h"
#include "common/utils/LOG/vcd_signal_dumper.h"
#include "UTIL/OPT/opt.h"
#include "OCG.h"
#include "OCG_extern.h"
#include "RRC/LTE/rrc_extern.h"
#include "RRC/L2_INTERFACE/openair_rrc_L2_interface.h"
#include "rlc.h"
#include "PHY/LTE_TRANSPORT/transport_common_proto.h"

#include "common/ran_context.h"

extern RAN_CONTEXT_t RC;

#define DEBUG_eNB_SCHEDULER 1
#define DEBUG_HEADER_PARSING 1
//#define DEBUG_PACKET_TRACE 1

//#define ICIC 0

/* this function checks that get_eNB_UE_stats returns
 * a non-NULL pointer for all the active CCs of a UE
 */
/*
int phy_stats_exist(module_id_t Mod_id, int rnti)
{
  int CC_id;
  int i;
  int UE_id          = find_UE_id(Mod_id, rnti);
  UE_list_t *UE_list = &RC.mac[Mod_id]->UE_list;
  if (UE_id == -1) {
    LOG_W(MAC, "[eNB %d] UE %x not found, should be there (in phy_stats_exist)\n",
    Mod_id, rnti);
    return 0;
  }
  if (UE_list->numactiveCCs[UE_id] == 0) {
    LOG_W(MAC, "[eNB %d] UE %x has no active CC (in phy_stats_exist)\n",
    Mod_id, rnti);
    return 0;
  }
  for (i = 0; i < UE_list->numactiveCCs[UE_id]; i++) {
    CC_id = UE_list->ordered_CCids[i][UE_id];
    if (mac_xface->get_eNB_UE_stats(Mod_id, CC_id, rnti) == NULL)
      return 0;
  }
  return 1;
}
*/

// This function stores the downlink buffer for all the logical channels
void
store_dlsch_buffer(module_id_t Mod_id,
                   int slice_idx,
                   frame_t frameP,
                   sub_frame_t subframeP) {
  int UE_id, lcid;
  rnti_t rnti;
  mac_rlc_status_resp_t rlc_status;
  UE_list_t *UE_list = &RC.mac[Mod_id]->UE_list;
  UE_TEMPLATE *UE_template;

  for (UE_id = 0; UE_id < MAX_MOBILES_PER_ENB; UE_id++) {
    if (UE_list->active[UE_id] != TRUE)
      continue;

    if (!ue_dl_slice_membership(Mod_id, UE_id, slice_idx))
      continue;

    UE_template = &UE_list->UE_template[UE_PCCID(Mod_id, UE_id)][UE_id];
    // clear logical channel interface variables
    UE_template->dl_buffer_total = 0;
    UE_template->dl_pdus_total = 0;

    for (lcid = 0; lcid < MAX_NUM_LCID; ++lcid) {
      UE_template->dl_buffer_info[lcid] = 0;
      UE_template->dl_pdus_in_buffer[lcid] = 0;
      UE_template->dl_buffer_head_sdu_creation_time[lcid] = 0;
      UE_template->dl_buffer_head_sdu_remaining_size_to_send[lcid] = 0;
    }

    rnti = UE_RNTI(Mod_id, UE_id);

    for (lcid = 0; lcid < MAX_NUM_LCID; ++lcid) {    // loop over all the logical channels
      rlc_status = mac_rlc_status_ind(Mod_id, rnti, Mod_id, frameP, subframeP,
                                      ENB_FLAG_YES, MBMS_FLAG_NO, lcid, 0
    #if (LTE_RRC_VERSION >= MAKE_VERSION(14, 0, 0))
                                      ,0, 0
    #endif
                                     );
      UE_template->dl_buffer_info[lcid] = rlc_status.bytes_in_buffer;    //storing the dlsch buffer for each logical channel
      UE_template->dl_pdus_in_buffer[lcid] = rlc_status.pdus_in_buffer;
      UE_template->dl_buffer_head_sdu_creation_time[lcid] = rlc_status.head_sdu_creation_time;
      UE_template->dl_buffer_head_sdu_creation_time_max =
        cmax(UE_template->dl_buffer_head_sdu_creation_time_max, rlc_status.head_sdu_creation_time);
      UE_template->dl_buffer_head_sdu_remaining_size_to_send[lcid] = rlc_status.head_sdu_remaining_size_to_send;
      UE_template->dl_buffer_head_sdu_is_segmented[lcid] = rlc_status.head_sdu_is_segmented;
      UE_template->dl_buffer_total += UE_template->dl_buffer_info[lcid];    //storing the total dlsch buffer
      UE_template->dl_pdus_total += UE_template->dl_pdus_in_buffer[lcid];

      #ifdef DEBUG_eNB_SCHEDULER
      /* note for dl_buffer_head_sdu_remaining_size_to_send[lcid] :
       * 0 if head SDU has not been segmented (yet), else remaining size not already segmented and sent
       */
      if (UE_template->dl_buffer_info[lcid] > 0)
        LOG_D(MAC,
              "[eNB %d][SLICE %d] Frame %d Subframe %d : RLC status for UE %d in LCID%d: total of %d pdus and size %d, head sdu queuing time %d, remaining size %d, is segmented %d \n",
              Mod_id, RC.mac[Mod_id]->slice_info.dl[slice_idx].id, frameP,
              subframeP, UE_id, lcid, UE_template->dl_pdus_in_buffer[lcid],
              UE_template->dl_buffer_info[lcid],
              UE_template->dl_buffer_head_sdu_creation_time[lcid],
              UE_template->dl_buffer_head_sdu_remaining_size_to_send[lcid],
              UE_template->dl_buffer_head_sdu_is_segmented[lcid]);
      #endif

    }

    if (UE_template->dl_buffer_total > 0)
      LOG_D(MAC,
            "[eNB %d] Frame %d Subframe %d : RLC status for UE %d : total DL buffer size %d and total number of pdu %d \n",
            Mod_id, frameP, subframeP, UE_id,
            UE_template->dl_buffer_total,
            UE_template->dl_pdus_total);
  }
}
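
/*
 * Note (descriptive comment added for readability): the per-LCID values
 * gathered here feed the rest of the preprocessor. dl_buffer_total drives
 * assign_rbs_required(), while dl_buffer_info[1] + dl_buffer_info[2]
 * (SRB1/SRB2) and dl_buffer_head_sdu_creation_time_max are used as sorting
 * criteria in ue_dl_compare().
 */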

int cqi2mcs(int cqi) {
  return cqi_to_mcs[cqi];
}

// This function returns the estimated number of RBs required by each UE for downlink scheduling
void
assign_rbs_required(module_id_t Mod_id,
                    int slice_idx,
                    frame_t frameP,
                    sub_frame_t subframe,
                    uint16_t nb_rbs_required[NFAPI_CC_MAX][MAX_MOBILES_PER_ENB],
                    int min_rb_unit[NFAPI_CC_MAX]) {
  uint16_t TBS = 0;
  int UE_id, n, i, j, CC_id, pCCid, tmp;
  UE_list_t *UE_list = &RC.mac[Mod_id]->UE_list;
  slice_info_t *sli = &RC.mac[Mod_id]->slice_info;
  eNB_UE_STATS *eNB_UE_stats, *eNB_UE_stats_i, *eNB_UE_stats_j;
  int N_RB_DL;

  // clear rb allocations across all CC_id
  for (UE_id = 0; UE_id < MAX_MOBILES_PER_ENB; UE_id++) {
    if (UE_list->active[UE_id] != TRUE) continue;

    if (!ue_dl_slice_membership(Mod_id, UE_id, slice_idx)) continue;

    pCCid = UE_PCCID(Mod_id, UE_id);

    //update CQI information across component carriers
    for (n = 0; n < UE_list->numactiveCCs[UE_id]; n++) {
      CC_id = UE_list->ordered_CCids[n][UE_id];
      eNB_UE_stats = &UE_list->eNB_UE_stats[CC_id][UE_id];
      //      eNB_UE_stats->dlsch_mcs1 = cmin(cqi_to_mcs[UE_list->UE_sched_ctrl[UE_id].dl_cqi[CC_id]], sli->dl[slice_idx].maxmcs);
      eNB_UE_stats->dlsch_mcs1 = cmin(cqi2mcs(UE_list->UE_sched_ctrl[UE_id].dl_cqi[CC_id]), sli->dl[slice_idx].maxmcs);
    }

    // provide the list of CCs sorted according to MCS
    for (i = 0; i < UE_list->numactiveCCs[UE_id]; ++i) {
      eNB_UE_stats_i = &UE_list->eNB_UE_stats[UE_list->ordered_CCids[i][UE_id]][UE_id];

      for (j = i + 1; j < UE_list->numactiveCCs[UE_id]; j++) {
        DevAssert(j < NFAPI_CC_MAX);
        eNB_UE_stats_j = &UE_list->eNB_UE_stats[UE_list->ordered_CCids[j][UE_id]][UE_id];

        if (eNB_UE_stats_j->dlsch_mcs1 > eNB_UE_stats_i->dlsch_mcs1) {
          tmp = UE_list->ordered_CCids[i][UE_id];
          UE_list->ordered_CCids[i][UE_id] = UE_list->ordered_CCids[j][UE_id];
          UE_list->ordered_CCids[j][UE_id] = tmp;
        }
      }
    }

    if (UE_list->UE_template[pCCid][UE_id].dl_buffer_total > 0) {
      LOG_D(MAC, "[preprocessor] assign RB for UE %d\n", UE_id);

      for (i = 0; i < UE_list->numactiveCCs[UE_id]; i++) {
        CC_id = UE_list->ordered_CCids[i][UE_id];
        eNB_UE_stats = &UE_list->eNB_UE_stats[CC_id][UE_id];

        if (eNB_UE_stats->dlsch_mcs1 == 0) {
          nb_rbs_required[CC_id][UE_id] = 4;    // don't let the TBS get too small
        } else {
          nb_rbs_required[CC_id][UE_id] = min_rb_unit[CC_id];
        }

        TBS = get_TBS_DL(eNB_UE_stats->dlsch_mcs1, nb_rbs_required[CC_id][UE_id]);
        LOG_D(MAC,
              "[preprocessor] start RB assignment for UE %d CC_id %d dl buffer %d (RB unit %d, MCS %d, TBS %d) \n",
              UE_id, CC_id,
              UE_list->UE_template[pCCid][UE_id].dl_buffer_total,
              nb_rbs_required[CC_id][UE_id],
              eNB_UE_stats->dlsch_mcs1, TBS);
        N_RB_DL = to_prb(RC.mac[Mod_id]->common_channels[CC_id].mib->message.dl_Bandwidth);
        UE_list->UE_sched_ctrl[UE_id].max_rbs_allowed_slice[CC_id][slice_idx] =
          nb_rbs_allowed_slice(sli->dl[slice_idx].pct, N_RB_DL);

        /* calculating required number of RBs for each UE */
        while (TBS < UE_list->UE_template[pCCid][UE_id].dl_buffer_total) {
          nb_rbs_required[CC_id][UE_id] += min_rb_unit[CC_id];

          if (nb_rbs_required[CC_id][UE_id] > UE_list->UE_sched_ctrl[UE_id].max_rbs_allowed_slice[CC_id][slice_idx]) {
            TBS = get_TBS_DL(eNB_UE_stats->dlsch_mcs1, UE_list->UE_sched_ctrl[UE_id].max_rbs_allowed_slice[CC_id][slice_idx]);
            nb_rbs_required[CC_id][UE_id] = UE_list->UE_sched_ctrl[UE_id].max_rbs_allowed_slice[CC_id][slice_idx];
            break;
          }

          TBS = get_TBS_DL(eNB_UE_stats->dlsch_mcs1, nb_rbs_required[CC_id][UE_id]);
        } // end of while

        LOG_D(MAC,
              "[eNB %d] Frame %d: UE %d on CC %d: RB unit %d,  nb_required RB %d (TBS %d, mcs %d)\n",
              Mod_id, frameP, UE_id, CC_id, min_rb_unit[CC_id],
              nb_rbs_required[CC_id][UE_id], TBS,
              eNB_UE_stats->dlsch_mcs1);
        sli->pre_processor_results[slice_idx].mcs[CC_id][UE_id] = eNB_UE_stats->dlsch_mcs1;
      }
    }
  }
}
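
/*
 * Summary of the estimation above (descriptive only, no extra behaviour):
 * starting from min_rb_unit[CC_id] RBs (or 4 RBs when the reported MCS is 0),
 * the while loop grows the allocation in steps of min_rb_unit[CC_id] until
 * get_TBS_DL(mcs, nb_rbs) covers dl_buffer_total, and the result is clamped to
 * the slice limit nb_rbs_allowed_slice(pct, N_RB_DL) stored in
 * max_rbs_allowed_slice[CC_id][slice_idx].
 */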


// This function scans all CC_ids for a particular UE to find the maximum round index of its HARQ processes
int
maxround(module_id_t Mod_id, uint16_t rnti, int frame,
         sub_frame_t subframe, uint8_t ul_flag) {
  uint8_t round, round_max = 0, UE_id;
  int CC_id, harq_pid;
  UE_list_t *UE_list = &RC.mac[Mod_id]->UE_list;
  COMMON_channels_t *cc;

  for (CC_id = 0; CC_id < RC.nb_mac_CC[Mod_id]; CC_id++) {
    cc = &RC.mac[Mod_id]->common_channels[CC_id];
    UE_id = find_UE_id(Mod_id, rnti);
    harq_pid = frame_subframe2_dl_harq_pid(cc->tdd_Config,frame,subframe);
    round = UE_list->UE_sched_ctrl[UE_id].round[CC_id][harq_pid];

    if (round > round_max) {
      round_max = round;
    }
  }

  return round_max;
}
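
/*
 * Note (descriptive, based on how round is tested elsewhere in this file): a
 * round value of 8 appears to be the convention for "HARQ process idle / new
 * transmission possible" (see the "round != 8" checks in the accounting
 * function below), while smaller values indicate an active HARQ process
 * awaiting retransmission.
 */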

// This function scans all CC_ids for a particular UE to find the maximum DL CQI
// it returns -1 if the UE is not found in PHY layer (get_eNB_UE_stats gives NULL)
int maxcqi(module_id_t Mod_id, int32_t UE_id) {
  UE_list_t *UE_list = &RC.mac[Mod_id]->UE_list;
  int CC_id, n;
  int CQI = 0;

  for (n = 0; n < UE_list->numactiveCCs[UE_id]; n++) {
    CC_id = UE_list->ordered_CCids[n][UE_id];

    if (UE_list->UE_sched_ctrl[UE_id].dl_cqi[CC_id] > CQI) {
      CQI = UE_list->UE_sched_ctrl[UE_id].dl_cqi[CC_id];
    }
  }

  return CQI;
}

long min_lcgidpriority(module_id_t Mod_id, int32_t UE_id) {
  UE_list_t *UE_list = &RC.mac[Mod_id]->UE_list;
  int i;
  int pCC_id = UE_PCCID(Mod_id, UE_id);
  long ret = UE_list->UE_template[pCC_id][UE_id].lcgidpriority[0];

  for (i = 1; i < 11; ++i) {
    if (UE_list->UE_template[pCC_id][UE_id].lcgidpriority[i] < ret)
      ret = UE_list->UE_template[pCC_id][UE_id].lcgidpriority[i];
  }

  return ret;
}

struct sort_ue_dl_params {
  int Mod_idP;
  int frameP;
  int subframeP;
  int slice_idx;
};

static int ue_dl_compare(const void *_a, const void *_b, void *_params) {
  struct sort_ue_dl_params *params = _params;
  UE_list_t *UE_list = &RC.mac[params->Mod_idP]->UE_list;
  int i;
  int slice_idx = params->slice_idx;
  int UE_id1 = *(const int *) _a;
  int UE_id2 = *(const int *) _b;
  int rnti1 = UE_RNTI(params->Mod_idP, UE_id1);
  int pCC_id1 = UE_PCCID(params->Mod_idP, UE_id1);
  int round1 = maxround(params->Mod_idP, rnti1, params->frameP, params->subframeP, 1);
  int rnti2 = UE_RNTI(params->Mod_idP, UE_id2);
  int pCC_id2 = UE_PCCID(params->Mod_idP, UE_id2);
  int round2 = maxround(params->Mod_idP, rnti2, params->frameP, params->subframeP, 1);
  int cqi1 = maxcqi(params->Mod_idP, UE_id1);
  int cqi2 = maxcqi(params->Mod_idP, UE_id2);
  long lcgid1 = min_lcgidpriority(params->Mod_idP, UE_id1);
  long lcgid2 = min_lcgidpriority(params->Mod_idP, UE_id2);

  for (i = 0; i < CR_NUM; ++i) {
    switch (UE_list->sorting_criteria[slice_idx][i]) {
      case CR_ROUND :
        if (round1 > round2)
          return -1;

        if (round1 < round2)
          return 1;

        break;

      case CR_SRB12 :
        if (UE_list->UE_template[pCC_id1][UE_id1].dl_buffer_info[1] +
            UE_list->UE_template[pCC_id1][UE_id1].dl_buffer_info[2] >
            UE_list->UE_template[pCC_id2][UE_id2].dl_buffer_info[1] +
            UE_list->UE_template[pCC_id2][UE_id2].dl_buffer_info[2])
          return -1;

        if (UE_list->UE_template[pCC_id1][UE_id1].dl_buffer_info[1] +
            UE_list->UE_template[pCC_id1][UE_id1].dl_buffer_info[2] <
            UE_list->UE_template[pCC_id2][UE_id2].dl_buffer_info[1] +
            UE_list->UE_template[pCC_id2][UE_id2].dl_buffer_info[2])
          return 1;

        break;

      case CR_HOL :
        if (UE_list-> UE_template[pCC_id1][UE_id1].dl_buffer_head_sdu_creation_time_max >
            UE_list-> UE_template[pCC_id2][UE_id2].dl_buffer_head_sdu_creation_time_max)
          return -1;

        if (UE_list-> UE_template[pCC_id1][UE_id1].dl_buffer_head_sdu_creation_time_max <
            UE_list-> UE_template[pCC_id2][UE_id2].dl_buffer_head_sdu_creation_time_max)
          return 1;

        break;

      case CR_LC :
        if (UE_list->UE_template[pCC_id1][UE_id1].dl_buffer_total >
            UE_list->UE_template[pCC_id2][UE_id2].dl_buffer_total)
          return -1;

        if (UE_list->UE_template[pCC_id1][UE_id1].dl_buffer_total <
            UE_list->UE_template[pCC_id2][UE_id2].dl_buffer_total)
          return 1;

        break;

      case CR_CQI :
        if (cqi1 > cqi2)
          return -1;

        if (cqi1 < cqi2)
          return 1;

        break;

      case CR_LCP :
        if (lcgid1 < lcgid2)
          return -1;

        if (lcgid1 > lcgid2)
          return 1;

        break;

      default :
        break;
    }
  }

  return 0;
}
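
/*
 * Note (descriptive): returning -1 ranks UE_id1 ahead of UE_id2, so the list
 * built by qsort_r() in sort_UEs() ends up ordered from highest to lowest
 * scheduling priority; later criteria are only evaluated to break ties on the
 * earlier ones.
 */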

void decode_sorting_policy(module_id_t Mod_idP, int slice_idx) {
  int i;
  UE_list_t *UE_list = &RC.mac[Mod_idP]->UE_list;
  uint32_t policy = RC.mac[Mod_idP]->slice_info.dl[slice_idx].sorting;
  uint32_t mask = 0x0000000F;
  uint16_t criterion;

  for (i = 0; i < CR_NUM; ++i) {
    criterion = (uint16_t) (policy >> 4 * (CR_NUM - 1 - i) & mask);

    if (criterion >= CR_NUM) {
      LOG_W(MAC,
            "Invalid criterion in slice index %d ID %d policy, revert to default policy \n",
            slice_idx, RC.mac[Mod_idP]->slice_info.dl[slice_idx].id);
      RC.mac[Mod_idP]->slice_info.dl[slice_idx].sorting = 0x12345;
      break;
    }

    UE_list->sorting_criteria[slice_idx][i] = criterion;
  }
}
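
/*
 * Example (illustrative, assuming the CR_* enumeration follows the order used
 * in ue_dl_compare(), i.e. CR_ROUND = 0 ... CR_LCP = 5): the default policy
 * word 0x12345 is read one nibble at a time, most significant nibble first,
 * and decodes to the criteria sequence
 *   CR_ROUND, CR_SRB12, CR_HOL, CR_LC, CR_CQI, CR_LCP.
 */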

void decode_slice_positioning(module_id_t Mod_idP,
                              int slice_idx,
                              uint8_t slice_allocation_mask[NFAPI_CC_MAX][N_RBG_MAX]) {
  uint8_t CC_id;
  int RBG, start_frequency, end_frequency;

  // Init slice_alloc_mask
  for (CC_id = 0; CC_id < RC.nb_mac_CC[Mod_idP]; ++CC_id) {
    for (RBG = 0; RBG < N_RBG_MAX; ++RBG) {
      slice_allocation_mask[CC_id][RBG] = 0;
    }
  }

  start_frequency = RC.mac[Mod_idP]->slice_info.dl[slice_idx].pos_low;
  end_frequency = RC.mac[Mod_idP]->slice_info.dl[slice_idx].pos_high;

  for (CC_id = 0; CC_id < RC.nb_mac_CC[Mod_idP]; ++CC_id) {
    for (RBG = start_frequency; RBG <= end_frequency; ++RBG) {
      slice_allocation_mask[CC_id][RBG] = 1;
    }
  }
}
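
/*
 * Example (illustrative): with pos_low = 2 and pos_high = 5, the mask of every
 * carrier becomes 0 0 1 1 1 1 0 ... 0, i.e. only RBGs 2..5 are offered to this
 * slice by the allocation steps below.
 */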


// This function sorts the UEs according to their DLSCH buffer and CQI
void 
sort_UEs(module_id_t Mod_idP, 
         int slice_idx, 
         int frameP, 
         sub_frame_t subframeP)
{
  int i;
  int list[MAX_MOBILES_PER_ENB];
  int list_size = 0;
  struct sort_ue_dl_params params = {Mod_idP, frameP, subframeP, slice_idx};
  UE_list_t *UE_list = &RC.mac[Mod_idP]->UE_list;

  for (i = 0; i < MAX_MOBILES_PER_ENB; i++) {

    if (UE_list->active[i] == TRUE &&
        UE_RNTI(Mod_idP, i) != NOT_A_RNTI &&
        UE_list->UE_sched_ctrl[i].ul_out_of_sync != 1 &&
        ue_dl_slice_membership(Mod_idP, i, slice_idx)) {
      list[list_size++] = i;
    }
  }

  decode_sorting_policy(Mod_idP, slice_idx);
  qsort_r(list, list_size, sizeof(int), ue_dl_compare, &params);

  if (list_size) {
    for (i = 0; i < list_size - 1; ++i)
      UE_list->next[list[i]] = list[i + 1];

    UE_list->next[list[list_size - 1]] = -1;
    UE_list->head = list[0];
  } else {
    UE_list->head = -1;
  }
}
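
/*
 * The sorted order is exposed as a linked list rather than a sorted array.
 * A hypothetical caller (sketch only, not compiled and not part of the
 * original file) would walk it as follows:
 */
#if 0
for (int UE_id = UE_list->head; UE_id >= 0; UE_id = UE_list->next[UE_id]) {
  /* UEs are visited in decreasing scheduling priority */
}
#endif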

void dlsch_scheduler_pre_processor_partitioning(module_id_t Mod_id,
    int slice_idx,
    const uint8_t rbs_retx[NFAPI_CC_MAX]) {
  int UE_id, CC_id, N_RB_DL, i;
  UE_list_t *UE_list = &RC.mac[Mod_id]->UE_list;
  UE_sched_ctrl *ue_sched_ctl;
  uint16_t available_rbs;

  for (UE_id = UE_list->head; UE_id >= 0; UE_id = UE_list->next[UE_id]) {
    if (UE_RNTI(Mod_id, UE_id) == NOT_A_RNTI) continue;

    if (UE_list->UE_sched_ctrl[UE_id].ul_out_of_sync == 1) continue;

    if (!ue_dl_slice_membership(Mod_id, UE_id, slice_idx)) continue;

    ue_sched_ctl = &UE_list->UE_sched_ctrl[UE_id];

    for (i = 0; i < UE_num_active_CC(UE_list, UE_id); ++i) {
      CC_id = UE_list->ordered_CCids[i][UE_id];
      N_RB_DL = to_prb(RC.mac[Mod_id]->common_channels[CC_id].mib->message.dl_Bandwidth);
      available_rbs = nb_rbs_allowed_slice(RC.mac[Mod_id]->slice_info.dl[slice_idx].pct, N_RB_DL);

      if (rbs_retx[CC_id] < available_rbs)
        ue_sched_ctl->max_rbs_allowed_slice[CC_id][slice_idx] = available_rbs - rbs_retx[CC_id];
      else
        ue_sched_ctl->max_rbs_allowed_slice[CC_id][slice_idx] = 0;
    }
  }
}
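
/*
 * Worked example (illustrative figures, assuming nb_rbs_allowed_slice() simply
 * scales N_RB_DL by the slice percentage): with N_RB_DL = 50, a slice share of
 * 40% and 6 RBs already booked for retransmissions on a CC, every UE of the
 * slice ends up with max_rbs_allowed_slice = 20 - 6 = 14 RBs on that CC.
 */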

void dlsch_scheduler_pre_processor_accounting(module_id_t Mod_id,
    int slice_idx,
    frame_t frameP,
    sub_frame_t subframeP,
    int min_rb_unit[NFAPI_CC_MAX],
    uint16_t nb_rbs_required[NFAPI_CC_MAX][MAX_MOBILES_PER_ENB],
    uint16_t nb_rbs_accounted[NFAPI_CC_MAX][MAX_MOBILES_PER_ENB]) {
  int UE_id, CC_id;
  int i;
  rnti_t rnti;
  uint8_t harq_pid, round;
  uint16_t available_rbs[NFAPI_CC_MAX];
  uint8_t rbs_retx[NFAPI_CC_MAX];
  uint16_t average_rbs_per_user[NFAPI_CC_MAX];
  int total_ue_count[NFAPI_CC_MAX];
  int ue_count_newtx[NFAPI_CC_MAX];
  int ue_count_retx[NFAPI_CC_MAX];
  //uint8_t ue_retx_flag[NFAPI_CC_MAX][MAX_MOBILES_PER_ENB];
  UE_list_t *UE_list = &RC.mac[Mod_id]->UE_list;
  UE_sched_ctrl *ue_sched_ctl;
  COMMON_channels_t *cc;

  // Reset
  for (CC_id = 0; CC_id < RC.nb_mac_CC[Mod_id]; CC_id++) {
    total_ue_count[CC_id] = 0;
    ue_count_newtx[CC_id] = 0;
    ue_count_retx[CC_id] = 0;
    rbs_retx[CC_id] = 0;
    average_rbs_per_user[CC_id] = 0;
    available_rbs[CC_id] = 0;
    //for (UE_id = 0; UE_id < NFAPI_CC_MAX; ++UE_id) {
    //  ue_retx_flag[CC_id][UE_id] = 0;
    //}
  }

  // Find total UE count, and account the RBs required for retransmissions
  for (UE_id = UE_list->head; UE_id >= 0; UE_id = UE_list->next[UE_id]) {
    rnti = UE_RNTI(Mod_id, UE_id);

    if (rnti == NOT_A_RNTI) continue;

    if (UE_list->UE_sched_ctrl[UE_id].ul_out_of_sync == 1) continue;

    if (!ue_dl_slice_membership(Mod_id, UE_id, slice_idx)) continue;

    for (i = 0; i < UE_num_active_CC(UE_list, UE_id); ++i) {
      CC_id = UE_list->ordered_CCids[i][UE_id];
      ue_sched_ctl = &UE_list->UE_sched_ctrl[UE_id];
      cc = &RC.mac[Mod_id]->common_channels[CC_id];
      harq_pid = frame_subframe2_dl_harq_pid(cc->tdd_Config,frameP,subframeP);
      round = ue_sched_ctl->round[CC_id][harq_pid];

      if (nb_rbs_required[CC_id][UE_id] > 0) {
        total_ue_count[CC_id]++;
      }

      if (round != 8) {
        nb_rbs_required[CC_id][UE_id] = UE_list->UE_template[CC_id][UE_id].nb_rb[harq_pid];
        rbs_retx[CC_id] += nb_rbs_required[CC_id][UE_id];
        ue_count_retx[CC_id]++;
        //ue_retx_flag[CC_id][UE_id] = 1;
      } else {
        ue_count_newtx[CC_id]++;
      }
    }
  }

  // PARTITIONING
  // Reduces the available RBs according to slicing configuration
  dlsch_scheduler_pre_processor_partitioning(Mod_id, slice_idx, rbs_retx);

  for (CC_id = 0; CC_id < RC.nb_mac_CC[Mod_id]; ++CC_id) {
    if (UE_list->head < 0) continue; // no UEs in list

    // max_rbs_allowed_slice is saved in every UE, so take it from the first one
    ue_sched_ctl = &UE_list->UE_sched_ctrl[UE_list->head];
    available_rbs[CC_id] = ue_sched_ctl->max_rbs_allowed_slice[CC_id][slice_idx];
  }

  switch (RC.mac[Mod_id]->slice_info.dl[slice_idx].accounting) {
    // If greedy scheduling, try to account all the required RBs
    case POL_GREEDY:
      for (UE_id = UE_list->head; UE_id >= 0; UE_id = UE_list->next[UE_id]) {
        rnti = UE_RNTI(Mod_id, UE_id);

        if (rnti == NOT_A_RNTI) continue;

        if (UE_list->UE_sched_ctrl[UE_id].ul_out_of_sync == 1) continue;

        if (!ue_dl_slice_membership(Mod_id, UE_id, slice_idx)) continue;

        for (i = 0; i < UE_num_active_CC(UE_list, UE_id); i++) {
          CC_id = UE_list->ordered_CCids[i][UE_id];

          if (available_rbs[CC_id] == 0) continue;

          nb_rbs_accounted[CC_id][UE_id] = cmin(nb_rbs_required[CC_id][UE_id], available_rbs[CC_id]);
          available_rbs[CC_id] -= nb_rbs_accounted[CC_id][UE_id];
        }
      }

      break;

    // Use the old, fair algorithm
    // Loop over all active UEs and account the avg number of RBs to each UE, based on all non-retx UEs.
    // case POL_FAIR:
    default:

      // FIXME: This is not ideal, why loop on UEs to find average_rbs_per_user[], that is per-CC?
      // TODO: Look how to loop on active CCs only without using the UE_num_active_CC() function.
      for (UE_id = UE_list->head; UE_id >= 0; UE_id = UE_list->next[UE_id]) {
        rnti = UE_RNTI(Mod_id, UE_id);

        if (rnti == NOT_A_RNTI) continue;

        if (UE_list->UE_sched_ctrl[UE_id].ul_out_of_sync == 1) continue;

        if (!ue_dl_slice_membership(Mod_id, UE_id, slice_idx)) continue;

        for (i = 0; i < UE_num_active_CC(UE_list, UE_id); ++i) {
          CC_id = UE_list->ordered_CCids[i][UE_id];
          ue_sched_ctl = &UE_list->UE_sched_ctrl[UE_id];
          available_rbs[CC_id] = ue_sched_ctl->max_rbs_allowed_slice[CC_id][slice_idx];

          if (ue_count_newtx[CC_id] == 0) {
            average_rbs_per_user[CC_id] = 0;
          } else if (min_rb_unit[CC_id]*ue_count_newtx[CC_id] <= available_rbs[CC_id]) {
            average_rbs_per_user[CC_id] = (uint16_t)floor(available_rbs[CC_id]/ue_count_newtx[CC_id]);
          } else {
            // not enough RBs for every UE with a new transmission: give each one the minimum RB unit
            average_rbs_per_user[CC_id] = (uint16_t)min_rb_unit[CC_id];
          }
        }
      }

      // note: nb_rbs_required is assigned according to total_buffer_dl
      // extend nb_rbs_required to capture per LCID RB required
      for (UE_id = UE_list->head; UE_id >= 0; UE_id = UE_list->next[UE_id]) {
        rnti = UE_RNTI(Mod_id, UE_id);

        if (rnti == NOT_A_RNTI) continue;

        if (UE_list->UE_sched_ctrl[UE_id].ul_out_of_sync == 1) continue;

        if (!ue_dl_slice_membership(Mod_id, UE_id, slice_idx)) continue;

        for (i = 0; i < UE_num_active_CC(UE_list, UE_id); i++) {
          CC_id = UE_list->ordered_CCids[i][UE_id];
          nb_rbs_accounted[CC_id][UE_id] = cmin(average_rbs_per_user[CC_id], nb_rbs_required[CC_id][UE_id]);
        }
      }

      break;
  }

  // Check retransmissions
  // TODO: Do this once at the beginning
  for (UE_id = UE_list->head; UE_id >= 0; UE_id = UE_list->next[UE_id]) {
    rnti = UE_RNTI(Mod_id, UE_id);

    if (rnti == NOT_A_RNTI) continue;

    if (UE_list->UE_sched_ctrl[UE_id].ul_out_of_sync == 1) continue;

    if (!ue_dl_slice_membership(Mod_id, UE_id, slice_idx)) continue;

    for (i = 0; i < UE_num_active_CC(UE_list, UE_id); i++) {
      CC_id = UE_list->ordered_CCids[i][UE_id];
      ue_sched_ctl = &UE_list->UE_sched_ctrl[UE_id];
      cc = &RC.mac[Mod_id]->common_channels[CC_id];
      harq_pid = frame_subframe2_dl_harq_pid(cc->tdd_Config,frameP,subframeP);
      round = ue_sched_ctl->round[CC_id][harq_pid];

      // control channel or retransmission
      /* TODO: do we have to check for retransmission? */
      if (mac_eNB_get_rrc_status(Mod_id, rnti) < RRC_RECONFIGURED || round != 8) {
        nb_rbs_accounted[CC_id][UE_id] = nb_rbs_required[CC_id][UE_id];
      }
    }
  }
}
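
/*
 * Accounting example (illustrative): in the default (fair) branch, with
 * available_rbs = 45, min_rb_unit = 2 and ue_count_newtx = 4, each new
 * transmission is pre-accounted floor(45 / 4) = 11 RBs, further capped by the
 * UE's own nb_rbs_required; retransmissions and UEs not yet RRC_RECONFIGURED
 * keep their full requirement instead.
 */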

void dlsch_scheduler_pre_processor_positioning(module_id_t Mod_id,
    int slice_idx,
    int min_rb_unit[NFAPI_CC_MAX],
    uint16_t nb_rbs_required[NFAPI_CC_MAX][MAX_MOBILES_PER_ENB],
    uint16_t nb_rbs_accounted[NFAPI_CC_MAX][MAX_MOBILES_PER_ENB],
    uint16_t nb_rbs_remaining[NFAPI_CC_MAX][MAX_MOBILES_PER_ENB],
    uint8_t rballoc_sub[NFAPI_CC_MAX][N_RBG_MAX],
    uint8_t MIMO_mode_indicator[NFAPI_CC_MAX][N_RBG_MAX]) {
  int UE_id, CC_id;
  int i;

  #ifdef TM5
  uint8_t transmission_mode;
  #endif

  uint8_t slice_allocation_mask[NFAPI_CC_MAX][N_RBG_MAX];
  int N_RBG[NFAPI_CC_MAX];
  UE_list_t *UE_list = &RC.mac[Mod_id]->UE_list;
  decode_slice_positioning(Mod_id, slice_idx, slice_allocation_mask);

  for (CC_id = 0; CC_id < RC.nb_mac_CC[Mod_id]; CC_id++) {
    COMMON_channels_t *cc = &RC.mac[Mod_id]->common_channels[CC_id];
    N_RBG[CC_id] = to_rbg(cc->mib->message.dl_Bandwidth);
  }

  // Try to allocate accounted RBs
  for (UE_id = UE_list->head; UE_id >= 0; UE_id = UE_list->next[UE_id]) {
    if (UE_RNTI(Mod_id, UE_id) == NOT_A_RNTI) continue;

    if (UE_list->UE_sched_ctrl[UE_id].ul_out_of_sync == 1) continue;

    if (!ue_dl_slice_membership(Mod_id, UE_id, slice_idx)) continue;

    for (i = 0; i < UE_num_active_CC(UE_list, UE_id); i++) {
      CC_id = UE_list->ordered_CCids[i][UE_id];
      nb_rbs_remaining[CC_id][UE_id] = nb_rbs_accounted[CC_id][UE_id];

      #ifdef TM5
      transmission_mode = get_tmode(Mod_id, CC_id, UE_id);
      #endif

      if (nb_rbs_required[CC_id][UE_id] > 0)
        LOG_D(MAC,
              "Step 1: nb_rbs_remaining[%d][%d]= %d (accounted %d, required %d,  pre_nb_available_rbs %d, N_RBG %d, rb_unit %d)\n",
              CC_id,
              UE_id,
              nb_rbs_remaining[CC_id][UE_id],
              nb_rbs_accounted[CC_id][UE_id],
              nb_rbs_required[CC_id][UE_id],
              UE_list->UE_sched_ctrl[UE_id].pre_nb_available_rbs[CC_id],
              N_RBG[CC_id],
              min_rb_unit[CC_id]);

      LOG_T(MAC, "calling dlsch_scheduler_pre_processor_allocate .. \n ");
      dlsch_scheduler_pre_processor_allocate(Mod_id,
                                             UE_id,
                                             CC_id,
                                             N_RBG[CC_id],
                                             min_rb_unit[CC_id],
                                             nb_rbs_required,
                                             nb_rbs_remaining,
                                             rballoc_sub,
                                             slice_allocation_mask,
                                             MIMO_mode_indicator);

      #ifdef TM5
      // data channel TM5: to be revisited
      if ((round == 0) &&
          (transmission_mode == 5) &&
          (ue_sched_ctl->dl_pow_off[CC_id] != 1)) {
        for (j = 0; j < N_RBG[CC_id]; j += 2) {
          if ((((j == (N_RBG[CC_id] - 1))
                && (rballoc_sub[CC_id][j] == 0)
                && (ue_sched_ctl->
                    rballoc_sub_UE[CC_id][j] == 0))
               || ((j < (N_RBG[CC_id] - 1))
                   && (rballoc_sub[CC_id][j + 1] == 0)
                   &&
                   (ue_sched_ctl->rballoc_sub_UE
                    [CC_id][j + 1] == 0)))
              && (nb_rbs_remaining[CC_id][UE_id]
                  > 0)) {
            for (i = UE_list->next[UE_id + 1]; i >= 0;
                 i = UE_list->next[i]) {
              UE_id2 = i;
              rnti2 = UE_RNTI(Mod_id, UE_id2);
              ue_sched_ctl2 =
                &UE_list->UE_sched_ctrl[UE_id2];
              round2 = ue_sched_ctl2->round[CC_id];

              if (rnti2 == NOT_A_RNTI)
                continue;

              if (UE_list->
                  UE_sched_ctrl
                  [UE_id2].ul_out_of_sync == 1)
                continue;

              eNB_UE_stats2 =
                UE_list->
                eNB_UE_stats[CC_id][UE_id2];
              //mac_xface->get_ue_active_harq_pid(Mod_id,CC_id,rnti2,frameP,subframeP,&harq_pid2,&round2,0);

              if ((mac_eNB_get_rrc_status
                   (Mod_id,
                    rnti2) >= RRC_RECONFIGURED)
                  && (round2 == 0)
                  &&
                  (get_tmode(Mod_id, CC_id, UE_id2)
                   == 5)
                  && (ue_sched_ctl->
                      dl_pow_off[CC_id] != 1)) {
                if ((((j == (N_RBG[CC_id] - 1))
                      &&
                      (ue_sched_ctl->rballoc_sub_UE
                       [CC_id][j] == 0))
                     || ((j < (N_RBG[CC_id] - 1))
                         &&
                         (ue_sched_ctl->
                          rballoc_sub_UE[CC_id][j +
                                                1]
                          == 0)))
                    &&
                    (nb_rbs_remaining
                     [CC_id]
                     [UE_id2] > 0)) {
                  if ((((eNB_UE_stats2->
                         DL_pmi_single ^
                         eNB_UE_stats1->
                         DL_pmi_single)
                        << (14 - j)) & 0xc000) == 0x4000) { //MU-MIMO only for 25 RBs configuration
                    rballoc_sub[CC_id][j] = 1;
                    ue_sched_ctl->
                    rballoc_sub_UE[CC_id]
                    [j] = 1;
                    ue_sched_ctl2->
                    rballoc_sub_UE[CC_id]
                    [j] = 1;
                    MIMO_mode_indicator[CC_id]
                    [j] = 0;

                    if (j < N_RBG[CC_id] - 1) {
                      rballoc_sub[CC_id][j +
                                         1] =
                                           1;
                      ue_sched_ctl->
                      rballoc_sub_UE
                      [CC_id][j + 1] = 1;
                      ue_sched_ctl2->rballoc_sub_UE
                      [CC_id][j + 1] = 1;
                      MIMO_mode_indicator
                      [CC_id][j + 1]
                        = 0;
                    }

                    ue_sched_ctl->
                    dl_pow_off[CC_id]
                      = 0;
                    ue_sched_ctl2->
                    dl_pow_off[CC_id]
                      = 0;

                    if ((j == N_RBG[CC_id] - 1)
                        && ((N_RB_DL == 25)
                            || (N_RB_DL ==
                                50))) {
                      nb_rbs_remaining
                      [CC_id][UE_id] =
                        nb_rbs_remaining
                        [CC_id][UE_id] -
                        min_rb_unit[CC_id]
                        + 1;
                      ue_sched_ctl->pre_nb_available_rbs
                      [CC_id] =
                        ue_sched_ctl->pre_nb_available_rbs
                        [CC_id] +
                        min_rb_unit[CC_id]
                        - 1;
                      nb_rbs_remaining
                      [CC_id][UE_id2] =
                        nb_rbs_remaining
                        [CC_id][UE_id2] -
                        min_rb_unit[CC_id]
                        + 1;
                      ue_sched_ctl2->pre_nb_available_rbs
                      [CC_id] =
                        ue_sched_ctl2->pre_nb_available_rbs
                        [CC_id] +
                        min_rb_unit[CC_id]
                        - 1;
                    } else {
                      nb_rbs_remaining
                      [CC_id][UE_id] =
                        nb_rbs_remaining
                        [CC_id][UE_id] - 4;
                      ue_sched_ctl->pre_nb_available_rbs
                      [CC_id] =
                        ue_sched_ctl->pre_nb_available_rbs
                        [CC_id] + 4;
                      nb_rbs_remaining
                      [CC_id][UE_id2] =
                        nb_rbs_remaining
                        [CC_id][UE_id2] -
                        4;
                      ue_sched_ctl2->pre_nb_available_rbs
                      [CC_id] =
                        ue_sched_ctl2->pre_nb_available_rbs
                        [CC_id] + 4;
                    }
                    break;
                  }
                }
              }
            }
          }
        }
      }
      #endif

    }
  }
}

void dlsch_scheduler_pre_processor_intraslice_sharing(module_id_t Mod_id,
    int slice_idx,
    int min_rb_unit[NFAPI_CC_MAX],
    uint16_t nb_rbs_required[NFAPI_CC_MAX][MAX_MOBILES_PER_ENB],
    uint16_t nb_rbs_accounted[NFAPI_CC_MAX][MAX_MOBILES_PER_ENB],
    uint16_t nb_rbs_remaining[NFAPI_CC_MAX][MAX_MOBILES_PER_ENB],
    uint8_t rballoc_sub[NFAPI_CC_MAX][N_RBG_MAX],
    uint8_t MIMO_mode_indicator[NFAPI_CC_MAX][N_RBG_MAX]) {
  int UE_id, CC_id;
  int i;

  #ifdef TM5
  uint8_t transmission_mode;
  #endif

  UE_list_t *UE_list = &RC.mac[Mod_id]->UE_list;
  int N_RBG[NFAPI_CC_MAX];
  slice_info_t *sli = &RC.mac[Mod_id]->slice_info;
  uint8_t (*slice_allocation_mask)[N_RBG_MAX] = sli->pre_processor_results[slice_idx].slice_allocation_mask;
  decode_slice_positioning(Mod_id, slice_idx, slice_allocation_mask);

  for (CC_id = 0; CC_id < RC.nb_mac_CC[Mod_id]; CC_id++) {
    COMMON_channels_t *cc = &RC.mac[Mod_id]->common_channels[CC_id];
    N_RBG[CC_id] = to_rbg(cc->mib->message.dl_Bandwidth);
  }

  // Remaining RBs are allocated to high priority UEs
  for (UE_id = UE_list->head; UE_id >= 0; UE_id = UE_list->next[UE_id]) {
    if (UE_RNTI(Mod_id, UE_id) == NOT_A_RNTI) continue;

    if (UE_list->UE_sched_ctrl[UE_id].ul_out_of_sync == 1) continue;

    if (!ue_dl_slice_membership(Mod_id, UE_id, slice_idx)) continue;

    for (i = 0; i < UE_num_active_CC(UE_list, UE_id); i++) {
      CC_id = UE_list->ordered_CCids[i][UE_id];
      nb_rbs_remaining[CC_id][UE_id] =
        nb_rbs_required[CC_id][UE_id] - nb_rbs_accounted[CC_id][UE_id] + nb_rbs_remaining[CC_id][UE_id];

      if (nb_rbs_remaining[CC_id][UE_id] < 0)
        abort();

      #ifdef TM5
      transmission_mode = get_tmode(Mod_id, CC_id, UE_id);
      #endif

      if (nb_rbs_required[CC_id][UE_id] > 0)
        LOG_D(MAC,
              "Step 2: nb_rbs_remaining[%d][%d]= %d (accounted %d, required %d,  pre_nb_available_rbs %d, N_RBG %d, rb_unit %d)\n",
              CC_id,
              UE_id,
              nb_rbs_remaining[CC_id][UE_id],
              nb_rbs_accounted[CC_id][UE_id],
              nb_rbs_required[CC_id][UE_id],
              UE_list->UE_sched_ctrl[UE_id].pre_nb_available_rbs[CC_id],
              N_RBG[CC_id],
              min_rb_unit[CC_id]);

      LOG_T(MAC, "calling dlsch_scheduler_pre_processor_allocate .. \n ");
      dlsch_scheduler_pre_processor_allocate(Mod_id,
                                             UE_id,
                                             CC_id,
                                             N_RBG[CC_id],
                                             min_rb_unit[CC_id],
                                             nb_rbs_required,
                                             nb_rbs_remaining,
                                             rballoc_sub,
                                             slice_allocation_mask,
                                             MIMO_mode_indicator);

      #ifdef TM5
      // data channel TM5: to be revisited
      if ((round == 0) &&
          (transmission_mode == 5) &&
          (ue_sched_ctl->dl_pow_off[CC_id] != 1)) {
        for (j = 0; j < N_RBG[CC_id]; j += 2) {
          if ((((j == (N_RBG[CC_id] - 1))
                && (rballoc_sub[CC_id][j] == 0)
                && (ue_sched_ctl->
                    rballoc_sub_UE[CC_id][j] == 0))
               || ((j < (N_RBG[CC_id] - 1))
                   && (rballoc_sub[CC_id][j + 1] == 0)
                   &&
                   (ue_sched_ctl->rballoc_sub_UE
                    [CC_id][j + 1] == 0)))
              && (nb_rbs_remaining[CC_id][UE_id]
                  > 0)) {
            for (i = UE_list->next[UE_id + 1]; i >= 0;
                 i = UE_list->next[i]) {
              UE_id2 = i;
              rnti2 = UE_RNTI(Mod_id, UE_id2);
              ue_sched_ctl2 =
                &UE_list->UE_sched_ctrl[UE_id2];
              round2 = ue_sched_ctl2->round[CC_id];

              if (rnti2 == NOT_A_RNTI)
                continue;

              if (UE_list->
                  UE_sched_ctrl
                  [UE_id2].ul_out_of_sync == 1)
                continue;

              eNB_UE_stats2 =
                UE_list->
                eNB_UE_stats[CC_id][UE_id2];
              //mac_xface->get_ue_active_harq_pid(Mod_id,CC_id,rnti2,frameP,subframeP,&harq_pid2,&round2,0);

              if ((mac_eNB_get_rrc_status
                   (Mod_id,
                    rnti2) >= RRC_RECONFIGURED)
                  && (round2 == 0)
                  &&
                  (get_tmode(Mod_id, CC_id, UE_id2)
                   == 5)
                  && (ue_sched_ctl->
                      dl_pow_off[CC_id] != 1)) {
                if ((((j == (N_RBG[CC_id] - 1))
                      &&
                      (ue_sched_ctl->rballoc_sub_UE
                       [CC_id][j] == 0))
                     || ((j < (N_RBG[CC_id] - 1))
                         &&
                         (ue_sched_ctl->
                          rballoc_sub_UE[CC_id][j +
                                                1]
                          == 0)))
                    &&
                    (nb_rbs_remaining
                     [CC_id]
                     [UE_id2] > 0)) {
                  if ((((eNB_UE_stats2->
                         DL_pmi_single ^
                         eNB_UE_stats1->
                         DL_pmi_single)
                        << (14 - j)) & 0xc000) == 0x4000) { //MU-MIMO only for 25 RBs configuration
                    rballoc_sub[CC_id][j] = 1;
                    ue_sched_ctl->
                    rballoc_sub_UE[CC_id]
                    [j] = 1;
                    ue_sched_ctl2->
                    rballoc_sub_UE[CC_id]
                    [j] = 1;
                    MIMO_mode_indicator[CC_id]
                    [j] = 0;

                    if (j < N_RBG[CC_id] - 1) {
                      rballoc_sub[CC_id][j +
                                         1] =
                                           1;
                      ue_sched_ctl->
                      rballoc_sub_UE
                      [CC_id][j + 1] = 1;
                      ue_sched_ctl2->rballoc_sub_UE
                      [CC_id][j + 1] = 1;
                      MIMO_mode_indicator
                      [CC_id][j + 1]
                        = 0;
                    }

                    ue_sched_ctl->
                    dl_pow_off[CC_id]
                      = 0;
                    ue_sched_ctl2->
                    dl_pow_off[CC_id]
                      = 0;

                    if ((j == N_RBG[CC_id] - 1)
                        && ((N_RB_DL == 25)
                            || (N_RB_DL ==
                                50))) {
                      nb_rbs_remaining
                      [CC_id][UE_id] =
                        nb_rbs_remaining
                        [CC_id][UE_id] -
                        min_rb_unit[CC_id]
                        + 1;
                      ue_sched_ctl->pre_nb_available_rbs
                      [CC_id] =
                        ue_sched_ctl->pre_nb_available_rbs
                        [CC_id] +
                        min_rb_unit[CC_id]
                        - 1;
                      nb_rbs_remaining
                      [CC_id][UE_id2] =
                        nb_rbs_remaining
                        [CC_id][UE_id2] -
                        min_rb_unit[CC_id]
                        + 1;
                      ue_sched_ctl2->pre_nb_available_rbs
                      [CC_id] =
                        ue_sched_ctl2->pre_nb_available_rbs
                        [CC_id] +
                        min_rb_unit[CC_id]
                        - 1;
                    } else {
                      nb_rbs_remaining
                      [CC_id][UE_id] =
                        nb_rbs_remaining
                        [CC_id][UE_id] - 4;
                      ue_sched_ctl->pre_nb_available_rbs
                      [CC_id] =
                        ue_sched_ctl->pre_nb_available_rbs
                        [CC_id] + 4;
                      nb_rbs_remaining
                      [CC_id][UE_id2] =
                        nb_rbs_remaining
                        [CC_id][UE_id2] -
                        4;
                      ue_sched_ctl2->pre_nb_available_rbs
                      [CC_id] =
                        ue_sched_ctl2->pre_nb_available_rbs
                        [CC_id] + 4;
                    }

                    break;
                  }
                }
              }
            }
          }
        }
      }
      #endif

    }
  }
}

// This function assigns pre-available RBs to each UE in specified sub-bands before scheduling is done
void
dlsch_scheduler_pre_processor(module_id_t Mod_id,
                              int slice_idx,
                              frame_t frameP,
                              sub_frame_t subframeP,
                              int *mbsfn_flag,
                              uint8_t rballoc_sub[NFAPI_CC_MAX][N_RBG_MAX]) {
  int UE_id;
  uint8_t CC_id;
  uint16_t i, j;
  int min_rb_unit[NFAPI_CC_MAX];

  eNB_MAC_INST *eNB = RC.mac[Mod_id];
  slice_info_t *sli = &eNB->slice_info;
  uint16_t (*nb_rbs_required)[MAX_MOBILES_PER_ENB]  = sli->pre_processor_results[slice_idx].nb_rbs_required;
  uint16_t (*nb_rbs_accounted)[MAX_MOBILES_PER_ENB] = sli->pre_processor_results[slice_idx].nb_rbs_accounted;
  uint16_t (*nb_rbs_remaining)[MAX_MOBILES_PER_ENB] = sli->pre_processor_results[slice_idx].nb_rbs_remaining;
  uint8_t  (*MIMO_mode_indicator)[N_RBG_MAX]     = sli->pre_processor_results[slice_idx].MIMO_mode_indicator;

  UE_list_t *UE_list = &eNB->UE_list;
  UE_sched_ctrl *ue_sched_ctl;
  //  int rrc_status = RRC_IDLE;
#ifdef TM5
  int harq_pid1 = 0;
  int round1 = 0, round2 = 0;
  int UE_id2;
  uint16_t i1, i2, i3;
  rnti_t rnti1, rnti2;
  LTE_eNB_UE_stats *eNB_UE_stats1 = NULL;
  LTE_eNB_UE_stats *eNB_UE_stats2 = NULL;
  UE_sched_ctrl *ue_sched_ctl1, *ue_sched_ctl2;
#endif
  // Initialize scheduling information for all active UEs
  memset(&sli->pre_processor_results[slice_idx], 0, sizeof(sli->pre_processor_results[slice_idx]));
  // FIXME: After the memset above, some of the resets in reset() are redundant
  dlsch_scheduler_pre_processor_reset(Mod_id, 
                                      slice_idx, 
                                      frameP, 
                                      subframeP,
                                      min_rb_unit,
                                      nb_rbs_required,
                                      rballoc_sub,
                                      MIMO_mode_indicator,
                                      mbsfn_flag); // FIXME: Not sure if useful
  // STATUS
  // Store the DLSCH buffer for each logical channel
  store_dlsch_buffer(Mod_id, 
                     slice_idx, 
                     frameP, 
                     subframeP);

  // Calculate the number of RBs required by each UE on the basis of the logical channel buffers
  assign_rbs_required(Mod_id, 
                      slice_idx, 
                      frameP, 
                      subframeP, 
                      nb_rbs_required, 
                      min_rb_unit);

  // Sorts the UEs on the basis of DLSCH logical channel buffer and CQI
  sort_UEs(Mod_id, 
           slice_idx, 
           frameP, 
           subframeP);

  // ACCOUNTING
  // This procedure decides the number of RBs to allocate
  dlsch_scheduler_pre_processor_accounting(Mod_id, 
                                           slice_idx, 
                                           frameP, 
                                           subframeP,
                                           min_rb_unit,
                                           nb_rbs_required,
                                           nb_rbs_accounted);
  // POSITIONING
  // This procedure does the main allocation of the RBs
  dlsch_scheduler_pre_processor_positioning(Mod_id, 
                                            slice_idx,
                                            min_rb_unit,
                                            nb_rbs_required,
                                            nb_rbs_accounted,
                                            nb_rbs_remaining,
                                            rballoc_sub,
                                            MIMO_mode_indicator);

  // SHARING
  // If there are available RBs left in the slice, allocate them to the highest priority UEs
  if (eNB->slice_info.intraslice_share_active) {
    dlsch_scheduler_pre_processor_intraslice_sharing(Mod_id, 
                                                     slice_idx,
                                                     min_rb_unit,
                                                     nb_rbs_required,
                                                     nb_rbs_accounted,
                                                     nb_rbs_remaining,
                                                     rballoc_sub,
                                                     MIMO_mode_indicator);
  }

#ifdef TM5

  // This has to be revisited!!!!
  for (CC_id = 0; CC_id < RC.nb_mac_CC[Mod_id]; CC_id++) {
    COMMON_channels_t *cc = &eNB->common_channels[CC_id];
    int N_RBG = to_rbg(cc->mib->message.dl_Bandwidth);
    i1 = 0;
    i2 = 0;
    i3 = 0;

    for (j = 0; j < N_RBG; j++) {
      if (MIMO_mode_indicator[CC_id][j] == 2) {
        i1++;
      } else if (MIMO_mode_indicator[CC_id][j] == 1) {
        i2++;
      } else if (MIMO_mode_indicator[CC_id][j] == 0) {
        i3++;
      }
    }

    if (i1 < N_RBG) {
      if (i2 > 0 && i3 == 0) {
        PHY_vars_eNB_g[Mod_id][CC_id]->check_for_SUMIMO_transmissions = PHY_vars_eNB_g[Mod_id][CC_id]->check_for_SUMIMO_transmissions + 1;
      } else if (i3 > 0) {
        PHY_vars_eNB_g[Mod_id][CC_id]->check_for_MUMIMO_transmissions = PHY_vars_eNB_g[Mod_id][CC_id]->check_for_MUMIMO_transmissions + 1;
      }
    } else if (i3 == N_RBG && i1 == 0 && i2 == 0) {
      PHY_vars_eNB_g[Mod_id][CC_id]->FULL_MUMIMO_transmissions = PHY_vars_eNB_g[Mod_id][CC_id]->FULL_MUMIMO_transmissions + 1;
    }
    PHY_vars_eNB_g[Mod_id][CC_id]->check_for_total_transmissions = PHY_vars_eNB_g[Mod_id][CC_id]->check_for_total_transmissions + 1;

  }

#endif

  for (UE_id = UE_list->head; UE_id >= 0; UE_id = UE_list->next[UE_id]) {
    ue_sched_ctl = &UE_list->UE_sched_ctrl[UE_id];

    for (i = 0; i < UE_num_active_CC(UE_list, UE_id); i++) {
      CC_id = UE_list->ordered_CCids[i][UE_id];
      //PHY_vars_eNB_g[Mod_id]->mu_mimo_mode[UE_id].dl_pow_off = dl_pow_off[UE_id];
      COMMON_channels_t *cc = &eNB->common_channels[CC_id];
      int N_RBG = to_rbg(cc->mib->message.dl_Bandwidth);

      if (ue_sched_ctl->pre_nb_available_rbs[CC_id] > 0) {
        LOG_D(MAC, "******************DL Scheduling Information for UE%d ************************\n", 
              UE_id);
        LOG_D(MAC, "dl power offset UE%d = %d \n", 
              UE_id, 
              ue_sched_ctl->dl_pow_off[CC_id]);
        LOG_D(MAC, "***********RB Alloc for every subband for UE%d ***********\n", 
              UE_id);

        for (j = 0; j < N_RBG; j++) {
          //PHY_vars_eNB_g[Mod_id]->mu_mimo_mode[UE_id].rballoc_sub[UE_id] = rballoc_sub_UE[CC_id][UE_id][UE_id];
          LOG_D(MAC, "RB Alloc for UE%d and Subband%d = %d\n", 
                UE_id, j, 
                ue_sched_ctl->rballoc_sub_UE[CC_id][j]);
        }

        //PHY_vars_eNB_g[Mod_id]->mu_mimo_mode[UE_id].pre_nb_available_rbs = pre_nb_available_rbs[CC_id][UE_id];
        LOG_D(MAC, "[eNB %d][SLICE %d]Total RBs allocated for UE%d = %d\n",
              Mod_id, 
              eNB->slice_info.dl[slice_idx].id, 
              UE_id,
              ue_sched_ctl->pre_nb_available_rbs[CC_id]);
      }
    }
  }
}
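
/*
 * Usage sketch (hypothetical caller, not compiled; the real call site is the
 * DLSCH scheduler outside this file). The results consumed afterwards are
 * pre_nb_available_rbs[] and rballoc_sub_UE[][] in each UE's UE_sched_ctrl.
 */
#if 0
uint8_t rballoc_sub[NFAPI_CC_MAX][N_RBG_MAX];
dlsch_scheduler_pre_processor(module_idP, slice_idx, frameP, subframeP,
                              mbsfn_flag, rballoc_sub);
#endif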

#define SF0_LIMIT 1

void
dlsch_scheduler_pre_processor_reset(module_id_t module_idP,
1337
                                    int slice_idx,
1338 1339 1340 1341
                                    frame_t frameP,
                                    sub_frame_t subframeP,
                                    int min_rb_unit[NFAPI_CC_MAX],
                                    uint16_t nb_rbs_required[NFAPI_CC_MAX][MAX_MOBILES_PER_ENB],
1342 1343
                                    uint8_t rballoc_sub[NFAPI_CC_MAX][N_RBG_MAX],
                                    uint8_t MIMO_mode_indicator[NFAPI_CC_MAX][N_RBG_MAX],
1344
                                    int *mbsfn_flag) {
1345 1346
  int UE_id;
  uint8_t CC_id;
1347
  int i, j;
1348 1349
  UE_list_t *UE_list;
  UE_sched_ctrl *ue_sched_ctl;
1350 1351
  int N_RB_DL, RBGsize, RBGsize_last;
  int N_RBG[NFAPI_CC_MAX];
Cedric Roux's avatar
Cedric Roux committed
1352
#ifdef SF0_LIMIT
1353
  int sf0_lower, sf0_upper;
1354
#endif
1355 1356 1357 1358
  rnti_t rnti;
  uint8_t *vrb_map;
  COMMON_channels_t *cc;

1359 1360
  //
  for (CC_id = 0; CC_id < RC.nb_mac_CC[module_idP]; CC_id++) {
1361 1362 1363 1364 1365 1366 1367 1368 1369 1370 1371 1372 1373
    LOG_D(MAC, "Running preprocessor for UE %d (%x)\n", UE_id,(int)(UE_RNTI(module_idP, UE_id)));
    // initialize harq_pid and round
    cc = &RC.mac[module_idP]->common_channels[CC_id];
    N_RBG[CC_id] = to_rbg(cc->mib->message.dl_Bandwidth);
    min_rb_unit[CC_id] = get_min_rb_unit(module_idP, CC_id);

    if (mbsfn_flag[CC_id] > 0)    // If this CC is allocated for MBSFN skip it here
      continue;

    for (UE_id = 0; UE_id < MAX_MOBILES_PER_ENB; ++UE_id) {
      UE_list = &RC.mac[module_idP]->UE_list;
      ue_sched_ctl = &UE_list->UE_sched_ctrl[UE_id];
      rnti = UE_RNTI(module_idP, UE_id);

      if (rnti == NOT_A_RNTI)
        continue;

      if (UE_list->active[UE_id] != TRUE)
        continue;

      if (!ue_dl_slice_membership(module_idP, UE_id, slice_idx))
        continue;

      LOG_D(MAC, "Running preprocessor for UE %d (%x)\n", UE_id, rnti);

      // decrement the timing-advance timer (the legacy harq_pid/round and TA handling below is commented out)
      if (ue_sched_ctl->ta_timer)
        ue_sched_ctl->ta_timer--;

      /*
         eNB_UE_stats *eNB_UE_stats;

         if (eNB_UE_stats == NULL)
         return;


         mac_xface->get_ue_active_harq_pid(module_idP,CC_id,rnti,
         frameP,subframeP,
         &ue_sched_ctl->harq_pid[CC_id],
         &ue_sched_ctl->round[CC_id],
         openair_harq_DL);


         if (ue_sched_ctl->ta_timer == 0) {

         // WE SHOULD PROTECT the eNB_UE_stats with a mutex here ...

         ue_sched_ctl->ta_timer = 20;  // wait 20 subframes before taking TA measurement from PHY
         switch (N_RB_DL) {
         case 6:
         ue_sched_ctl->ta_update = eNB_UE_stats->timing_advance_update;
         break;

         case 15:
         ue_sched_ctl->ta_update = eNB_UE_stats->timing_advance_update/2;
         break;

         case 25:
         ue_sched_ctl->ta_update = eNB_UE_stats->timing_advance_update/4;
         break;

         case 50:
         ue_sched_ctl->ta_update = eNB_UE_stats->timing_advance_update/8;
         break;

         case 75:
         ue_sched_ctl->ta_update = eNB_UE_stats->timing_advance_update/12;
         break;

         case 100:
         ue_sched_ctl->ta_update = eNB_UE_stats->timing_advance_update/16;
         break;
         }
         // clear the update in case PHY does not have a new measurement after timer expiry
         eNB_UE_stats->timing_advance_update =  0;
         }
         else {
         ue_sched_ctl->ta_timer--;
         ue_sched_ctl->ta_update =0; // don't trigger a timing advance command
         }


         if (UE_id==0) {
         VCD_SIGNAL_DUMPER_DUMP_VARIABLE_BY_NAME(VCD_SIGNAL_DUMPER_VARIABLES_UE0_TIMING_ADVANCE,ue_sched_ctl->ta_update);
         }
       */
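      /* Reset the per-UE pre-allocation state for this CC: nothing required,
       * nothing pre-allocated yet, and dl_pow_off back to its default of 2
       * (roughly: no MU-MIMO pairing decision taken yet). */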
      nb_rbs_required[CC_id][UE_id] = 0;
      ue_sched_ctl->pre_nb_available_rbs[CC_id] = 0;
      ue_sched_ctl->dl_pow_off[CC_id] = 2;

      for (i = 0; i < N_RBG[CC_id]; i++) {
        ue_sched_ctl->rballoc_sub_UE[CC_id][i] = 0;
      }
    }

    N_RB_DL = to_prb(RC.mac[module_idP]->common_channels[CC_id].mib->message.dl_Bandwidth);
#ifdef SF0_LIMIT
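    /* Pick the RBG indices covering the centre of the band (the 6 PRBs around
     * DC that carry PBCH/PSS/SSS in subframe 0); they are protected further
     * below when subframeP == 0. The bounds are a rough mapping from the RBG
     * count of each standard bandwidth. */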
    switch (N_RBG[CC_id]) {
      case 6:
        sf0_lower = 0;
        sf0_upper = 5;
        break;

      case 8:
        sf0_lower = 2;
        sf0_upper = 5;
        break;

      case 13:
        sf0_lower = 4;
        sf0_upper = 7;
        break;

      case 17:
        sf0_lower = 7;
        sf0_upper = 9;
        break;

      case 25:
        sf0_lower = 11;
        sf0_upper = 13;
        break;

      default:
        AssertFatal(1 == 0, "unsupported RBs (%d)\n", N_RB_DL);
    }

#endif
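    /* RBG size P and the size of the last, possibly shorter, RBG follow
     * 36.213 Table 7.1.6.1-1; e.g. for N_RB_DL = 50, P = 3 and the last of
     * the 17 RBGs holds only 50 - 16 * 3 = 2 PRBs. */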
    switch (N_RB_DL) {
      case 6:
        RBGsize = 1;
        RBGsize_last = 1;
        break;

      case 15:
        RBGsize = 2;
        RBGsize_last = 1;
        break;

      case 25:
        RBGsize = 2;
        RBGsize_last = 1;
        break;

      case 50:
        RBGsize = 3;
        RBGsize_last = 2;
        break;

      case 75:
        RBGsize = 4;
        RBGsize_last = 3;
        break;

      case 100:
        RBGsize = 4;
        RBGsize_last = 4;
        break;

      default:
        AssertFatal(1 == 0, "unsupported RBs (%d)\n", N_RB_DL);
    }

    vrb_map = RC.mac[module_idP]->common_channels[CC_id].vrb_map;

    // Initialize Subbands according to VRB map
    for (i = 0; i < N_RBG[CC_id]; i++) {
      int rb_size = i == N_RBG[CC_id] - 1 ? RBGsize_last : RBGsize;
#ifdef SF0_LIMIT

      // for avoiding 6+ PRBs around DC in subframe 0 (avoid excessive errors)
      /* TODO: make it proper - allocate those RBs, do not "protect" them, but
       * compute number of available REs and limit MCS according to the
       * TBS table 36.213 7.1.7.2.1-1 (can be done after pre-processor)
       */
      if (subframeP == 0 && i >= sf0_lower && i <= sf0_upper)
        rballoc_sub[CC_id][i] = 1;

#endif

      // for SI-RNTI,RA-RNTI and P-RNTI allocations
      for (j = 0; j < rb_size; j++) {
        if (vrb_map[j + (i*RBGsize)] != 0) {
          rballoc_sub[CC_id][i] = 1;
          LOG_D(MAC, "Frame %d, subframe %d : vrb %d allocated\n", frameP, subframeP, j + (i*RBGsize));
          break;
        }
      }

      //LOG_D(MAC, "Frame %d Subframe %d CC_id %d RBG %i : rb_alloc %d\n",
      //frameP, subframeP, CC_id, i, rballoc_sub[CC_id][i]);
      MIMO_mode_indicator[CC_id][i] = 2;
    }
  }
}


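/* Helper of the DL pre-processor: walks the RBGs of one CC and hands them to
 * one UE until its requirement is met. Roughly, an RBG is skipped when it is
 * already taken (globally or by this UE), when the UE needs no further RBs,
 * when MU-MIMO pairing already fixed dl_pow_off to 0 for this UE, or when the
 * slice allocation mask excludes the RBG; the last RBG of 25- and 50-PRB
 * carriers is one PRB short, hence the separate branch below. */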
void
dlsch_scheduler_pre_processor_allocate(module_id_t Mod_id,
                                       int UE_id,
                                       uint8_t CC_id,
                                       int N_RBG,
                                       int min_rb_unit,
                                       uint16_t nb_rbs_required[NFAPI_CC_MAX][MAX_MOBILES_PER_ENB],
                                       uint16_t nb_rbs_remaining[NFAPI_CC_MAX][MAX_MOBILES_PER_ENB],
                                       uint8_t rballoc_sub[NFAPI_CC_MAX][N_RBG_MAX],
                                       uint8_t slice_allocation_mask[NFAPI_CC_MAX][N_RBG_MAX],
                                       uint8_t MIMO_mode_indicator[NFAPI_CC_MAX][N_RBG_MAX]) {
  int i;
  int tm = get_tmode(Mod_id, CC_id, UE_id);
  UE_list_t *UE_list = &RC.mac[Mod_id]->UE_list;
  UE_sched_ctrl *ue_sched_ctl = &UE_list->UE_sched_ctrl[UE_id];
  int N_RB_DL = to_prb(RC.mac[Mod_id]->common_channels[CC_id].mib->message.dl_Bandwidth);

  for (i = 0; i < N_RBG; i++) {
    if (rballoc_sub[CC_id][i] != 0) continue;

    if (ue_sched_ctl->rballoc_sub_UE[CC_id][i] != 0) continue;

    if (nb_rbs_remaining[CC_id][UE_id] <= 0) continue;

    if (ue_sched_ctl->pre_nb_available_rbs[CC_id] >= nb_rbs_required[CC_id][UE_id]) continue;

    if (ue_sched_ctl->dl_pow_off[CC_id] == 0) continue;

    if (slice_allocation_mask[CC_id][i] == 0) continue;

    if ((i == N_RBG - 1) && ((N_RB_DL == 25) || (N_RB_DL == 50))) {
      // Allocating last, smaller RBG
      if (nb_rbs_remaining[CC_id][UE_id] >= min_rb_unit - 1) {
        rballoc_sub[CC_id][i] = 1;
        ue_sched_ctl->rballoc_sub_UE[CC_id][i] = 1;
        MIMO_mode_indicator[CC_id][i] = 1;

        if (tm == 5) {
          ue_sched_ctl->dl_pow_off[CC_id] = 1;
        }

        nb_rbs_remaining[CC_id][UE_id] = nb_rbs_remaining[CC_id][UE_id] - min_rb_unit + 1;
        ue_sched_ctl->pre_nb_available_rbs[CC_id] = ue_sched_ctl->pre_nb_available_rbs[CC_id] + min_rb_unit - 1;
      }
    } else {
      // Allocating a standard-sized RBG
      if (nb_rbs_remaining[CC_id][UE_id] >= min_rb_unit) {
        rballoc_sub[CC_id][i] = 1;
        ue_sched_ctl->rballoc_sub_UE[CC_id][i] = 1;
        MIMO_mode_indicator[CC_id][i] = 1;

        if (tm == 5) {
          ue_sched_ctl->dl_pow_off[CC_id] = 1;
        }

        nb_rbs_remaining[CC_id][UE_id] = nb_rbs_remaining[CC_id][UE_id] - min_rb_unit;
        ue_sched_ctl->pre_nb_available_rbs[CC_id] = ue_sched_ctl->pre_nb_available_rbs[CC_id] + min_rb_unit;
      }
    }
  }
}


/// ULSCH PRE_PROCESSOR

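/* Rough outline: sort the schedulable UEs, let assign_max_mcs_min_rb() pick a
 * per-UE MCS and an initial PRB estimate from buffer status and power
 * headroom, then spread the PRBs of each CC over the UEs in steps 1-4 below,
 * honouring the per-slice limits. */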
void ulsch_scheduler_pre_processor(module_id_t module_idP,
                                   int slice_idx,
                                   int frameP,
                                   sub_frame_t subframeP,
                                   unsigned char sched_subframeP,
                                   uint16_t *first_rb) {
  int UE_id;
  uint16_t n;
  uint8_t CC_id, harq_pid;
  uint16_t nb_allocated_rbs[NFAPI_CC_MAX][MAX_MOBILES_PER_ENB];
  uint16_t total_allocated_rbs[NFAPI_CC_MAX];
  uint16_t average_rbs_per_user[NFAPI_CC_MAX];
  int16_t total_remaining_rbs[NFAPI_CC_MAX];
  uint16_t total_ue_count[NFAPI_CC_MAX];
  UE_list_t *UE_list = &RC.mac[module_idP]->UE_list;
  slice_info_t *sli = &RC.mac[module_idP]->slice_info;
  UE_TEMPLATE *UE_template = 0;
  UE_sched_ctrl *ue_sched_ctl;
  int N_RB_UL = 0;
  int available_rbs;
  uint16_t first_rb_offset;
  rnti_t rntiTable[MAX_MOBILES_PER_ENB];        // Rnti array => Add SSR 12-2018

  // sort ues
  LOG_D(MAC, "In ulsch_preprocessor: sort ue \n");
  sort_ue_ul(module_idP, slice_idx, frameP, subframeP, rntiTable);
  // maximize MCS and then allocate required RB according to the buffer occupancy with the limit of max available UL RB
  LOG_D(MAC, "In ulsch_preprocessor: assign max mcs min rb\n");
  assign_max_mcs_min_rb(module_idP, slice_idx, frameP, subframeP, first_rb);
  // we need to distribute RBs among UEs
  // step 1: reset the vars
  uint8_t CC_nb = (uint8_t) RC.nb_mac_CC[module_idP];
  for (CC_id = 0; CC_id < CC_nb; CC_id++) {
    total_allocated_rbs[CC_id] = 0;
    total_remaining_rbs[CC_id] = 0;
    average_rbs_per_user[CC_id] = 0;
    total_ue_count[CC_id] = 0;
  }

  // Step 1.5: Calculate total_ue_count
  for (UE_id = UE_list->head_ul; UE_id >= 0; UE_id = UE_list->next_ul[UE_id]) {
    // n is not the CC index; the actual CC_id is fetched from ordered_ULCCids below
    for (n = 0; n < UE_list->numactiveULCCs[UE_id]; n++) {
      CC_id = UE_list->ordered_ULCCids[n][UE_id];
      UE_template = &UE_list->UE_template[CC_id][UE_id];

      if (UE_template->pre_allocated_nb_rb_ul[slice_idx] > 0) {
        total_ue_count[CC_id]++;
      }
    }
  }
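  /* Roughly: every UE with a pre-allocation gets an equal share of the
   * available PRBs; a single UE may take everything (hence the +1 below), and
   * with more UEs than PRBs each one gets a single PRB. */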
  // step 2: calculate the average rb per UE
  LOG_D(MAC, "In ulsch_preprocessor: step2 \n");
  for (UE_id = UE_list->head_ul; UE_id >= 0; UE_id = UE_list->next_ul[UE_id]) {
    if (UE_list->UE_template[CC_id][UE_id].rach_resource_type > 0) continue;

    LOG_D(MAC, "In ulsch_preprocessor: handling UE %d/%x\n",
          UE_id,
          rntiTable[UE_id]);

    for (n = 0; n < UE_list->numactiveULCCs[UE_id]; n++) {
      // This is the actual CC_id in the list
      CC_id = UE_list->ordered_ULCCids[n][UE_id];
      LOG_D(MAC, "In ulsch_preprocessor: handling UE %d/%x CCid %d\n",
            UE_id,
            rntiTable[UE_id],
            CC_id);
      /*
          if((mac_xface->get_nCCE_max(module_idP,CC_id,3,subframeP) - nCCE_to_be_used[CC_id])  > (1<<aggregation)) {
          nCCE_to_be_used[CC_id] = nCCE_to_be_used[CC_id] + (1<<aggregation);
          max_num_ue_to_be_scheduled+=1;
          } */
      N_RB_UL = to_prb(RC.mac[module_idP]->common_channels[CC_id].ul_Bandwidth);
      ue_sched_ctl = &UE_list->UE_sched_ctrl[UE_id];
      ue_sched_ctl->max_rbs_allowed_slice_uplink[CC_id][slice_idx] =
        nb_rbs_allowed_slice(sli->ul[slice_idx].pct, N_RB_UL);
      first_rb_offset = UE_list->first_rb_offset[CC_id][slice_idx];
      available_rbs =
        cmin(ue_sched_ctl->max_rbs_allowed_slice_uplink[CC_id][slice_idx], N_RB_UL - first_rb[CC_id] - first_rb_offset);

      if (available_rbs < 0)
        available_rbs = 0;

      if (total_ue_count[CC_id] == 0) {
        average_rbs_per_user[CC_id] = 0;
      } else if (total_ue_count[CC_id] == 1) {    // increase the available RBs, special case,
        average_rbs_per_user[CC_id] = (uint16_t) (available_rbs + 1);
      } else if (total_ue_count[CC_id] <= available_rbs) {
        average_rbs_per_user[CC_id] = (uint16_t) floor(available_rbs / total_ue_count[CC_id]);
      } else {
        average_rbs_per_user[CC_id] = 1;
        LOG_W(MAC, "[eNB %d] frame %d subframe %d: UE %d CC %d: can't get average rb per user (should not be here)\n",
              module_idP,
              frameP,
              subframeP,
              UE_id,
              CC_id);
      }

      if (total_ue_count[CC_id] > 0) {
        LOG_D(MAC, "[eNB %d] Frame %d subframe %d: total ue to be scheduled %d\n",
              module_idP,
              frameP,
              subframeP,
              total_ue_count[CC_id]);
      }
    }
  }

  // step 3: assign the RBs
  for (UE_id = UE_list->head_ul; UE_id >= 0; UE_id = UE_list->next_ul[UE_id]) {
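    /* For a pending UL retransmission (round_UL > 0) keep the PRB count of
     * the initial transmission; otherwise start from the pre-allocated
     * amount, capped by the average share computed in step 2. */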
    // if (continueTable[UE_id]) continue;

    for (n = 0; n < UE_list->numactiveULCCs[UE_id]; n++) {
      // This is the actual CC_id in the list
      CC_id = UE_list->ordered_ULCCids[n][UE_id];
      UE_template = &UE_list->UE_template[CC_id][UE_id];
      harq_pid = subframe2harqpid(&RC.mac[module_idP]->common_channels[CC_id],
                                  frameP, sched_subframeP);

      //      mac_xface->get_ue_active_harq_pid(module_idP,CC_id,rnti,frameP,subframeP,&harq_pid,&round,openair_harq_UL);

      if (UE_list->UE_sched_ctrl[UE_id].round_UL[CC_id] > 0) {
        nb_allocated_rbs[CC_id][UE_id] = UE_list->UE_template[CC_id][UE_id].nb_rb_ul[harq_pid];
      } else {
        nb_allocated_rbs[CC_id][UE_id] =
          cmin(UE_list->UE_template[CC_id][UE_id].pre_allocated_nb_rb_ul[slice_idx], average_rbs_per_user[CC_id]);
      }

      total_allocated_rbs[CC_id] += nb_allocated_rbs[CC_id][UE_id];
      LOG_D(MAC, "In ulsch_preprocessor: assigning %d RBs for UE %d/%x CCid %d, harq_pid %d\n",
            nb_allocated_rbs[CC_id][UE_id],
            UE_id,
            rntiTable[UE_id],
            CC_id,
            harq_pid);
    }
  }

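  /* After the even split of step 3, any PRBs still left on a CC are handed
   * out one at a time, never beyond what a UE asked for in
   * pre_allocated_nb_rb_ul. */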
  // step 4: assign the remaining RBs and set the pre_allocated rbs accordingly
  for (UE_id = UE_list->head_ul; UE_id >= 0; UE_id = UE_list->next_ul[UE_id]) {
    // if (continueTable[UE_id]) continue;

    ue_sched_ctl = &UE_list->UE_sched_ctrl[UE_id];

    for (n = 0; n < UE_list->numactiveULCCs[UE_id]; n++) {
      // This is the actual CC_id in the list
      CC_id = UE_list->ordered_ULCCids[n][UE_id];
      UE_template = &UE_list->UE_template[CC_id][UE_id];
      N_RB_UL = to_prb(RC.mac[module_idP]->common_channels[CC_id].ul_Bandwidth);
      first_rb_offset = UE_list->first_rb_offset[CC_id][slice_idx];
      available_rbs = cmin(ue_sched_ctl->max_rbs_allowed_slice_uplink[CC_id][slice_idx], N_RB_UL - first_rb[CC_id] - first_rb_offset);
      total_remaining_rbs[CC_id] = available_rbs - total_allocated_rbs[CC_id];

      if (total_ue_count[CC_id] == 1) {
        total_remaining_rbs[CC_id]++;
      }

      while (UE_template->pre_allocated_nb_rb_ul[slice_idx] > 0 &&
             nb_allocated_rbs[CC_id][UE_id] < UE_template->pre_allocated_nb_rb_ul[slice_idx] &&
             total_remaining_rbs[CC_id] > 0) {
        nb_allocated_rbs[CC_id][UE_id] = cmin(nb_allocated_rbs[CC_id][UE_id] + 1, UE_template->pre_allocated_nb_rb_ul[slice_idx]);
        total_remaining_rbs[CC_id]--;
        total_allocated_rbs[CC_id]++;
      }

      UE_template->pre_allocated_nb_rb_ul[slice_idx] = nb_allocated_rbs[CC_id][UE_id];
      LOG_D(MAC, "******************UL Scheduling Information for UE%d CC_id %d ************************\n",
            UE_id,
            CC_id);
      LOG_D(MAC, "[eNB %d] total RB allocated for UE%d CC_id %d  = %d\n",
            module_idP,
            UE_id,
            CC_id,
            UE_template->pre_allocated_nb_rb_ul[slice_idx]);
    }
  }

  return;
}

void
assign_max_mcs_min_rb(module_id_t module_idP,
                      int slice_idx,
                      int frameP,
                      sub_frame_t subframeP,
                      uint16_t *first_rb) {
  int i;
  uint16_t n, UE_id;
  uint8_t CC_id;
  int mcs;
  int rb_table_index = 0, tbs, tx_power;
  eNB_MAC_INST *eNB = RC.mac[module_idP];
  UE_list_t *UE_list = &eNB->UE_list;
  slice_info_t *sli = &eNB->slice_info;
  UE_TEMPLATE *UE_template;
  UE_sched_ctrl *ue_sched_ctl;
  int Ncp;
  int N_RB_UL;
  int first_rb_offset, available_rbs;
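  /* Sketch of the idea: start from the highest allowed MCS (at most 20, or 10
   * i.e. QPSK until a power headroom report has been received, never above the
   * slice's maxmcs), then walk rb_table[] to find the smallest PRB count whose
   * TBS covers the UL buffer while the estimated UE TX power stays within the
   * reported headroom. */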
  for (i = UE_list->head_ul; i >= 0; i = UE_list->next_ul[i]) {
    if (UE_list->UE_sched_ctrl[i].phr_received == 1) {
      /* if we've received the power headroom information from the UE, we can go to
       * maximum mcs */
      mcs = cmin(20, sli->ul[slice_idx].maxmcs);
    } else {
      /* otherwise, limit to QPSK PUSCH */
      mcs = cmin(10, sli->ul[slice_idx].maxmcs);
    }

    UE_id = i;
    for (n = 0; n < UE_list->numactiveULCCs[UE_id]; n++) {
      // This is the actual CC_id in the list
      CC_id = UE_list->ordered_ULCCids[n][UE_id];
      AssertFatal(CC_id < RC.nb_mac_CC[module_idP], "CC_id %u should be < %u, loop n=%u < numactiveULCCs[%u]=%u",
                  CC_id,
                  NFAPI_CC_MAX,
                  n,
                  UE_id,
                  UE_list->numactiveULCCs[UE_id]);
      UE_template = &UE_list->UE_template[CC_id][UE_id];
      UE_template->pre_assigned_mcs_ul = mcs;
      ue_sched_ctl = &UE_list->UE_sched_ctrl[UE_id];
      Ncp = eNB->common_channels[CC_id].Ncp;
      N_RB_UL = to_prb(eNB->common_channels[CC_id].ul_Bandwidth);
      ue_sched_ctl->max_rbs_allowed_slice_uplink[CC_id][slice_idx] = nb_rbs_allowed_slice(sli->ul[slice_idx].pct, N_RB_UL);

      int bytes_to_schedule = UE_template->estimated_ul_buffer - UE_template->scheduled_ul_bytes;
      if (bytes_to_schedule < 0) bytes_to_schedule = 0;
      int bits_to_schedule = bytes_to_schedule * 8;

      // if this UE has UL traffic
      if (bits_to_schedule > 0) {
        tbs = get_TBS_UL(UE_template->pre_assigned_mcs_ul, 3) << 3; // 1 or 2 PRB with cqi enabled does not work well!
        rb_table_index = 2;
        // fixme: set use_srs flag
        tx_power = estimate_ue_tx_power(tbs, rb_table[rb_table_index], 0, Ncp, 0);

        while ((UE_template->phr_info - tx_power < 0 || tbs > bits_to_schedule) && UE_template->pre_assigned_mcs_ul > 3) {
          // LOG_I(MAC,"UE_template->phr_info %d tx_power %d mcs %d\n", UE_template->phr_info,tx_power, mcs);
          UE_template->pre_assigned_mcs_ul--;
          tbs = get_TBS_UL(UE_template->pre_assigned_mcs_ul, rb_table[rb_table_index]) << 3;
          tx_power = estimate_ue_tx_power(tbs, rb_table[rb_table_index], 0, Ncp, 0);   // fixme: set use_srs
        }

        first_rb_offset = UE_list->first_rb_offset[CC_id][slice_idx];
        available_rbs =
          cmin(ue_sched_ctl->max_rbs_allowed_slice_uplink[CC_id][slice_idx], N_RB_UL - first_rb[CC_id] - first_rb_offset);

        while (tbs < bits_to_schedule &&
               rb_table[rb_table_index] < available_rbs &&
               UE_template->phr_info - tx_power > 0 &&
               rb_table_index < 32) {
          rb_table_index++;
          tbs = get_TBS_UL(UE_template->pre_assigned_mcs_ul, rb_table[rb_table_index]) << 3;
          tx_power = estimate_ue_tx_power(tbs, rb_table[rb_table_index], 0, Ncp, 0);
        }

        if (rb_table[rb_table_index] > (available_rbs - 1)) {
          rb_table_index--;
        }

        // 1 or 2 PRB with cqi enabled does not work well
        if (rb_table[rb_table_index] < 3) {
          rb_table_index = 2; //3PRB
        }

        UE_template->pre_allocated_rb_table_index_ul = rb_table_index;
        UE_template->pre_allocated_nb_rb_ul[slice_idx] = rb_table[rb_table_index];
        LOG_D(MAC, "[eNB %d] frame %d subframe %d: for UE %d CC %d: pre-assigned mcs %d, pre-allocated rb_table[%d]=%d RBs (phr %d, tx power %d)\n",
              module_idP,
              frameP,
              subframeP,
              UE_id,
              CC_id,
              UE_template->pre_assigned_mcs_ul,
              UE_template->pre_allocated_rb_table_index_ul,
              UE_template->pre_allocated_nb_rb_ul[slice_idx],
              UE_template->phr_info, tx_power);
      } else {
        /* if the UE has a pending scheduling request then pre-allocate 3 RBs */
        //if (UE_template->ul_active == 1 && UE_template->ul_SR == 1) {
        if (UE_is_to_be_scheduled(module_idP, CC_id, i)) {
          /* use QPSK mcs */
          UE_template->pre_assigned_mcs_ul = 10;
          UE_template->pre_allocated_rb_table_index_ul = 2;
          UE_template->pre_allocated_nb_rb_ul[slice_idx] = 3;
        } else {
          UE_template->pre_assigned_mcs_ul = 0;
          UE_template->pre_allocated_rb_table_index_ul = -1;
          UE_template->pre_allocated_nb_rb_ul[slice_idx] = 0;
        }
      }
    }
  }
}

struct sort_ue_ul_params {
  int module_idP;
  int frameP;
  int subframeP;
};

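/* Comparator handed to qsort_r() by sort_ue_ul() below: UEs are ordered by,
 * roughly, highest UL HARQ round first, then largest LCGID0 buffer report,
 * then most bytes left to schedule, then highest pre-assigned UL MCS. */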
static int ue_ul_compare(const void *_a, const void *_b, void *_params) {
  struct sort_ue_ul_params *params = _params;
  UE_list_t *UE_list = &RC.mac[params->module_idP]->UE_list;
  int UE_id1 = *(const int *) _a;
  int UE_id2 = *(const int *) _b;
  int rnti1 = UE_RNTI(params->module_idP, UE_id1);
  int pCCid1 = UE_PCCID(params->module_idP, UE_id1);
  int round1 = maxround(params->module_idP, rnti1, params->frameP,
                        params->subframeP, 1);
  int rnti2 = UE_RNTI(params->module_idP, UE_id2);
  int pCCid2 = UE_PCCID(params->module_idP, UE_id2);
  int round2 = maxround(params->module_idP, rnti2, params->frameP,
                        params->subframeP, 1);

  if (round1 > round2)
    return -1;

  if (round1 < round2)
    return 1;

  if (UE_list->UE_template[pCCid1][UE_id1].ul_buffer_info[LCGID0] >
      UE_list->UE_template[pCCid2][UE_id2].ul_buffer_info[LCGID0])
    return -1;

  if (UE_list->UE_template[pCCid1][UE_id1].ul_buffer_info[LCGID0] <
      UE_list->UE_template[pCCid2][UE_id2].ul_buffer_info[LCGID0])
    return 1;

  int bytes_to_schedule1 = UE_list->UE_template[pCCid1][UE_id1].estimated_ul_buffer - UE_list->UE_template[pCCid1][UE_id1].scheduled_ul_bytes;

  if (bytes_to_schedule1 < 0) bytes_to_schedule1 = 0;

  int bytes_to_schedule2 = UE_list->UE_template[pCCid2][UE_id2].estimated_ul_buffer - UE_list->UE_template[pCCid2][UE_id2].scheduled_ul_bytes;

  if (bytes_to_schedule2 < 0) bytes_to_schedule2 = 0;

  if (bytes_to_schedule1 > bytes_to_schedule2)
    return -1;

  if (bytes_to_schedule1 < bytes_to_schedule2)
    return 1;

  if (UE_list->UE_template[pCCid1][UE_id1].pre_assigned_mcs_ul >
      UE_list->UE_template[pCCid2][UE_id2].pre_assigned_mcs_ul)
    return -1;

  if (UE_list->UE_template[pCCid1][UE_id1].pre_assigned_mcs_ul <
      UE_list->UE_template[pCCid2][UE_id2].pre_assigned_mcs_ul)
    return 1;

  return 0;
}

void
sort_ue_ul(module_id_t module_idP,
           int slice_idx,
           int frameP,
           sub_frame_t subframeP,
           rnti_t *rntiTable) {
  int i;
  int list[MAX_MOBILES_PER_ENB];
  int list_size = 0;
  struct sort_ue_ul_params params = { module_idP, frameP, subframeP };
  UE_list_t *UE_list = &RC.mac[module_idP]->UE_list;

  for (i = 0; i < MAX_MOBILES_PER_ENB; i++) {
    rntiTable[i] = UE_RNTI(module_idP, i);
    // Keep only valid entries: active, attached (valid RNTI), UL in sync and member of this slice
    if (UE_list->active[i] == TRUE &&
        rntiTable[i] != NOT_A_RNTI &&
        UE_list->UE_sched_ctrl[i].ul_out_of_sync != 1 &&
        ue_ul_slice_membership(module_idP, i, slice_idx)) {
      list[list_size++] = i; // Add to list
    }
  }

  qsort_r(list, list_size, sizeof(int), ue_ul_compare, &params);

  if (list_size) {                          // At minimum one list element
    for (i = 0; i < list_size - 1; i++)
      UE_list->next_ul[list[i]] = list[i + 1];

    UE_list->next_ul[list[list_size - 1]] = -1;
    UE_list->head_ul = list[0];
  } else {                                  // No element
    UE_list->head_ul = -1;
  }
}