diff options
author | Jacob Harvey <jlharvey@us.ibm.com> | 2017-08-14 15:16:43 -0500 |
---|---|---|
committer | Daniel M. Crowell <dcrowell@us.ibm.com> | 2017-08-29 11:40:18 -0400 |
commit | ee5398301736c96d0d5ffcfd147585b2e29f5a16 (patch) | |
tree | e7a1efd99e41212711d36ec93f4a91d3954e6130 /src/import/chips/p9/procedures/hwp/memory/lib/dimm/eff_dimm.C | |
parent | 2b881ebf90218af1ce41918b214498c2574940e1 (diff) | |
download | talos-hostboot-ee5398301736c96d0d5ffcfd147585b2e29f5a16.tar.gz talos-hostboot-ee5398301736c96d0d5ffcfd147585b2e29f5a16.zip |
Implementing draminit_training_adv
Set default pattern to John's new one and backup to supernova 2.0
Change-Id: I406bb5c5652cff9fe4690e5bd9b03cc431d75f61
Reviewed-on: http://ralgit01.raleigh.ibm.com/gerrit1/44709
Dev-Ready: JACOB L. HARVEY <jlharvey@us.ibm.com>
Tested-by: Hostboot CI <hostboot-ci+hostboot@us.ibm.com>
Tested-by: FSP CI Jenkins <fsp-CI-jenkins+hostboot@us.ibm.com>
Reviewed-by: Louis Stermole <stermole@us.ibm.com>
Reviewed-by: STEPHEN GLANCY <sglancy@us.ibm.com>
Reviewed-by: Matt K. Light <mklight@us.ibm.com>
Reviewed-by: Daniel M. Crowell <dcrowell@us.ibm.com>
Tested-by: Jenkins Server <pfd-jenkins+hostboot@us.ibm.com>
Reviewed-by: Jennifer A. Stofer <stofer@us.ibm.com>
Reviewed-on: http://ralgit01.raleigh.ibm.com/gerrit1/44780
Tested-by: Jenkins OP Build CI <op-jenkins+hostboot@us.ibm.com>
Tested-by: Jenkins OP HW <op-hw-jenkins+hostboot@us.ibm.com>
Diffstat (limited to 'src/import/chips/p9/procedures/hwp/memory/lib/dimm/eff_dimm.C')
-rw-r--r-- | src/import/chips/p9/procedures/hwp/memory/lib/dimm/eff_dimm.C | 79 |
1 file changed, 75 insertions(+), 4 deletions(-)
diff --git a/src/import/chips/p9/procedures/hwp/memory/lib/dimm/eff_dimm.C b/src/import/chips/p9/procedures/hwp/memory/lib/dimm/eff_dimm.C index 2f8ee1126..faf07a39f 100644 --- a/src/import/chips/p9/procedures/hwp/memory/lib/dimm/eff_dimm.C +++ b/src/import/chips/p9/procedures/hwp/memory/lib/dimm/eff_dimm.C @@ -4770,16 +4770,87 @@ fapi_try_exit: } /// +/// @brief Determines and sets the CUSTOM_TRAINING_ADV_PATTERNS settings for training advance +/// @return fapi2::FAPI2_RC_SUCCESS if okay +/// @note overwrite the attribute to default values if it's set to 0 +/// +fapi2::ReturnCode eff_dimm::training_adv_pattern() +{ + uint32_t l_special_patterns [PORTS_PER_MCS] = {}; + FAPI_TRY( custom_training_adv_patterns( iv_mcs, &(l_special_patterns[0])) ); + + // Let's not write the defaults if someone wants to overwrite the attribute + // 0 is an invalid pattern, so if it's 0, the attribute is empty + if ( l_special_patterns[mss::index(iv_mca)] == 0) + { + fapi2::buffer<uint32_t> l_temp; + + l_temp.insertFromRight<PATTERN0_START, PATTERN0_LEN> + (fapi2::ENUM_ATTR_MSS_CUSTOM_TRAINING_ADV_PATTERNS_DEFAULT_PATTERN0); + + l_temp.insertFromRight<PATTERN1_START, PATTERN1_LEN> + (fapi2::ENUM_ATTR_MSS_CUSTOM_TRAINING_ADV_PATTERNS_DEFAULT_PATTERN1); + + l_special_patterns[mss::index(iv_mca)] = l_temp; + + FAPI_INF("%s setting training_adv_pattern as 0x%08x", mss::c_str(iv_mca), l_temp); + + FAPI_TRY( FAPI_ATTR_SET(fapi2::ATTR_MSS_CUSTOM_TRAINING_ADV_PATTERNS, iv_mcs, l_special_patterns) ); + } + +fapi_try_exit: + return fapi2::current_err; +} + +/// +/// @brief Determines and sets the CUSTOM_TRAINING_ADV_BACKUP_PATTERNS settings for training advance +/// @return fapi2::FAPI2_RC_SUCCESS if okay +/// @note overwrite the attribute to default values if it's set to 0 +/// +fapi2::ReturnCode eff_dimm::training_adv_backup_pattern() +{ + uint32_t l_special_patterns [PORTS_PER_MCS] = {}; + FAPI_TRY( custom_training_adv_backup_patterns( iv_mcs, &(l_special_patterns[0])) ); + + // Let's set the 
backup pattern as well + if ( l_special_patterns[mss::index(iv_mca)] == 0) + { + fapi2::buffer<uint32_t> l_temp; + + l_temp.insertFromRight<PATTERN0_START, PATTERN0_LEN> + (fapi2::ENUM_ATTR_MSS_CUSTOM_TRAINING_ADV_BACKUP_PATTERNS_DEFAULT_PATTERN0); + + l_temp.insertFromRight<PATTERN1_START, PATTERN1_LEN> + (fapi2::ENUM_ATTR_MSS_CUSTOM_TRAINING_ADV_BACKUP_PATTERNS_DEFAULT_PATTERN1); + + l_special_patterns[mss::index(iv_mca)] = l_temp; + + FAPI_INF("%s setting training_adv_backup_pattern as 0x%08x", mss::c_str(iv_mca), l_temp); + + FAPI_TRY( FAPI_ATTR_SET(fapi2::ATTR_MSS_CUSTOM_TRAINING_ADV_BACKUP_PATTERNS, iv_mcs, l_special_patterns) ); + } + +fapi_try_exit: + return fapi2::current_err; +} + +/// /// @brief Determines and sets the cal_step_enable values /// @return fapi2::FAPI2_RC_SUCCESS if okay /// fapi2::ReturnCode eff_dimm::cal_step_enable() { - const uint32_t l_cal_step_value = (mss::chip_ec_feature_skip_hw_vref_cal(iv_mcs) ? - RUN_CAL_SKIP_WR_RD_2D_VREF : RUN_ALL_CAL_STEPS); + fapi2::buffer<uint32_t> l_cal_step_value = (mss::chip_ec_feature_skip_hw_vref_cal(iv_mcs) ? + RUN_CAL_SKIP_WR_RD_2D_VREF : RUN_ALL_CAL_STEPS); + + // We only run draminit training advance on DD2 modules + l_cal_step_value = l_cal_step_value.writeBit<mss::TRAINING_ADV>( !mss::chip_ec_nimbus_lt_2_0(iv_mcs) ); - FAPI_DBG("%s %s running HW VREF cal. cal_step value: 0x%0x VREF", mss::c_str(iv_mcs), - mss::chip_ec_feature_skip_hw_vref_cal(iv_mcs) ? "not" : "", l_cal_step_value); + FAPI_DBG("%s %s running HW VREF cal. cal_step value: 0x%08x VREF, running training advance %s", + mss::c_str(iv_mcs), + mss::chip_ec_feature_skip_hw_vref_cal(iv_mcs) ? "not" : "", + l_cal_step_value, + l_cal_step_value.getBit<mss::TRAINING_ADV>() ? "yes" : "no"); // Sets up the vector std::vector<uint32_t> l_cal_step(PORTS_PER_MCS, l_cal_step_value); |