summaryrefslogtreecommitdiffstats
path: root/src/build
diff options
context:
space:
mode:
Diffstat (limited to 'src/build')
-rw-r--r--src/build/models/n10_e9027_tp026_soa_sc_u285_01/base13
-rw-r--r--src/build/models/n10_e9030_tp029_soa_sc_u339_01/base19
-rw-r--r--src/build/models/n10_e9031_tp030_soa_sc_u008_01/base11
-rw-r--r--src/build/models/n10_e9031_tp030_soa_sc_u012_01/base11
-rw-r--r--src/build/models/n10_e9031_tp030_soa_sc_u022_01/base11
-rwxr-xr-xsrc/build/tools/CommitSbeImageToCMVC.py376
-rwxr-xr-xsrc/build/tools/Debug/sbe-debug.py136
-rw-r--r--src/build/tools/README66
-rwxr-xr-xsrc/build/tools/conv_rel_branch.pl339
-rwxr-xr-xsrc/build/tools/gitRelease.pm2102
-rw-r--r--src/build/tools/perl.modules/gitUtil.pm592
-rwxr-xr-xsrc/build/tools/sandbox-create60
-rwxr-xr-xsrc/build/tools/sbeCmvcConstants.py84
-rwxr-xr-xsrc/build/tools/sbeCmvcUtility.py570
-rwxr-xr-xsrc/build/tools/sbeGitTool.pl332
-rw-r--r--src/build/tools/sbePatchUtility.py179
-rwxr-xr-xsrc/build/tools/sbePrime.py296
17 files changed, 5197 insertions, 0 deletions
diff --git a/src/build/models/n10_e9027_tp026_soa_sc_u285_01/base b/src/build/models/n10_e9027_tp026_soa_sc_u285_01/base
new file mode 100644
index 00000000..a5314f90
--- /dev/null
+++ b/src/build/models/n10_e9027_tp026_soa_sc_u285_01/base
@@ -0,0 +1,13 @@
+#base commit id
+base:3e4dac85ccc9e2a89f80931fe75dd918ef45dbe9
+#cherry-picks
+# fapi delay
+cp:refs/changes/57/21157/4
+# awan workarounds
+cp:refs/changes/05/22605/4
+# startclock_chiplets
+cp:refs/changes/33/22733/1
+# new compiler
+cp:refs/changes/52/22552/9
+# assert Support
+cp:refs/changes/81/22781/2
diff --git a/src/build/models/n10_e9030_tp029_soa_sc_u339_01/base b/src/build/models/n10_e9030_tp029_soa_sc_u339_01/base
new file mode 100644
index 00000000..06c64ab4
--- /dev/null
+++ b/src/build/models/n10_e9030_tp029_soa_sc_u339_01/base
@@ -0,0 +1,19 @@
+#base commit id
+base:3e4dac85ccc9e2a89f80931fe75dd918ef45dbe9
+#cherry-picks
+# fapi delay
+cp:refs/changes/57/21157/4
+# awan workarounds
+cp:refs/changes/05/22605/4
+# startclock_chiplets
+cp:refs/changes/33/22733/1
+# new compiler
+cp:refs/changes/52/22552/9
+# assert Support
+cp:refs/changes/81/22781/2
+# PBA support
+cp:refs/changes/36/20836/9
+# Remove eabi
+cp:refs/changes/24/22924/1
+# Remove pcb_arb
+cp:refs/changes/55/22955/1
diff --git a/src/build/models/n10_e9031_tp030_soa_sc_u008_01/base b/src/build/models/n10_e9031_tp030_soa_sc_u008_01/base
new file mode 100644
index 00000000..34f0793e
--- /dev/null
+++ b/src/build/models/n10_e9031_tp030_soa_sc_u008_01/base
@@ -0,0 +1,11 @@
+#base commit id
+base:177ed87bff3c7abc300069fe75cc8b89ea7b1681
+#cherry-picks
+# fapi delay
+cp:refs/changes/57/21157/4
+# awan workarounds
+cp:refs/changes/05/22605/4
+# startclock_chiplets
+cp:refs/changes/33/22733/1
+# PBA support
+cp:refs/changes/36/20836/10
diff --git a/src/build/models/n10_e9031_tp030_soa_sc_u012_01/base b/src/build/models/n10_e9031_tp030_soa_sc_u012_01/base
new file mode 100644
index 00000000..34f0793e
--- /dev/null
+++ b/src/build/models/n10_e9031_tp030_soa_sc_u012_01/base
@@ -0,0 +1,11 @@
+#base commit id
+base:177ed87bff3c7abc300069fe75cc8b89ea7b1681
+#cherry-picks
+# fapi delay
+cp:refs/changes/57/21157/4
+# awan workarounds
+cp:refs/changes/05/22605/4
+# startclock_chiplets
+cp:refs/changes/33/22733/1
+# PBA support
+cp:refs/changes/36/20836/10
diff --git a/src/build/models/n10_e9031_tp030_soa_sc_u022_01/base b/src/build/models/n10_e9031_tp030_soa_sc_u022_01/base
new file mode 100644
index 00000000..34f0793e
--- /dev/null
+++ b/src/build/models/n10_e9031_tp030_soa_sc_u022_01/base
@@ -0,0 +1,11 @@
+#base commit id
+base:177ed87bff3c7abc300069fe75cc8b89ea7b1681
+#cherry-picks
+# fapi delay
+cp:refs/changes/57/21157/4
+# awan workarounds
+cp:refs/changes/05/22605/4
+# startclock_chiplets
+cp:refs/changes/33/22733/1
+# PBA support
+cp:refs/changes/36/20836/10
diff --git a/src/build/tools/CommitSbeImageToCMVC.py b/src/build/tools/CommitSbeImageToCMVC.py
new file mode 100755
index 00000000..b6f4eca8
--- /dev/null
+++ b/src/build/tools/CommitSbeImageToCMVC.py
@@ -0,0 +1,376 @@
+#!/usr/bin/python
+# IBM_PROLOG_BEGIN_TAG
+# This is an automatically generated prolog.
+#
+# $Source: src/build/tools/CommitSbeImageToCMVC.py $
+#
+# OpenPOWER sbe Project
+#
+# Contributors Listed Below - COPYRIGHT 2016
+#
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied. See the License for the specific language governing
+# permissions and limitations under the License.
+#
+# IBM_PROLOG_END_TAG
+'''
+###########################################################
+# @file CommitSbeImageToCMVC.py
+# @author: George Keishing <gkeishin@in.ibm.com>
+# Sangeetha TS <sangeet2@in.ibm.com>
+# @brief Main Module to support CMVC operation
+#
+# Created on March 03, 2016
+# ----------------------------------------------------
+# @version Developer Date Description
+# ----------------------------------------------------
+# 1.0 gkeishin 03/03/16 Initial create
+###########################################################
+'''
+
+#-------------------------
+# Imports
+#-------------------------
+import getopt
+import os, sys, glob
+import shutil
+
+# Libraries/utility funcs and user define const
+import sbeCmvcConstants as errorcode
+import sbeCmvcUtility as utilcode
+
+#-------------------------
+# Main Function
+#-------------------------
+def main():
+
+ #------------------------------------------
+ # Usage tool option
+ #------------------------------------------
+ def usage():
+ print " ---------------------------------------------------------------------------------------------------"
+ print " :: Command line USAGE options for Uploading FW SBE image to CMVC :: \n"
+ print " CommitSbeImageToCMVC.py -d <cmvc defect/feature id> -r <fips release> -p <SBE repo Path> -i <file1,file2.file3>"
+
+ print " \n"
+ print " +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
+ print " | By default user MUST pass CMVC/Release/Path input. |"
+ print " +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
+ print " \n ***** Options Supported *****"
+ print " \t -d,--defect = Defect CMVC number"
+ print " \t -f,--feature = Feature CMVC number"
+ print " \t -r,--release = FW fips release string EX: fips910"
+ print " \t -p,--path = Absolute path of the SBE repo"
+ print " \t -i,--input = [ Optional ] List of image or file to upload"
+ print " \t -b,--bvt = BVT xml file for CI"
+ print " \t -h,--help = Help"
+ print " ------------------------------------------------------------------------------------"
+
+ #------------------------------------------
+ # Exit from this Main
+ #------------------------------------------
+ def exit_main(rc):
+ if rc == errorcode.HELP_EXIT:
+ print " [ HELP DOCUMENTATION ]\n"
+ sys.exit(0)
+
+ if rc:
+ print "\n [ ERROR - MAIN ] Exiting with error code = ", rc
+ sys.exit(rc)
+ else:
+ print "\n SBE Image Upload to CMVC completed [ OK ] "
+ sys.exit(0)
+
+ #------------------------------------------
+ # Local var place name holder's
+ #------------------------------------------
+ defect_num = "None"
+ feature_num = "None"
+ release_name = "None"
+ path_name = "None"
+ file_name = "None"
+ bvt = "None"
+
+ #----------------------------
+ # Read command line args
+ #----------------------------
+ opts, args = getopt.getopt(sys.argv[1:],"d:f:r:p:i:b:h",['defect=', 'feature=', 'release=', 'path=', 'input=', 'bvt=', 'help'])
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage()
+ exit_main(errorcode.HELP_EXIT)
+ elif opt in ('-d', '--defect'):
+ defect_num = arg
+ elif opt in ('-f', '--feature'):
+ feature_num = arg
+ elif opt in ('-r', '--release'):
+ release_name = arg
+ elif opt in ('-p', '--path'):
+ path_name = arg
+ elif opt in ('-i', '--input'):
+ file_name = arg
+ elif opt in ('-b', '--bvt'):
+ bvt = arg
+ else:
+ usage()
+ exit_main(errorcode.ERROR_EXIT)
+
+ #----------------------------------
+ # Preping the data for Image Upload
+ #----------------------------------
+ print " \n"
+ print " ******************************************************"
+ print " ****** Stagging PPE image Files Upload to CMVC ******"
+ print " ******************************************************"
+
+ #------------------------------------------------------
+    # Make sure that it has passed at least one arg with it
+ #------------------------------------------------------
+ if len(sys.argv)<2:
+ usage()
+ exit_main(errorcode.ERROR_SYS_EXIT)
+
+ #-------------------------------------------------------------
+ # Check user inputs and display
+ #-------------------------------------------------------------
+ def input_setting():
+ print " ---------------------------------------------------------------------------------"
+ print " [ Display User Inputs ]"
+ print " ---------------------------------------------------------------------------------"
+ # Look elsewhere for the name instead of assigning to it locally.
+        # This is now a global var
+ global g_cmvc_num
+ if ( defect_num != "None" ) or ( feature_num != "None" ):
+ if not defect_num == "None":
+ g_cmvc_num = 'D'+defect_num #D1234
+ print " [ CMVC Defect ] \t\t#Number\t = %s"%(g_cmvc_num)
+ else:
+ g_cmvc_num = 'F'+feature_num # F1234
+ print " [ CMVC Feature ] \t\t#Number\t = %s"%(g_cmvc_num)
+ else:
+ print " [ CMVC Defect/Feature ] Neeed CMVC number. This can't be empty "
+ exit_main(errorcode.ERROR_EXIT)
+
+ if not release_name == "None":
+ print " [ Fips Release Name ] \t#String\t = %s"%(release_name)
+ else:
+ print " [ Fips release ] Neeed fips release string. This can't be empty "
+ exit_main(errorcode.ERROR_EXIT)
+
+ if not path_name == "None":
+ print " [ Build Repo Path ] \t\t#String\t = %s"%(path_name)
+ else:
+ print " [ User Build Repo Path ] \t\t#String\t = %s"%(path_name)
+
+ # Optional, by default looks up predefined files
+ if not file_name == "None":
+ print " [ Files for Check-in - User List ]"
+ for files in file_name.split(","):
+ print " \t\t\t\t#",files
+ else:
+ print " [ Files for Check-in - Default List ]"
+ for files in errorcode.CMVC_FILE_LIST.split(","):
+ print " \t\t\t\t# ",files
+
+ print " ---------------------------------------------------------------------------------"
+
+ #-------------------------------------------------------------
+ # CMVC ENV check
+ #-------------------------------------------------------------
+ def UserCmvcENV():
+        # Assumes the CMVC config is there in the user bash ENV
+ # In .bashrc the CMVX ENV would look like this
+ # CMVC specific example
+ #-----------------------------------------------------------
+ #export CMVC_FAMILY=aix@auscmvc1.austin.ibm.com@2035
+ #export CMVC_BECOME=gkeishin
+ #export CMVC_AUTH_METHOD=PWD
+ #-----------------------------------------------------------
+
+ l_found_cmvc_conf = False
+ for key in os.environ.keys():
+ if "CMVC" in key:
+ print "\t %s : %s" % (key,os.environ[key])
+ l_found_cmvc_conf = True
+
+ if l_found_cmvc_conf == False:
+ print "\n [ ERROR SETTING ] : The CMVC specific ENV is not set"
+ print " Please add the following CMVC details in ~/.bashrc"
+ print " ------------------------------------------------------"
+ print " export CMVC_FAMILY=aix@<yourcmvcdomian>@<portnumber>"
+ print " export CMVC_BECOME=<your cmvc id>"
+ print " export CMVC_AUTH_METHOD=PWD"
+ print " ------------------------------------------------------"
+ return errorcode.ERROR_SETTING
+
+ return errorcode.SUCCESS_EXIT
+
+ # Testing CMVC login session.. probe
+ def CheckCmvcAccess():
+ cmd='File -view src/sbei/sbfw/img/sbe_seeprom.bin -family aix -release fips910 >/dev/null 2>&1'
+ rc = os.system(cmd)
+ if rc:
+ return errorcode.ERROR_CMVC_LOGIN
+
+ #---------------------------------------------
+    # Calling the Func defs in order
+ #---------------------------------------------
+
+ #------------------------------
+ # 1) User input params/ Check ENV
+ #------------------------------
+ input_setting()
+
+ print "\n [ Checking PPE ENV Pre-req ] "
+ # Check if User has passed the path, else get it from ENV
+ if path_name == "None":
+ # Get the PPE path
+ l_ppe_path = utilcode.utilppeSbENV("SBEROOT")
+ if l_ppe_path == "None":
+ print " PPE Repo ENV Setting Path : [ ERROR CODE: %s ] " % l_ppe_path
+ exit_main(errorcode.ERROR_SETTING)
+ else:
+ print " PPE Repo path Setting : [ %s ]"% l_ppe_path
+ path_name = l_ppe_path
+
+ print "\n [ Checking CMVC user ENV Pre-req ] "
+ rc_code = UserCmvcENV()
+ if rc_code == errorcode.SUCCESS_EXIT :
+ print " CMVC Setting : [ OK ] "
+ else:
+ print " CMVC Setting : [ ERORR CODE: %s ]"% rc_code
+
+ #------------------------------
+ # 2) Check the CMVC login access
+ #------------------------------
+ print "\n [ Checking CMVC user Login Session access ] "
+ rc_cmvc = CheckCmvcAccess()
+ if rc_cmvc == errorcode.ERROR_CMVC_LOGIN:
+ print " CMVC Login Access : [ ERORR CODE: %s ]"% rc_cmvc
+ print "\t No cmvc login was found in this session."
+ print "\t Issue the cmvclog command to establish a login and re-run."
+ print "\t Command : cmvclog -in <login user id>"
+ exit_main(rc_cmvc)
+ else:
+ print " CMVC Session Login : [ OK ] "
+
+ # Call API/Utility funcs def here
+ #------------------------------
+ # 3) Check track status
+ #------------------------------
+ print "\n [ Checking CMVC track state ] "
+ l_trackFix = utilcode.utilCheckTrackState(g_cmvc_num,release_name)
+ if l_trackFix == errorcode.SUCCESS_TRACK_STATE :
+ print " Track in fix state. Suitable to continue."
+ else :
+ print " Track not in fix state. Aborting activity."
+ return errorcode.ERROR_TRACK_STATE
+
+ #------------------------------
+ # 4) Checkout the code
+ #------------------------------
+ print "\n [ Creating Sandbox ]"
+ origDir = os.getcwd()
+ utilcode.utilCmvcChangeDir(g_cmvc_num)
+ sbDir = os.getcwd()
+
+ print "\n [ Checkout Files from CMVC ] "
+ print " ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
+ if file_name == "None":
+ file_name = errorcode.CMVC_FILE_LIST
+ for l_filename in file_name.split(","):
+ rc_checkout = utilcode.utilCmvcCheckout(l_filename,release_name,g_cmvc_num)
+ if rc_checkout == errorcode.ERROR_CMVC_CHECKOUT:
+ print " [ CMVC File Checkout Failed ] [Error code : %s]\t:%s"%(rc_checkout,l_filename)
+            # Undo checkout.. don't check errors, just do it
+ utilcode.utilRollBack("checkout",g_cmvc_num,release_name)
+ # Return to initial directory of operation
+ os.chdir(origDir)
+ shutil.rmtree(sbDir)
+ exit_main(rc_checkout)
+ else:
+ print " CMVC File Checkout [ OK ]"
+ print " ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
+
+ #------------------------------
+ # 4) Copy the binaries and file
+ #------------------------------
+ # Find the files from the repo and copy to the Checkout dir
+ print "\n [ Find files and Overide the checkout file ] "
+ print " ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
+ rc_copy = utilcode.utilCmvcRepoPath(path_name,g_cmvc_num,file_name)
+ if rc_copy == errorcode.ERROR_CMVC_FILE_COPY:
+ print " [ File copy Failed ] [ Error code : %s]"%rc_copy
+ # Return to initial directory of operation
+ os.chdir(origDir)
+ shutil.rmtree(sbDir)
+ exit_main(rc_copy)
+ else:
+ print " Files Copied Successfully : [ OK ] "
+
+ print " ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
+
+ #---------------------
+ # 6) Checkin the files
+ #---------------------
+ print "\n [ Check-in Files from CMVC ] "
+ print " ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
+ if file_name == "None":
+ file_name = errorcode.CMVC_FILE_LIST
+ for files in file_name.split(","):
+ rc_checkin = utilcode.utilCmvcCheckin(files,release_name,g_cmvc_num)
+ if rc_checkin == errorcode.ERROR_CMVC_CHECKIN:
+ print " [ CMVC File Checkin Failed ] [Error code : %s]\t:%s"%(rc_checkin,files)
+            # Undo checkin.. don't check errors, just do it
+ utilcode.utilRollBack("checkin",g_cmvc_num,release_name)
+ # Return to initial directory of operation
+ os.chdir(origDir)
+ shutil.rmtree(sbDir)
+ exit_main(rc_checkin)
+ else:
+ print " CMVC File Checkin [ OK ]"
+ print " ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
+
+ #---------------------------------
+ # 7) Fix record the defect/release
+ #---------------------------------
+ print "\n [ Fix the Record to complete in CMVC ] "
+ utilcode.utilCmvcFixComplete(g_cmvc_num, release_name)
+
+ #---------------------------------
+ # 8) Trigger Jenkins CI Job
+ #---------------------------------
+ print "\n [ Trigering Jenkins job ] "
+ rc_ci = utilcode.utilTriggerJenkins(g_cmvc_num, release_name, bvt)
+ if rc_ci == errorcode.ERROR_CI_TRIGGER :
+ print " [ CI Trigger Failed ] [Error code : %s]\t"%(rc_ci)
+ # Return to initial directory of operation
+ os.chdir(origDir)
+ shutil.rmtree(sbDir)
+ exit_main(rc_ci)
+ else :
+ print " CI Trigger [ OK ]"
+
+ #-----------------------------------------
+ # Return to initial directory of operation
+ #-----------------------------------------
+ os.chdir(origDir)
+ shutil.rmtree(sbDir)
+
+ # Clean exit
+ print "\n [ Manually Integrate on CMVC post CI ] "
+ exit_main(errorcode.SUCCESS_EXIT)
+
+
+if __name__=="__main__":
+ main()
+
diff --git a/src/build/tools/Debug/sbe-debug.py b/src/build/tools/Debug/sbe-debug.py
new file mode 100755
index 00000000..a6c20c47
--- /dev/null
+++ b/src/build/tools/Debug/sbe-debug.py
@@ -0,0 +1,136 @@
+#!/usr/bin/python
+# IBM_PROLOG_BEGIN_TAG
+# This is an automatically generated prolog.
+#
+# $Source: src/build/tools/Debug/sbe-debug.py $
+#
+# OpenPOWER sbe Project
+#
+# Contributors Listed Below - COPYRIGHT 2016
+#
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied. See the License for the specific language governing
+# permissions and limitations under the License.
+#
+# IBM_PROLOG_END_TAG
+import os
+import subprocess
+import re
+import random
+import argparse
+import sys
+err = False
+
+syms = {};
+
+def fillSymTable(sbeObjDir):
+ symFile = sbeObjDir + "/sbe.syms"
+ f = open( symFile, 'r' )
+ for line in f:
+ words = line.split()
+ if( len( words ) == 4 ):
+ syms[words[3]] = [words[0], words[1]]
+
+def collectTrace( hwpBinDir, sbeObjDir, target, proc ):
+ cmd1 = ("."+hwpBinDir+"/p9_sbe_pibMemDump_wrap.exe " + \
+ syms['g_pk_trace_buf'][0] +\
+ " " + syms['g_pk_trace_buf'][1] + " " + target)
+ cmd2 = "." + "/ppe2fsp dumpPibMem sbetrace.bin "
+ cmd3 = ("." + "/fsp-trace -s " + sbeObjDir +\
+ "/trexStringFile sbetrace.bin > "+\
+ "sbe_"+str(proc)+"_tracMERG")
+ cmd4 = "mv dumpPibMem dumpPibMem_trace"
+ print "\ncollecting trace with commands -\n"
+ print "cmd1:", cmd1
+ rc = os.system( cmd1 )
+ if ( rc ):
+ print "ERROR running %s: %d " % ( cmd1, rc )
+ return 1
+
+ print "cmd2:", cmd2
+ rc = os.system( cmd2 )
+ if ( rc ):
+ print "ERROR running %s: %d " % ( cmd2, rc )
+ return 1
+
+ print "cmd3:", cmd3
+ rc = os.system( cmd3 )
+ if ( rc ):
+ print "ERROR running %s: %d " % ( cmd3, rc )
+ return 1
+
+ print "cmd4:", cmd4
+ rc = os.system( cmd4 )
+ if ( rc ):
+ print "ERROR running %s: %d " % ( cmd4, rc )
+ return 1
+
+def collectAttr( hwpBinDir, sbeObjDir, target, proc ):
+ cmd1 = ("."+hwpBinDir+"/p9_sbe_pibMemDump_wrap.exe " +\
+ syms['G_sbe_attrs'][0] + " " + \
+ syms['G_sbe_attrs'][1] + " " + target)
+ cmd2 = "mv dumpPibMem sbeAttr.bin"
+ cmd3 = ("."+ sbeObjDir + "/p9_xip_tool " +\
+ sbeObjDir + "/sbe_seeprom.bin -ifs attrdump sbeAttr.bin > "+\
+ "sbe_"+str(proc)+"_attrs")
+ print "\ncollecting attributes with commands -\n"
+ print "cmd1:", cmd1
+ rc = os.system( cmd1 )
+ if ( rc ):
+ print "ERROR running %s: %d " % ( cmd1, rc )
+ return 1
+
+ print "cmd2:", cmd2
+ rc = os.system( cmd2 )
+ if ( rc ):
+ print "ERROR running %s: %d " % ( cmd2, rc )
+ return 1
+
+ print "cmd3:", cmd3
+ rc = os.system( cmd3 )
+ if ( rc ):
+ print "ERROR running %s: %d " % ( cmd3, rc )
+ return 1
+
+def main( argv ):
+ parser = argparse.ArgumentParser( description = "SBE Dump Parser" )
+
+ parser.add_argument( '-hwpBinDir', type=str, default = os.getcwd(), \
+ help = 'Path of p9_sbe_pibMemDump_wrap.exe')
+ parser.add_argument( '-sbeObjDir', type=str, default = os.getcwd(), \
+ help = 'Path of sbe.syms file')
+ parser.add_argument( '-l', '--level', choices = ['all', 'trace', 'attr'],\
+ default='all', help = 'Parser level' )
+ parser.add_argument( '-t', '--target', choices = ['AWAN', 'HW'], \
+ required = 'true', help = 'Target type' )
+ parser.add_argument( '-p', '--proc', type=int , default = 0, \
+ help = 'Proc Number' )
+
+ args = parser.parse_args()
+
+ if ( args.target == 'AWAN' ):
+ target = "1"
+ elif ( args.target == 'HW' ):
+ target = "0"
+
+ fillSymTable(args.sbeObjDir)
+ if ( args.level == 'all' ):
+ print "Parsing everything"
+ collectTrace( args.hwpBinDir, args.sbeObjDir, target, args.proc )
+ collectAttr( args.hwpBinDir, args.sbeObjDir, target, args.proc )
+ elif ( args.level == 'trace' ):
+ collectTrace( args.hwpBinDir, args.sbeObjDir, target, args.proc )
+ elif ( args.level == 'attr' ):
+ collectAttr( args.hwpBinDir, args.sbeObjDir, target, args.proc )
+
+if __name__ == "__main__":
+ main( sys.argv )
diff --git a/src/build/tools/README b/src/build/tools/README
new file mode 100644
index 00000000..3f27a6b0
--- /dev/null
+++ b/src/build/tools/README
@@ -0,0 +1,66 @@
+--------------------
+CONTENT OF THIS FILE
+--------------------
+1. Tools:
+ * Automate SBE Image integration into FIPS driver
+ * Developer SBE fips tool for copy and compile
+
+
+-------------------------------------------------
+* Automate SBE Image integration into FIPS driver
+-------------------------------------------------
+ [ Files ] : CommitSbeImageToCMVC.py (Main)
+ sbeCmvcConstants.py
+ sbeCmvcUtility.py
+
+    [ Brief ] : This tool will check in the SBE FW fips files to CMVC, Fix Complete the track and trigger the Jenkins Job.
+
+ [ Feature ] :
+ - By default, the tool will check in the files from a default list pre-defined for sbe
+ Refer: sbeCmvcConstants.py
+
+ - User can selectively choose which file/files it wants to upload to CMVC from the default list
+
+        - The tool rolls back all the checkouts/checkins in case of CMVC Operation failure
+
+ - The tool will auto complete fix record on completion of successful code check in
+
+ - The internal of the work flow validates the Hash of the files on the SBE repo vs files to be
+ check in to be sure that the files are correctly uploaded
+
+ - The pre-req checks CMVC bash env and CMVC access session availability and provides verbose
+ help to follow on failures
+
+        - Jenkins job trigger at the end after fix completing the record.
+
+
+------------------------------------------------------------------
+* Developer SBE fips tool for copy,compile & simics action patches
+------------------------------------------------------------------
+
+ [ Files ] : sbeDistribute.py (Main)
+ sbePatchUtility.py
+ sbeCmvcConstants.py
+ sbeCmvcUtility.py
+
+ [ Brief ] : This tool will copy the SBFW files from PPE repo to fips sandbox and compile.
+                Additionally it would patch the simics action files at the end of compilation.
+
+ [ Feature ] :
+ - By default, the tool doesn't need any input as an argument
+
+ - Refers the Sandbox and Repo ENV and identifies which fips sandbox and PPE Repo needed.
+
+        - Finds the pre-defined list of files in the PPE repo and copies them to the fips sandbox
+ Refer: sbeCmvcConstants.py
+
+ - Loads the ENV and compiles the fips sandbox code.
+
+ - User can create a fips sandbox manually and still use this tool by passing -s <sandbox name >
+ as an argument
+
+        - User can specify files as input, but -ONLY- those pre-defined files, selectively, using option
+ -i <file1,file2>
+
+ - Sets up simics directory and patches the action files needed.
+
diff --git a/src/build/tools/conv_rel_branch.pl b/src/build/tools/conv_rel_branch.pl
new file mode 100755
index 00000000..05ef52ea
--- /dev/null
+++ b/src/build/tools/conv_rel_branch.pl
@@ -0,0 +1,339 @@
+#!/usr/bin/perl
+# IBM_PROLOG_BEGIN_TAG
+# This is an automatically generated prolog.
+#
+# $Source: src/build/tools/conv_rel_branch.pl $
+#
+# OpenPOWER sbe Project
+#
+# Contributors Listed Below - COPYRIGHT 2016
+#
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied. See the License for the specific language governing
+# permissions and limitations under the License.
+#
+# IBM_PROLOG_END_TAG
+
+use strict;
+use warnings;
+use Data::Dumper;
+use Getopt::Long qw(:config pass_through);
+
+# Globals
+my %power_platforms = ();
+my %relations = ();
+my $latest_power_platform = "";
+my $fsp_ci_jenkins_rel_file = "/gsa/ausgsa/home/f/s/fspcibld/patches/cipatch_xml";
+use constant MASTER => "master";
+my $debug = 0;
+my $help = 0;
+
+# Set local variables based on ENV variables
+my $PROJECT_ROOT = `git rev-parse --show-toplevel`;
+
+# Parse command line parameters
+GetOptions("debug!" => \$debug,
+ "help" => \$help);
+
+# List of commands tool can run
+my %commands = ( "rtob" => \&execute_rel_to_branch,
+ "btor" => \&execute_branch_to_rel,
+ );
+
+if ($help)
+{
+ execute_help();
+}
+else
+{
+ my $command = shift @ARGV;
+ if ($commands{$command})
+ {
+ system("git remote -v | grep gerrit -q");
+ die "Gerrit remote DNE, must run in repo with gerrit remote" if $?;
+ # Build release to branch relation hash.
+ build_relations();
+ &{$commands{$command}}();
+ }
+ else
+ {
+ execute_help();
+ }
+}
+
+############################## Begin Actions ###################################
+
+sub execute_rel_to_branch
+{
+ my $release = "";
+
+ GetOptions("release:s" => \$release);
+ die "Missing release" if $release eq "";
+
+ # Get power platform associated with release
+ my $power_platform = get_power_platform($release);
+ # Find release in relations hash.
+ my $branch = $relations{$power_platform}{$release};
+ die "Fips release => $release has no corresponding gerrit branch" if (!$branch);
+ print "$branch \n";
+}
+
+sub execute_branch_to_rel
+{
+ my $branch = "";
+
+ GetOptions("branch:s" => \$branch);
+ die "Missing branch" if $branch eq "";
+
+ # Get power platform associated with branch
+ my $power_platform = get_power_platform($branch);
+
+ # Find branch in relations hash.
+ my $release = "";
+ if( $power_platform )
+ {
+ foreach my $rel (sort keys %{$relations{$power_platform}})
+ {
+ if ($relations{$power_platform}{$rel} eq "$branch")
+ {
+ $release = $rel;
+ last;
+ }
+ }
+ }
+ die "Gerrit branch => $branch has no corresponding fips release" if ($release eq "");
+ print "$release \n";
+}
+
+sub execute_help
+{
+ my $command = shift @ARGV;
+
+ if ($command eq "")
+ {
+ print " Usage:\n";
+ print " conv_rel_branch <subtool> [options]\n\n";
+ print " Tool to convert fips release to branches and vice versa\n\n";
+ print " Requires:\n";
+ print " Tool to run in git repo that has gerrit remote\n\n";
+ print " Available subtools:\n";
+ foreach my $key (sort keys %commands)
+ {
+ print " $key\n";
+ }
+ print "\n";
+ print " Global options:\n";
+ print " --debug Enable debug mode.\n";
+ print " --help Display help on a specific tool.\n";
+ }
+ elsif (not defined $commands{$command})
+ {
+ die "Unknown subcommand: $command.\n";
+ }
+ else
+ {
+ my %help = (
+ "rtob" =>
+q(
+ Convert release to branch
+
+ Options:
+ --release=<release> Fips release name (e.g. fips810) [required].
+),
+ "btor" =>
+q(
+ Convert branch to fips release
+
+ Options:
+ --branch=<remote-gerrit-branch> Remote gerrit branch (e.g. release-fips910, master) [required].
+),
+ );
+
+ print "rel_branch $command:";
+ print $help{$command};
+ }
+}
+
+############################## Begin Sub Routines ##############################
+
+# sub get_release_branches
+#
+# Get all branches in gerrit that are prefixed 'release-' and remove the prefix
+# *Note branches with string 'master' are removed from this to result in direct
+# matches of fips releases only. Master branches will be derived later.
+#
+# @return array - sorted names of branches (e.g release-fips810 ...)
+#
+sub get_release_branches
+{
+ chdir($PROJECT_ROOT);
+ die $? if ($?);
+
+ # Parse for remote gerrit branches associated directly with a release
+ my $cmd = "git branch -a | grep -e remotes/gerrit/release";
+ $cmd .= " | sed -e 's/^[ \\t]*remotes\\/gerrit\\///'";
+
+ my @release_branches = sort (split('\n',`$cmd`));
+ print "Release Branches:\n" if $debug;
+ print Dumper \@release_branches if $debug;
+ return @release_branches;
+}
+
+# sub get_fips_releases
+#
+# Get all fips releases that fsp-ci-jenkins uses in sorted order.
+#
+# @return array - sorted names of releases (e.g fips910, fips920, etc)
+#
+sub get_fips_releases
+{
+ chdir($PROJECT_ROOT);
+ die $? if ($?);
+
+ # Parse fsp-ci-jenkins xml file for releases
+ my $cmd = "cat $fsp_ci_jenkins_rel_file | grep release | ";
+ $cmd .= "sed -e 's/^[ \\t]*<release>//' -e 's/<\\/release>[ \\t]*//'";
+
+ my @fips_releases = sort (split('\n',`$cmd`));
+ print "Fips Release:\n" if $debug;
+ print Dumper \@fips_releases if $debug;
+ return @fips_releases;
+}
+
+# sub get_power_platform
+#
+# Takes a references (release or gerrit branch) and determines the power
+# platform it belongs to.
+#
+# @return string - power platform (e.g. p8, p9)
+#
+sub get_power_platform
+{
+ my $reference = shift;
+
+ my $power_platform = "";
+ if ($reference =~ m/master/)
+ {
+ # Remove prefix from older platforms (e.g. master-p8). If nothing is
+ # replaced then it is the latest power platform.
+ $reference =~ s/master-//;
+ if ($reference eq MASTER)
+ {
+ $power_platform = $latest_power_platform;
+ }
+ else
+ {
+ $power_platform = $reference;
+ }
+ }
+ else
+ {
+ ($power_platform) = $reference =~ /fips(.*)[0-9][0-9]/;
+ if ( $power_platform )
+ {
+ $power_platform = "p$power_platform";
+ $power_platforms{$power_platform} = 1;
+ }
+ }
+ return $power_platform;
+}
+
+
+# sub branchExists
+
+sub branchExists
+{
+ my $branch = shift;
+ chomp($branch);
+ my $brChk = `git branch -a | grep $branch`;
+ if ($brChk eq "")
+ {
+ return 0;
+ }
+ else
+ {
+ return 1;
+ }
+}
+
+# sub build_relations
+#
+# Build a relationship hash between fips releases and gerrit branches using
+# fsp-ci-jenkins xml and git branch command within PPE.
+#
+# Structure:
+# power_platform =>
+# fips-release => gerrit-branch
+# Example:
+# p9 =>
+# fips910 => master
+#
+sub build_relations
+{
+ my @releases = get_fips_releases();
+ my @branches = get_release_branches();
+
+ # Fill in fips release keys
+ foreach my $release (@releases)
+ {
+ my $power_platform = get_power_platform($release);
+ $relations{$power_platform}{$release} = "";
+ }
+
+ # Fill in fips release values, which are gerrit release branches.
+ foreach my $branch (@branches)
+ {
+ my $power_platform = get_power_platform($branch);
+ my $release = $branch;
+ $release =~ s/release-//;
+ $relations{$power_platform}{$release} = $branch;
+ }
+
+ # Handle master branches for each platform
+ my @platforms = sort keys %power_platforms;
+ foreach my $i (0 .. $#platforms)
+ {
+ my $power_platform = $platforms[$i];
+
+        # Latest power platform matches branch master
+ my $master = MASTER;
+ # Previous power platforms match branch "master-$platform"
+ if ($i < $#platforms)
+ {
+ $master = MASTER."-$power_platform";
+ }
+ else
+ {
+ # Set latest power platform
+ $latest_power_platform = $power_platform;
+ }
+
+ # Check for first fips release without a gerrit branch. Due to sort
+ # order, it will be the next in order release. It is done this way
+ # to avoid issues when fips releases are ahead of gerrit branches. In
+ # other words it is possible to have fips releases past gerrit master.
+ foreach my $release (sort keys %{$relations{$power_platform}})
+ {
+ if ($relations{$power_platform}{$release} eq "")
+ {
+ if (branchExists($master))
+ {
+ $relations{$power_platform}{$release} = $master;
+ }
+ last;
+ }
+ }
+ }
+
+ print "Relations:\n" if $debug;
+ print Dumper \%relations if $debug;
+}
diff --git a/src/build/tools/gitRelease.pm b/src/build/tools/gitRelease.pm
new file mode 100755
index 00000000..e6a52e87
--- /dev/null
+++ b/src/build/tools/gitRelease.pm
@@ -0,0 +1,2102 @@
+#!/usr/bin/perl
+# IBM_PROLOG_BEGIN_TAG
+# This is an automatically generated prolog.
+#
+# $Source: src/build/tools/gitRelease.pm $
+#
+# OpenPOWER sbe Project
+#
+# Contributors Listed Below - COPYRIGHT 2016
+#
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied. See the License for the specific language governing
+# permissions and limitations under the License.
+#
+# IBM_PROLOG_END_TAG
+use strict;
+
+use Getopt::Long qw(:config pass_through);
+use POSIX;
+use Text::Wrap;
+use List::Util 'max';
+use Term::ReadKey;
+use File::Temp qw/tempfile/;
+
+use Data::Dumper;
+
+my $debug = 0;
+my $help = 0;
+
+# default globals
+my %globals = ( branch => "master");
+# Token used by git-CI-tool @execute_discover, update there too
+my $TOKEN = "!@#%^";
+
+# Global options; pass_through (set above) leaves subcommand options in @ARGV
+# for the per-subcommand GetOptions calls.
+GetOptions("debug!" => \$debug,
+           "help" => \$help,
+           "branch:s" => \$globals{"branch"});
+
+# Subcommand dispatch table: name => handler.
+my %commands = ( "define" => \&execute_define,
+                 "undef" => \&execute_undef,
+                 "list-levels" => \&config_print_levels,
+                 "query-gerrit" => \&execute_gerrit_query,
+                 "query-git" => \&execute_git_query,
+                 "query-level" => \&execute_level_query,
+                 "add-patch" => \&execute_add_patch,
+                 "add-forcedep" => \&execute_add_forcedep,
+                 "verify-patches" => \&execute_verify_patches,
+                 "release" => \&execute_release,
+                 "build-name" => \&execute_build_name,
+                 "gerrit-commit" => \&execute_gerrit_commit,
+                 "help" => \&execute_help,
+    );
+
+if ($help)
+{
+    execute_help();
+}
+
+# First positional argument selects the subcommand; unknown or missing
+# commands fall back to the general help text.
+my $command = shift @ARGV;
+if ($commands{$command})
+{
+    &{$commands{$command}}();
+}
+else
+{
+    execute_help();
+}
+
+# Anything left over was not consumed by any GetOptions pass.
+foreach my $arg (@ARGV)
+{
+    print "Unprocessed arg: $arg\n" if $debug;
+}
+
+############################## Begin Actions ##################################
+
+# Subcommand: define a new release level from a name, a baseline commit and
+# the previous released commit.  Symbolic refs are resolved to commit hashes
+# before the level is stored in the config file.
+sub execute_define
+{
+    print "Defining new level...\n";
+
+    my %level = ();
+
+    # Bug fix: the help text documents --base, but only --baseline was
+    # accepted.  Accept both spellings (backward compatible).
+    GetOptions("level:s" => \$level{name},
+               "name:s" => \$level{name},
+               "base:s" => \$level{base},
+               "baseline:s" => \$level{base},
+               "released:s" => \$level{released});
+
+    die "Missing level name" if ($level{name} eq "");
+    die "Missing baseline name" if ($level{base} eq "");
+    die "Missing released level name" if ($level{released} eq "");
+
+    print "New level: ".$level{name}.":".$level{base}.":".$level{released}."\n"
+        if $debug;
+
+    # Store resolved commit hashes, not symbolic names.
+    $level{base} = git_resolve_ref($level{base});
+    $level{released} = git_resolve_ref($level{released});
+
+    config_add_level(\%level);
+}
+
+# Subcommand: delete a previously defined release level.  The level name is
+# taken as the next positional argument and must already be defined.
+sub execute_undef
+{
+    my $name = shift @ARGV;
+
+    die "Level to undefine not given" if ($name eq "");
+
+    # Refuse to touch levels that were never defined.
+    my $defined_levels = config_list_levels();
+    die "Level $name does not exist"
+        unless (defined $defined_levels->{$name});
+
+    print "Undefining level $name...\n";
+    config_del_level($name);
+}
+
+# Subcommand: list open gerrit changes (subject + change-id) for a project
+# on the selected branch.  Defaults to this repository's configured project.
+sub execute_gerrit_query
+{
+    my $project = "";
+
+    GetOptions("project:s" => \$project);
+
+    if ("" eq $project)
+    {
+        $project = config_project();
+    }
+
+    my $items = gerrit_query("status:open project:$project ".
+                             "branch:".$globals{"branch"});
+
+    foreach my $item (@$items)
+    {
+        # Only entries with a "project" key are real changes -- presumably
+        # this filters gerrit's trailing stats record; confirm against
+        # gerrit_query's output format.
+        if (defined $item->{"project"})
+        {
+            print wrap("","",$item->{"subject"}) . "\n";
+            print "\t" . $item->{"id"} . "\n";
+            print "\n";
+        }
+    }
+}
+
+# Subcommand: list merged commits (subject + hash) that are on the release
+# branch but not in the level's baseline, i.e. commits not yet destined for
+# the level.
+sub execute_git_query
+{
+    my $level = "";
+
+    GetOptions("name:s" => \$level,
+               "level:s" => \$level);
+
+    die "Missing level name" if ($level eq "");
+
+    $globals{"branch"} = git_resolve_ref($globals{"branch"});
+
+    # Bug fix: this was "my $level = config_get_level($level)", re-declaring
+    # $level in the same scope and masking the parsed level name.
+    my $level_info = config_get_level($level);
+
+    # Commits reachable from the branch but not from the level's base.
+    my $commits = git_commit_history($globals{"branch"}, $level_info->{base});
+
+    foreach my $commit (@{$commits})
+    {
+        my $subject = git_get_subject($commit);
+        print "$subject\n\t$commit\n\n";
+    }
+}
+
+# Subcommand: display a level's definition -- base, released commit, branch
+# (when not master), patch list and forced dependencies.
+sub execute_level_query
+{
+    my $level = "";
+
+    GetOptions("name:s" => \$level,
+               "level:s" => \$level);
+
+    die "Missing level name" if ($level eq "");
+
+    my $level_info = config_get_level($level);
+
+    print "Level $level\n";
+    print "  Base: \n";
+    # Show symbolic names so the output is human readable.
+    print "    ".git_name_rev($level_info->{base})."\n";
+    print "  Released:\n";
+    print "    ".git_name_rev($level_info->{released})."\n";
+    # config_get_level updates $globals{branch} when the level has one.
+    if ($globals{"branch"} ne "master")
+    {
+        print "  Branch:\n";
+        print "    ".$globals{"branch"}."\n";
+    }
+    print "  Patches:\n";
+    foreach my $patch (sort @{$level_info->{patches}})
+    {
+        print "    $patch\n";
+    }
+    print "  Forced Deps:\n";
+    foreach my $dep (sort keys %{$level_info->{forceDeps}})
+    {
+        my $deps = $level_info->{forceDeps};
+        print "    $dep =>\n";
+        print "        ".$deps->{$dep}."\n";
+    }
+}
+
+
+# Subcommand: append a commit (or gerrit change-id) to a level's patch list.
+sub execute_add_patch
+{
+    my ($level, $patch) = ("", "");
+
+    GetOptions("name:s" => \$level,
+               "level:s" => \$level,
+               "patch:s" => \$patch);
+
+    die "Missing level name" if ($level eq "");
+    die "Missing patch name" if ($patch eq "");
+
+    config_add_patch($level, $patch);
+}
+
+# Subcommand: record a forced ordering dependency (from depends on to) for
+# a level, used later when ordering cherry-picks.
+sub execute_add_forcedep
+{
+    my ($level, $from, $to) = ("", "", "");
+
+    GetOptions("name:s" => \$level,
+               "level:s" => \$level,
+               "from:s" => \$from,
+               "to:s" => \$to);
+
+    die "Missing level name" if ($level eq "");
+    die "Missing from depend" if ($from eq "");
+    die "Missing to depend" if ($to eq "");
+
+    config_add_dep($level, $from, $to);
+}
+
+# Subcommand: resolve a level's patch list to commit hashes and report any
+# potential missing dependencies against the level's base.
+sub execute_verify_patches
+{
+    my $name = "";
+
+    GetOptions("name:s" => \$name,
+               "level:s" => \$name);
+
+    die "Missing level name" if ($name eq "");
+
+    my $info = config_get_level($name);
+
+    config_verify_patches($info->{base},
+                          gerrit_resolve_patches($info->{patches}));
+}
+
+# Subcommand: build the release branch (and tag) for a defined level.
+sub execute_release
+{
+    my $name = "";
+
+    GetOptions("name:s" => \$name,
+               "level:s" => \$name);
+
+    die "Missing level name" if ($name eq "");
+
+    # Second argument requests tag creation.
+    config_release(config_get_level($name), 1);
+}
+
+# Subcommand: print a date-derived build name, e.g. sbe0402a_1412.910
+# (prefix, month/day, build letter, 2-digit year + ISO week, release).
+sub execute_build_name
+{
+    # Defaults: release 910, build letter "a", prefix "sbe".
+    my ($release, $build_letter, $build_prefix) = ("910", "a", "sbe");
+
+    GetOptions("release:s" => \$release,
+               "letter:s" => \$build_letter,
+               "prefix:s" => \$build_prefix);
+
+    # Let date(1) do the formatting.
+    system("date +${build_prefix}%m%d${build_letter}_%g%V.$release");
+}
+
+# Subcommand: resolve a CSV list of change-id:patchset pairs to git commit
+# hashes and print them comma separated on one line.
+sub execute_gerrit_commit
+{
+    my $patches = "";
+
+    GetOptions("patches:s" => \$patches);
+
+    die "Missing patches" if ($patches eq "");
+
+    # Split the CSV list and resolve each pair through gerrit.
+    my @pairs = split(/,+/, $patches);
+    my $commits = gerrit_resolve_patchset(\@pairs);
+
+    print join(",", @{$commits})."\n";
+}
+
+# Subcommand: print general usage (no argument) or the detailed help text
+# for one subcommand.  Dies on an unknown subcommand name.
+sub execute_help
+{
+    my $command = shift @ARGV;
+
+    if ($command eq "")
+    {
+        print "gitRelease:\n";
+        print "    Prepare the git codebase for release.\n";
+        print "\n";
+        print "    Syntax:\n";
+        print "        gitRelease [options] <tool>\n";
+        print "\n";
+        print "    Available subtools:\n";
+        foreach my $key (sort keys %commands)
+        {
+            print "        $key\n";
+        }
+        print "\n";
+        print "    Global options:\n";
+        print "        --debug        Enable debug mode.\n";
+        print "        --help         Display help on a specific tool.\n";
+        print "        --branch       Branch to use for release.\n";
+        print "\n";
+        print "    Note: Generally a <commit> can be any git or gerrit\n";
+        print "          reference.  A git commit number, tag, branch, or\n";
+        print "          a gerrit change-id are all valid.\n";
+    }
+    elsif (not defined $commands{$command})
+    {
+        die "Unknown subcommand: $command.\n";
+    }
+    else
+    {
+        # Per-subcommand help bodies.
+        my %help = (
+            "define" =>
+q(
+    Define a new level for release.
+
+    Options:
+        --level=<name>      Name for the new level [required].
+        --base=<commit>     Baseline commit [required].
+        --released=<commit> Commit of previous release [required].
+),
+            "undef" =>
+q(
+    Delete a previously defined release level.
+
+    Options:
+        --level=<name>      Name for the level to delete [required].
+),
+            "list-levels" =>
+q(
+    Displays a list of currently defined levels.
+),
+
+            "query-gerrit" =>
+q(
+    Displays a list of open change-sets from the Gerrit server.
+),
+            "query-git" =>
+q(
+    Displays a list of merged commits which are NOT currently destined
+    for a release level.
+
+    Options:
+        --level=<name>      Name for the level to query [required].
+        --branch=<commit>   Branch to query against [default=master].
+),
+            "query-level" =>
+q(
+    Displays information about a defined release level.
+
+    Options:
+        --level=<name>      Name for the level to query [required].
+),
+            "add-patch" =>
+q(
+    Adds a commit to the patch-list for a release.
+
+    Options:
+        --level=<name>      Release to add patch to [required].
+        --patch=<commit>    Commit to add to patch-list [required].
+),
+            "add-forcedep" =>
+q(
+    Add a commit-pair as a forced dependency for a release.
+
+    Options:
+        --level=<name>      Release to add dependency to [required].
+        --from=<commit>     Decendent commit in the dependency [required].
+        --to=<commit>       Ancestor commit in the dependency [required].
+),
+            "verify-patches" =>
+q(
+    Verify patch-list to ensure all dependencies are met.
+
+    This tool will give a list of dependency candidates if an ancestor
+    commit is found modifying the same files as a commit in the
+    patch-list.
+
+    Options:
+        --level=<name>      The level to verify [required].
+),
+            "release" =>
+q(
+    Create a branch / tag based on the definition of a release.
+
+    Options:
+        --level=<name>      The level to release [required].
+),
+            "build-name" =>
+q(
+    Display a properly formatted build name based on the date.
+
+    Ex: sbe0402a_1412.910
+
+    Options:
+        --release=<id>      Release name [default=910].
+        --letter=[a-z]      Build letter [default=a].
+        --prefix=[a-z]      Build prefix [default=sbe]
+),
+            "gerrit-commit" =>
+q(
+    Get commit number of gerrit change-id, patch-set pairs
+
+    Options:
+        --patches=<change-id:patchset>  CSV of change-id:patchset [required].
+),
+        );
+
+        # Cleanup: removed three unused locals ($release, $level,
+        # $checkInDir) that were declared here but never read.
+        print "gitRelease $command:";
+        print $help{$command};
+    }
+}
+
+
+######################### Begin Utility Subroutines ###########################
+
+
+
+# sub create_release_notes
+#
+# Generates an HTML file (releaseNotes.html) with the release notes for a
+# release, then commits it to the current branch.
+#
+# @param [in] level - The level name to release.
+# @param [in] level_info - The level_info hash (see config_get_level).
+#
+sub create_release_notes
+{
+    my $level = shift;
+    my $level_info = shift;
+
+    # Everything between the previous release and the current HEAD.
+    my $commits = git_commit_history("HEAD", $level_info->{released});
+
+    open RELNOTE, "> ".git_root()."/releaseNotes.html";
+    print RELNOTE "<html>\n";
+    print RELNOTE "  <head><title>Release notes for $level</title></head>\n";
+    print RELNOTE <<STYLESHEET;
+    <style type="text/css">
+        table.release {
+            border-width: 1px;
+            border-spacing: 2px;
+            border-style: outset;
+            border-color: gray;
+            border-collapse: separate;
+            background-color: white;
+        }
+        table.release th {
+            border-width: 1px;
+            padding: 1px;
+            border-style: inset;
+            border-color: gray;
+            background-color: white;
+        }
+        table.release td {
+            border-width: 1px;
+            padding: 1px;
+            border-style: inset;
+            border-color: gray;
+            background-color: white;
+        }
+    </style>
+STYLESHEET
+    print RELNOTE "  <body>\n";
+
+    print RELNOTE "<h1>Level: $level</h1>\n";
+    print RELNOTE "<h2>Included commits:</h2>\n";
+    print RELNOTE "<table class='release'>\n";
+    print RELNOTE "  <tr>\n";
+    print RELNOTE "    <th>RTC/CQ Number(s)</th>\n";
+    print RELNOTE "    <th>Subject</th>\n";
+    print RELNOTE "    <th>Git Commit</th>\n";
+    print RELNOTE "  </tr>\n";
+
+
+    # One table row per commit, hyperlinking any RTC / CQ work items found
+    # in the commit message.
+    foreach my $commit (@{$commits})
+    {
+        my $subject = git_get_subject($commit);
+        my $rtc = rtc_workitem_num($commit);
+        my $rtc_hyper = "";
+        my $cq = cq_workitem_num($commit);
+        my $cq_hyper = "";
+
+        if ($rtc ne "")
+        {
+            $rtc_hyper = rtc_hyperlink($rtc);
+            $rtc_hyper = "<a href='$rtc_hyper' target='_blank'>$rtc</a>";
+        }
+        if ($cq ne "")
+        {
+            $cq_hyper = cq_hyperlink($cq);
+            $cq_hyper = "<a href='$cq_hyper' target='_blank'>$cq</a>";
+
+            # Both present: stack the CQ link under the RTC link.
+            if ($rtc_hyper ne "")
+            {
+                $cq_hyper = "<br>$cq_hyper";
+            }
+        }
+
+        print RELNOTE "  <tr>\n";
+        print RELNOTE "    <td>$rtc_hyper $cq_hyper</td>\n";
+        print RELNOTE "    <td>$subject</td>\n";
+        print RELNOTE "    <td>$commit</td>\n";
+        print RELNOTE "  </tr>\n";
+    }
+    print RELNOTE "</table>\n";
+
+    print RELNOTE "  </body>\n";
+    print RELNOTE "</html>\n";
+
+    close RELNOTE;
+
+    # Commit the generated notes onto the release branch.
+    system "git add ".git_root()."/releaseNotes.html";
+    system "git commit -m \"Release notes for $level\"";
+
+}
+
+# sub git_resolve_ref
+#
+# Transforms a symbolic git reference into a commit number.
+#
+# @param [in] ref - The reference to resolve (git ref or gerrit change-id).
+#
+# @return string - Resolved git commit number.
+#
+sub git_resolve_ref
+{
+    my $ref = shift;
+    my $commit = "";
+
+    if (gerrit_is_patch($ref))
+    {
+        # Gerrit change-id: ask the gerrit server for its commit.
+        $commit = @{gerrit_resolve_patches([$ref])}[0];
+    }
+    else
+    {
+        # Anything git can name: take the newest commit hash for it.
+        $commit = `git log -n1 --pretty="%H" $ref`;
+        chomp $commit;
+    }
+
+    die "Unable to resolve ref $ref" if ($commit eq "");
+    print "Resolved $ref as $commit\n" if $debug;
+
+    return $commit;
+}
+
+# sub git_root
+#
+# Determines the path of the root of the git repository.  The answer is
+# cached in %globals after the first lookup.
+#
+# @return string - Root of the git repository.
+sub git_root
+{
+    return $globals{git_root} if (defined $globals{git_root});
+
+    my $toplevel = `git rev-parse --show-toplevel`;
+    chomp $toplevel;
+
+    die "Unable to determine git_root" if ($toplevel eq "");
+    print "Found git_root at $toplevel\n" if $debug;
+
+    $globals{git_root} = $toplevel;
+    return $toplevel;
+}
+
+# sub git_commit_history
+#
+# Determines all the commits between two points in git history.
+#
+# @param[in] start - Beginning commit.
+# @param[in, optional] not_including - Starting point to exclude.
+#
+# @return array - Commit history.
+#
+sub git_commit_history
+{
+    my ($start, $not_including) = @_;
+
+    # Exclude history reachable from the second endpoint, when given.
+    $not_including = "^".$not_including unless ($not_including eq "");
+
+    # --cherry-pick drops commits that appear on both sides of the range.
+    my @commits = `git rev-list --cherry-pick $start $not_including`;
+    chomp @commits;
+
+    return \@commits;
+}
+
+# sub git_log_changeId
+#
+# Determines if a changeId exists in the history of a base ref.
+#
+# @param[in] base - ref whose history is searched.
+# @param[in] changeId - gerrit Change-Id to look for.
+#
+# @return bool - 1 if in commit history, 0 otherwise.
+#
+sub git_log_changeId
+{
+    my ($base, $changeId) = @_;
+
+    # grep produces no output when the Change-Id trailer is absent.
+    my $found = `git log $base | grep 'Change-Id: $changeId'`;
+
+    return (($found ne "") ? 1 : 0);
+}
+
+# sub git_name_rev
+#
+# Transforms a git commit number to a symbolic name for human readability.
+#
+# @param[in] rev - Git revision (commit number) to name.
+# @return string - The symbolic name git uses for that commit number.
+#
+sub git_name_rev
+{
+    my $rev = shift;
+
+    my $named = `git name-rev $rev`;
+    chomp $named;
+
+    return $named;
+}
+
+# sub git_commit_deps
+#
+# Determines a list of dependent commits based on common files touched.
+#
+# @param[in] base - The end point, in git history, of commits to compare.
+# @param[in] commit - The commit to find dependents of.
+#
+# @return array - List of dependent commits.
+#
+sub git_commit_deps
+{
+    my $base = shift;
+    my $commit = shift;
+    chomp($base);
+    chomp($commit);
+
+    my @deps = ();
+
+    print "Searching for deps for $commit against $base\n" if $debug;
+
+    # Every file this commit touches.
+    my @files = split('\n',`git diff-tree --name-only --no-commit-id -r $commit`);
+    foreach my $file (@files)
+    {
+        # If a commit introduces a new file, don't run rev-list as it fails
+        # when the file does not exists in base.
+        my $file_in_base = `git log $base -n1 --oneline -- $file`;
+        next if ($file_in_base eq "");
+
+        # Ancestors of $commit (excluding $base history) touching this file.
+        my $dep_commit = `git rev-list $commit~1 ^$base $file`;
+        if ($dep_commit ne "")
+        {
+            print "Found dep: $dep_commit" if $debug;
+
+            # NOTE(review): rev-list may emit several hashes; only the final
+            # newline is chomped, so they land as one newline-joined entry --
+            # confirm callers tolerate this.
+            chomp $dep_commit;
+            push @deps, $dep_commit;
+        }
+    }
+
+    return \@deps;
+}
+
+# sub git_commit_files
+#
+# Find the files touched by a commit.
+#
+# @param[in] commit - The commit to examine.
+# @return array - List of files touched by the commit.
+#
+sub git_commit_files
+{
+    my $commit = shift;
+
+    # One pathname per output line; strip the newlines.
+    my @touched = `git diff-tree --name-only --no-commit-id -r $commit`;
+    chomp @touched;
+
+    return \@touched;
+}
+
+# sub git_get_subject
+#
+# Get the subject (first line) of the commit message associated with a
+# commit.  See git log --oneline.
+#
+# @param[in] commit - The commit to examine.
+# @return string - The subject of the commit.
+#
+sub git_get_subject
+{
+    my $commit = shift;
+
+    my $subject = `git log -n1 --pretty="%s" $commit`;
+    chomp($subject);
+
+    return $subject;
+}
+
+# sub git_commit_msg
+#
+# Get the entire (multi-line) commit message associated with a commit.
+#
+# @param[in] commit - The commit to examine.
+# @return string - The commit message.
+#
+sub git_commit_msg
+{
+    my $commit = shift;
+
+    # Capture the full message body, newlines included.
+    my $message = join("", `git log -n1 --pretty=%B $commit`);
+
+    return $message;
+}
+
+# sub git_create_branch
+#
+# Create a branch for a release-level.
+#
+# @param[in] level - The release-level to use as basis for the branch name.
+# @param[in] base - The commit to use as the base for the new branch.
+#
+sub git_create_branch
+{
+    my ($level, $base) = @_;
+
+    # The branch carries a reserved prefix so release branches stand out.
+    system("git checkout -b __sbeRelease_$level $base");
+    die "Could not create branch for $level" if ($?);
+}
+
+# sub git_create_tag
+#
+# Create an annotated tag for a release-level.  The annotation records the
+# level's base, previous release, branch, patch list and forced deps.
+#
+# @param[in] level - The release-level to create a tag for.
+# @param[in] level_info - The level-info associated with the level.
+#
+sub git_create_tag
+{
+    my $level = shift;
+    my $level_info = shift;
+
+    # Create an annotated tag, taking annotation from stdin.
+    open COMMAND, "| git tag -a $level -F -" || die;
+
+    # Add information about the level to the tag.
+    print COMMAND "Release: $level\n\n";
+    print COMMAND "Base: ".$level_info->{base}."\n";
+    print COMMAND "Previous-Release: ".$level_info->{released}."\n";
+    print COMMAND "Branch: ".$globals{"branch"}."\n";
+    print COMMAND "\n";
+    foreach my $patch (@{$level_info->{patches}})
+    {
+        print COMMAND "Patch: $patch\n";
+    }
+    my $forceDeps = $level_info->{forceDeps};
+    foreach my $from (keys %{$forceDeps})
+    {
+        print COMMAND "Forced-Dep: $from:".$forceDeps->{$from}."\n";
+    }
+
+    # Commit annotated tag.
+    close COMMAND;
+}
+
+# sub git_cherry_pick
+#
+# Cherry-pick a commit onto the current branch.
+#
+# @param[in] commit - The commit to cherry-pick.
+#
+# @retval false - Error occurred during cherry-pick.
+sub git_cherry_pick
+{
+    my $commit = shift;
+
+    # -x appends the original commit id to the new commit message.
+    my $rc = system("git cherry-pick -x $commit");
+    return ($rc == 0);
+}
+
+# sub git_order_commits
+#
+# Order a list of commits so that they are in a good order with regard to
+# dependencies. The order returned should be the most likely to not fail
+# a cherry-pick sequence.
+#
+# @param[in] patches - The list of commits to order.
+# @param[in] level_info - The level_info for the release-level being created.
+#
+# @return array - Re-ordered list of commits (from patches).
+#
+sub git_order_commits
+{
+    my $patches = shift;
+    my $level_info = shift;
+    my $forceDeps = $level_info->{forceDeps};
+    my %patch_dep = ();
+
+    # Create patch -> { distance -> 0, deps -> [] } hash.
+    # NOTE: \{...} makes a reference to a hashref, hence the double-deref
+    # ${...}->{...} pattern used throughout this sub.
+    my %patch_hash =
+        map { $_ => \{ distance => 0, deps => [] }} @{$patches};
+
+    # Determine dependencies and distance for each patch.
+    foreach my $patch (@{$patches})
+    {
+        # Add dependencies for each patch to the hash.
+        my $deps = git_commit_deps($level_info->{base}, $patch);
+        push @{${$patch_hash{$patch}}->{deps}}, @{$deps};
+
+        # Add dependencies to hash for circular depends check later
+        foreach my $dep (@{$deps})
+        {
+            $patch_dep{$patch}{$dep} = 1;
+        }
+
+        # Determine the distance from previous release for each patch.
+        ${$patch_hash{$patch}}->{distance} =
+            scalar @{git_commit_history($patch, $level_info->{released})};
+    }
+
+    # Determine forced dependencies for each patch.
+    foreach my $patch (keys %{$forceDeps})
+    {
+        my $resolve_from = @{gerrit_resolve_patches([$patch])}[0];
+        my $resolve_to =
+            @{gerrit_resolve_patches([$forceDeps->{$patch}])}[0];
+
+        print "Force dep: $resolve_from : $resolve_to\n" if ($debug);
+
+        push @{${$patch_hash{$resolve_from}}->{deps}}, $resolve_to;
+        # Add dependencies to hash for circular depends check later
+        $patch_dep{$resolve_from}{$resolve_to} = 1;
+    }
+
+    # Calculate Dijkstra's on the patches: iterate to a fixed point where
+    # each patch sits at least one step beyond all of its dependencies.
+    my $changed = 1;
+    while ($changed != 0)
+    {
+        $changed = 0;
+        foreach my $patch (@{$patches})
+        {
+            my $distance = 1 + max( map
+                {
+                    # If patches have a circular dependency, ignore distance check.
+                    next if ($patch_dep{$_}{$patch} && $patch_dep{$patch}{$_});
+                    ${$patch_hash{$_}}->{distance}
+                }
+                @{${$patch_hash{$patch}}->{deps}});
+            if ($distance > ${$patch_hash{$patch}}->{distance})
+            {
+                $changed = 1;
+                ${$patch_hash{$patch}}->{distance} = $distance;
+            }
+        }
+    }
+
+    # Sort the patches based on shortest distance from previous release
+    # (after Dijkstra).
+    my @commit_order =
+        sort { ${$patch_hash{$a}}->{distance} <=>
+               ${$patch_hash{$b}}->{distance} }
+             @{$patches};
+
+    return \@commit_order;
+}
+
+# sub config_filename
+#
+# @return The location of the gitRelease config file.
+#
+sub config_filename
+{
+    # Kept under .git so the tool's state never shows up as a tracked file.
+    my $path = git_root()."/.git/gitRelease.config";
+    return $path;
+}
+
+# sub config_init
+#
+# Ensures the gitRelease tool is initialized properly.  On first use the
+# config file is created and seeded with the gerrit server and project
+# parsed from the repository's ssh remote URL.
+#
+sub config_init
+{
+    # Only initialize once per process.
+    return if (defined $globals{config_init});
+
+    unless (-e config_filename())
+    {
+        # Find the ssh remote URL among the repo's git config entries.
+        open COMMAND, "git config --list | grep remote.*ssh |";
+        my $url = <COMMAND>;
+        close COMMAND;
+        chomp $url;
+
+        die "Undefined git-remote 'gerrit'" if ($url eq "");
+
+        die "Unexpected url found: $url" if (not ($url =~ m/ssh:\/\/.*\/.*/));
+
+        my $server = $url;
+        my $project = $url;
+
+        # match first occurance of '/' after ssh://
+        # eg: remote.hostboot.url=ssh://hostboot.gerrit/hostboot
+        # $2 is 'hostboot.gerrit'
+        $server =~ s/(.*)ssh:\/\/(.*?)\/(.*)/$2/;
+        # eg: remote.ppe.url=ssh://hw.gerrit/hw/ppe
+        # $3 is 'hw/ppe'
+        $project =~ s/(.*)ssh:\/\/(.*?)\/(.*)/$3/;
+
+        print "Gerrit Server: ".$server."\n" if $debug;
+        print "Gerrit Project: ".$project."\n" if $debug;
+
+        # Create an empty config file, then populate it via git-config.
+        open(UNUSED, ">".config_filename()) || die;
+        close UNUSED;
+
+        system("git config --file ".config_filename().
+               " --add releaseLevels.server $server");
+        system("git config --file ".config_filename().
+               " --add releaseLevels.project $project");
+    }
+    $globals{config_init} = 1;
+
+}
+
+# sub config_list_levels
+#
+# Determines the previously defined release-levels.  The result is cached
+# in %globals for the life of the process.
+#
+# @return hash - { level => 1 } for each defined level.
+#
+sub config_list_levels
+{
+    return $globals{config_list_levels}
+        if (defined $globals{config_list_levels});
+
+    config_init();
+
+    # One level name per output line.
+    open COMMAND, "git config --file ".config_filename().
+                  " --get-all releaseLevels.levelname |";
+    my $names = {};
+    while (my $line = <COMMAND>)
+    {
+        chomp $line;
+        $names->{$line} = 1;
+    }
+    close COMMAND;
+
+    $globals{config_list_levels} = $names;
+    return $names;
+}
+
+# sub config_add_level
+#
+# Add a new level definition to the config file.  Dies if the level name is
+# already defined.
+#
+# @param level_def - A level info with the name/base/released for the new level.
+#
+sub config_add_level
+{
+    config_init();
+
+    my $level_def = shift;
+    my $levels = config_list_levels();
+
+    if (defined $levels->{$level_def->{name}})
+    {
+        die "Level ".$level_def->{name}." is already defined";
+    }
+
+    # Register the level name, then its base and released commits.
+    system("git config --file ".config_filename().
+           " --add releaseLevels.levelname ".$level_def->{name});
+
+    system("git config --file ".config_filename().
+           " --add level.".$level_def->{name}.".base ".$level_def->{base});
+
+    system("git config --file ".config_filename().
+           " --add level.".$level_def->{name}.".released ".
+           $level_def->{released});
+
+    # Only non-master branches are recorded explicitly.
+    if ($globals{"branch"} ne "master")
+    {
+        system("git config --file ".config_filename().
+               " --add level.".$level_def->{name}.".branch ".
+               $globals{"branch"});
+    }
+}
+
+# sub config_del_level
+#
+# Delete a level definition from the config file.
+#
+# @param level - The level name to delete.
+#
+sub config_del_level
+{
+    config_init();
+
+    my $name = shift;
+
+    # Drop the name from the level list (anchored exact match) ...
+    system("git config --file ".config_filename().
+           " --unset releaseLevels.levelname ^".$name."\$");
+
+    # ... and remove all of the level's own settings.
+    system("git config --file ".config_filename().
+           " --remove-section level.".$name);
+}
+
+# sub config_add_patch
+#
+# Add a patch to a level definition.
+#
+# @param level - The level to add patch to.
+# @param patch - The patch to add.
+#
+sub config_add_patch
+{
+    my ($level, $patch) = @_;
+
+    # Dies if the level is not defined.
+    config_get_level($level);
+
+    # Gerrit change-ids are stored as-is; anything else is resolved to a
+    # full commit hash first.
+    $patch = git_resolve_ref($patch) unless (gerrit_is_patch($patch));
+    die "Unknown patch requested" if ($patch eq "");
+
+    system("git config --file ".config_filename().
+           " --add level.$level.patch $patch");
+}
+
+# sub config_add_dep
+#
+# Add a forced dependency to a level definition.
+#
+# @param level - The level to add to.
+# @param from - The decendent patch.
+# @param to - The ancestor patch.
+#
+sub config_add_dep
+{
+    my ($level, $from, $to) = @_;
+
+    # Dies if the level is not defined.
+    config_get_level($level);
+
+    # Store change-ids as-is; resolve anything else to a commit hash.
+    $from = git_resolve_ref($from) unless (gerrit_is_patch($from));
+    die "Unknown patch requested for 'from' dep" if ($from eq "");
+
+    $to = git_resolve_ref($to) unless (gerrit_is_patch($to));
+    die "Unknown patch requested for 'to' dep" if ($to eq "");
+
+    # Recorded as a single "from:to" value.
+    system("git config --file ".config_filename().
+           " --add level.$level.forceDep $from:$to");
+}
+
+# sub config_get_level
+#
+# Reads a level's information from the config file.  Dies if the level is
+# not defined.  Side effect: overrides $globals{branch} when the level has
+# a recorded branch.
+#
+# @param level - The level to read.
+#
+# @return hash - { name => level, base => base release,
+#                  released => previous release,
+#                  patches => array of patches,
+#                  forceDep => hash of { from => to } pairs }.
+#
+sub config_get_level
+{
+    config_init();
+
+    my $level = shift;
+    my %level_data = ();
+
+    open COMMAND, "git config --file ".config_filename().
+                  " --get releaseLevels.levelname $level |";
+    my $found_level = <COMMAND>; chomp($found_level);
+    close COMMAND;
+
+    die "Level $level not defined" if ($found_level eq "");
+
+    $level_data{name} = $level;
+
+    open COMMAND, "git config --file ".config_filename().
+                  " --get level.$level.base |";
+    my $base = <COMMAND>; chomp($base);
+    close COMMAND;
+
+    $level_data{base} = $base;
+
+    open COMMAND, "git config --file ".config_filename().
+                  " --get level.$level.released |";
+    my $released = <COMMAND>; chomp($released);
+    close COMMAND;
+
+    $level_data{released} = $released;
+
+    open COMMAND, "git config --file ".config_filename().
+                  " --get level.$level.branch |";
+    my $branch = <COMMAND>; chomp($branch);
+    close COMMAND;
+
+    # A recorded branch overrides the global (default "master").
+    if ($branch ne "")
+    {
+        $globals{"branch"} = $branch;
+    }
+
+    my @patches = ();
+    open COMMAND, "git config --file ".config_filename().
+                  " --get-all level.$level.patch |";
+    while (my $patch = <COMMAND>)
+    {
+        chomp($patch);
+        push @patches, $patch;
+    }
+    close COMMAND;
+
+    $level_data{patches} = \@patches;
+
+    my %forceDeps = ();
+    open COMMAND, "git config --file ".config_filename().
+                  " --get-all level.$level.forceDep |";
+    while (my $forceDep = <COMMAND>)
+    {
+        # Each entry is "from:to".  NOTE(review): if the match ever fails,
+        # $1/$2 retain their previous values -- confirm entries are always
+        # well formed (config_add_dep writes them that way).
+        $forceDep =~ m/(.*):(.*)/;
+        $forceDeps{$1} = $2;
+    }
+    close COMMAND;
+
+    $level_data{forceDeps} = \%forceDeps;
+
+    return \%level_data;
+}
+
+# sub config_print_levels
+#
+# Displays the name of each defined level, one per line, sorted.
+#
+sub config_print_levels
+{
+    my $defined = config_list_levels();
+    print "$_\n" foreach (sort keys %{$defined});
+}
+
+# sub config_server
+#
+# Gets the Gerrit server name / address from the config file.  Cached in
+# %globals after the first read.
+#
+# @return string - The location of the Gerrit server.
+#
+sub config_server
+{
+    return $globals{config_server} if (defined $globals{config_server});
+
+    config_init();
+
+    my $cfg_file = config_filename();
+    my $server = `git config --file $cfg_file --get releaseLevels.server`;
+    chomp($server);
+
+    die "Server config does not exist" if ($server eq "");
+
+    $globals{config_server} = $server;
+    return $server;
+}
+
+# sub config_project
+#
+# Gets the Gerrit project managed by this repository from the config file.
+# Cached in %globals after the first read.
+#
+# @return string - Project managed by this repository.
+#
+sub config_project
+{
+    return $globals{config_project} if (defined $globals{config_project});
+
+    config_init();
+
+    my $cfg_file = config_filename();
+    my $project = `git config --file $cfg_file --get releaseLevels.project`;
+    chomp($project);
+
+    die "Project config does not exist" if ($project eq "");
+
+    $globals{config_project} = $project;
+    return $project;
+}
+
+# sub config_resolve_level_dep
+#
+# Resolves dependencies for patches by parsing the commit messages for the
+# depends-on tag and checking if there are any open parents of a commit.
+# If a patch is dependent on a patch not already in the level, the patch is
+# added.  Works as a breadth-first walk: discovered dependencies are pushed
+# onto the work list and processed in turn.
+#
+# @param[in] - level name
+# @param[in] - Array of patches to process.
+#
+# @TODO RTC:125235 - improve this to support cross project dependencies
+sub config_resolve_level_dep
+{
+    print "Resolving level dependencies...\n";
+    my $level = shift;
+    my $base = shift;
+    my @patches = @_;
+    my %level_patches = ();
+
+    while (@patches)
+    {
+        # Entries may be "changeId" or "changeId:patchset".
+        my $patchPair = shift @patches;
+        my ($patch,$patchSet) = split (":", $patchPair);
+
+        # Check if patch has already been added to level
+        if ($level_patches{$patch})
+        {
+            print "Skipping - already added patch = $patch to level\n" if $debug;
+            next;
+        }
+        # Check if patch already exists in release base
+        if (git_log_changeId($base, $patch))
+        {
+            print "Skipping - patch = $patch already exists in release base = $base\n" if $debug;
+            next;
+        }
+
+        # Mark patch as processed
+        $level_patches{$patch} = 1;
+
+        print "\n===========\nFirst time seeing patch = $patch\n" if $debug;
+
+        # Force use of changeId's
+        if (!gerrit_is_patch($patch))
+        {
+            die "Added patch: $patch is not of type changeId\n";
+        }
+
+        # Add patch to level with resolved git commit.
+        print "Adding patch - $patchPair\n" if $debug;
+        my $commits = gerrit_resolve_patchset([$patchPair]);
+        config_add_patch($level, $commits->[0]);
+
+        # Get commit message
+        my $patchInfo = gerrit_query_commit($patch);
+        my @commitMsgArray = split(/\\n/,$patchInfo->{commitMessage});
+        print Dumper @commitMsgArray if $debug;
+
+        # Check for OPEN parent
+        my $commit_info = gerrit_query_commit($patch);
+        my $parent_commit = $commit_info->{currentPatchSet}->{parents}[0];
+        my $parent_info = gerrit_query_commit($parent_commit);
+        # "NEW" status means the parent is still open in gerrit.
+        if ($parent_info->{status} eq "NEW")
+        {
+            my $parent_id = $parent_info->{id};
+            # Add dependency if dependency is not already in base release
+            if(!git_log_changeId($base, $parent_id))
+            {
+                print "Adding forced dependency $patch:$parent_id\n" if $debug;
+                config_add_dep($level, $patch, $parent_id);
+            }
+
+            # Add dependent patch if not already added to level
+            if (!exists($level_patches{$parent_id}) )
+            {
+                push @patches, $parent_id;
+            }
+        }
+
+        # Search commit message for dependencies
+        foreach my $line (@commitMsgArray)
+        {
+            # Check for forced dependencies
+            if ($line =~ m/depends-on:/i)
+            {
+                # Strip the tag, leaving just the change-id.
+                $line =~ s/([^:]*):\s*//;
+                chomp($line);
+                print "Found depends-on: $line\n" if $debug;
+
+                # Add dependency if dependency is not already in base release
+                if(!git_log_changeId($base, $line))
+                {
+                    print "Adding forced dependency $patch:$line\n" if $debug;
+                    config_add_dep($level, $patch, $line);
+                }
+
+                # Add dependent patch if not already added to level
+                if (!exists($level_patches{$line}) )
+                {
+                    push @patches, $line;
+                }
+            }
+            # Print out CMVC dependencies
+            if ($line =~ m/cmvc-([a-zA-Z]+):/i)
+            {
+                # $TOKEN marks the line for the git-CI-tool to pick up.
+                print "$TOKEN Need ".$line."\n";
+            }
+        }
+    }
+}
+
+# sub config_verify_patches
+#
+# Verify patch-list to ensure all dependencies are met
+#
+# @param[in] - level base patch
+# @param[in] - Array of patches to verify.
+#
+sub config_verify_patches  # Report likely-missing dependencies for a patch list.
+{
+    print "Verifying patches...\n";
+
+    config_init();  # Ensure gerrit server/project config is loaded.
+
+    my $base = shift;     # Level base revision the patches apply on top of.
+    my $patches = shift;  # Array-ref of git commits to verify.
+
+    foreach my $patch (@{$patches})
+    {
+        print "Deps for $patch\n" if $debug;
+        my $displayed_header = 0;  # Banner printed at most once per patch.
+
+        my $deps = git_commit_deps($base, $patch);
+
+        foreach my $dep (@{$deps})
+        {
+            unless (grep {$_ eq $dep} @{$patches})  # Dep missing from list.
+            {
+                unless ($displayed_header)
+                {
+                    print "-------------------------------------------------\n";
+                    print "Potential missing dependency for:\n";
+                    print wrap(" "," ",git_get_subject($patch)."\n");
+                    print "\t$patch\n\n";
+                    $displayed_header = 1;
+                }
+
+                print wrap(" ", " ", git_get_subject($dep)."\n");
+                print "\t$dep\n";
+
+                my $files = array_intersect(git_commit_files($patch),
+                                            git_commit_files($dep));
+
+                foreach my $file (@{$files})  # Files touched by both commits.
+                {
+                    print "\t$file\n";
+                }
+
+                print "\n";
+            }
+        }
+
+        if ($displayed_header)
+        {
+            print "-------------------------------------------------\n";
+        }
+    }
+
+}
+
+# sub config_release
+#
+# Create a branch / tag based on the definition of a release.
+#
+# @param[in] - level info
+# @param[in] - bool to create tag
+#
+sub config_release  # Build a release branch from a level definition hash.
+{
+    my $level_info = shift;  # Hash-ref: {name, base, patches, ...}.
+    my $create_tag = shift;  # Truthy => also create a git tag for the level.
+
+    print "Creating release branch...\n";
+    git_create_branch($level_info->{name}, $level_info->{base});
+
+    my $patches = $level_info->{patches};
+
+    print "Resolving and ordering patches...\n";
+    print Dumper $level_info->{patches} if $debug;
+    $patches = gerrit_resolve_patches($level_info->{patches});  # IDs -> SHAs.
+    $patches = git_order_commits($patches, $level_info);
+
+    print "\n========\nDetermined patch order as:\n";
+    my $i = 1;
+    foreach my $patch (@{$patches})
+    {
+        print "$i. $patch\n";
+        $i++;
+    }
+
+    print "\n========\nApplying patches...\n";
+    $i = 1;
+    foreach my $patch (@{$patches})
+    {
+        print "\n$i. Cherry-picking commit = $patch.\n\n";
+        unless (git_cherry_pick($patch))  # Abort and clean tree on conflict.
+        {
+            print `git status`;
+            system("git reset HEAD --hard");
+            die "Cherry-pick of $patch failed";
+        }
+        $i++;
+    }
+
+    print "\nGenerating release notes...\n";
+    create_release_notes($level_info->{name}, $level_info);
+
+    if ($create_tag)
+    {
+        print "\nCreating tag...\n";
+        git_create_tag($level_info->{name}, $level_info);
+    }
+}
+
+# sub gerrit_ssh_command
+#
+# Creates a properly formed ssh command based on the server address.
+#
+# @return string - The basic ssh command to connect to the server.
+#
+sub gerrit_ssh_command  # Build (and memoize) the "ssh ... gerrit" prefix.
+{
+    return $globals{gerrit_ssh_command}
+        if (defined $globals{gerrit_ssh_command});  # Cached from prior call.
+
+    my $server = config_server();
+    my $port = "";
+
+    if ($server =~ m/.*:.*/)  # "host:port" form - split the two parts.
+    {
+        $port = $server;
+        $server =~ s/(.*):.*/$1/;
+        $port =~ s/.*:(.*)/$1/;
+
+        $port = "-p $port";
+    }
+
+    my $command = "ssh -qx $port $server gerrit";
+    print "SSH command: $command\n" if $debug;
+
+    $globals{gerrit_ssh_command} = $command;  # Memoize for later callers.
+    return $command;
+}
+
+# sub gerrit_query
+#
+# Performs a gerrit query and parses the resulting JSON.
+#
+# @param[in] query - The query to perform.
+#
+# @return array - A list of items from the JSON query. Each item is a
+# hash (key-value pair) for the item attributes.
+#
+sub gerrit_query  # Run a gerrit query; return array-ref of parsed JSON rows.
+{
+    my $query = shift;
+
+    my @items = ();
+
+    open COMMAND, gerrit_ssh_command()." query $query --current-patch-set".
+                  " --patch-sets --format=JSON |";  # NOTE(review): open result unchecked.
+
+    while (my $line = <COMMAND>)  # One JSON object per line of output.
+    {
+        chomp $line;
+        push @items, json_parse($line);
+    }
+
+    return \@items;
+}
+
+# sub gerrit_query_commit
+#
+# Performs a gerrit query on a specific commit.
+#
+# @param[in] commit - The commit to query.
+#
+# @return hash - The parsed JSON for the queried commit.
+#
+sub gerrit_query_commit  # Look up one change by change-id or revision SHA.
+{
+    my $commit = shift;  # Gerrit change-id or (partial) git SHA.
+
+    my $project = config_project();
+
+    my $query_result = gerrit_query("$commit project:$project ".
+                                    "branch:".$globals{"branch"});
+    foreach my $result (@{$query_result})
+    {
+        if ($result->{id} eq $commit ||
+            $result->{currentPatchSet}->{revision} =~ m/$commit/)
+        {
+            return $result;  # Matched change-id or current patch-set SHA.
+        }
+        else
+        {
+            # If all patchsets queried, search all of them for the commit
+            foreach my $patchset (@{$result->{patchSets}})
+            {
+                if ($patchset->{revision} =~ m/$commit/)
+                {
+                    return $result;
+                }
+            }
+        }
+    }
+
+    die "Cannot find $commit in $project/$globals{\"branch\"}";
+}
+
+# sub gerrit_is_patch
+#
+# Determines if a patch identifier is a Gerrit patch or not.
+#
+# @param[in] patch - The patch to make determination about.
+#
+# @retval true - Patch is a Gerrit patch ID.
+# @retval false - Patch does not appear to be a Gerrit patch ID.
+sub gerrit_is_patch  # True if the id looks like a Gerrit Change-Id ("I"+hex).
+{
+    my $patch = shift;
+    return 1 if ($patch =~ m/I[0-9a-f]+/);  # NOTE(review): unanchored - matches "I<hex>" anywhere in the string; confirm intended.
+    return 0;
+}
+
+# sub gerrit_resolve_patches
+#
+# Resolves gerrit patch IDs to git commit numbers and ensures the git
+# commits are fetched from the gerrit server.
+#
+# Any git commit number is left unchanged.
+#
+# @param[in] patches - An array of patches.
+# @return array - An array of git commit numbers.
+#
+sub gerrit_resolve_patches  # Map change-ids to fetched git SHAs; pass SHAs through.
+{
+    my $patches = shift;  # Array-ref of change-ids and/or git SHAs.
+    my @result = ();
+
+    foreach my $patch (@{$patches})
+    {
+        if (gerrit_is_patch($patch))
+        {
+            my $patch_info = gerrit_query_commit($patch);
+            gerrit_fetch($patch_info->{currentPatchSet}->{ref});  # Make SHA local.
+            push @result, $patch_info->{currentPatchSet}->{revision};
+        }
+        else
+        {
+            push @result, $patch;  # Already a git commit id; unchanged.
+        }
+    }
+
+    return \@result;
+}
+
+# sub gerrit_resolve_patchset
+#
+# Resolves an array of gerrit change-id and patch-set pairs to git commit
+# numbers and ensures the git commits are fetched from the gerrit server.
+#
+# @param[in] patches - An array of change-id, patch-set pairs.
+# @return array - An array of git commit numbers.
+#
+sub gerrit_resolve_patchset  # Resolve "changeId:patchSet" pairs to git SHAs.
+{
+    my $patches = shift;  # Array-ref of "changeId:patchSet" strings.
+
+    my @result = ();
+    foreach my $patchPair (@{$patches})
+    {
+        my ($changeId,$patchSet) = split(":",$patchPair);
+
+        if (gerrit_is_patch($changeId))
+        {
+            my $patch_info = gerrit_query_commit($changeId);
+            # Fail if patchset DNE
+            if ($patchSet > $patch_info->{currentPatchSet}->{number})
+            {
+                die "$changeId does not have patch set number $patchSet";
+            }
+            # JSON creates array of patchSets in number order
+            my $index = $patchSet - 1;
+            gerrit_fetch($patch_info->{patchSets}[$index]->{ref});
+            push @result, $patch_info->{patchSets}[$index]->{revision};
+        }
+        else
+        {
+            die "Requires gerrit change-id and patch-set";
+        }
+    }
+
+    return \@result;
+}
+
+# sub gerrit_fetch
+#
+# Fetches the contents of a Gerrit revision (refs/changes/*) to the local
+# git repository.
+#
+# @param[in] ref - The revision to fetch from the Gerrit server.
+#
+sub gerrit_fetch  # Fetch a gerrit refs/changes/* revision into the local repo.
+{
+    my $ref = shift;  # e.g. "refs/changes/57/21157/4".
+    open COMMAND, "git config --list | grep remote.*ssh |";  # Find the ssh remote.
+    my $projecturl = <COMMAND>;
+    close COMMAND;
+    chomp $projecturl;
+    $projecturl =~ s/(.*?)\=(.*)/$2/;  # Keep value of "remote.X.url=<url>".
+    system("git fetch $projecturl $ref -q");
+}
+
+# sub rtc_workitem_num
+#
+# Determines the RTC WorkItem associated with a git commit.
+#
+# @param[in] commit - The git commit.
+#
+# @return string - RTC WorkItem number (or "").
+#
+sub rtc_workitem_num  # Extract "RTC: <num>" from a commit message, or "".
+{
+    my $commit = shift;
+    my $message = git_commit_msg($commit);
+
+    if ($message =~ m/RTC:\s*([0-9]+)/)
+    {
+        return $1;  # First RTC number found in the message.
+    }
+    else
+    {
+        return "";  # No RTC tag present.
+    }
+}
+
+# sub cq_workitem_num
+#
+# Determine the CQ WorkItem associated with a git commit.
+#
+# @param[in] commit - The git commit.
+#
+# @return string - CQ WorkItem number (or "").
+#
+sub cq_workitem_num  # Extract "CQ: <XX12345>" from a commit message, or "".
+{
+    my $commit = shift;
+    my $message = git_commit_msg($commit);
+
+    if ($message =~ m/CQ:\s*([A-Z][A-Z][0-9]+)/)  # Two letters then digits.
+    {
+        return $1;
+    }
+    else
+    {
+        return "";  # No CQ tag present.
+    }
+}
+
+# sub coreq_workitem_num
+#
+# Search through a git commit for all coReq instances.
+#
+# @param[in] commit - The git commit.
+#
+# @return array of strings - CMVC-Coreq numbers or "".
+#
+
+sub coreq_workitem_num  # Collect every "CMVC-Coreq: <num>" in the commit message.
+{
+    my $commit = shift;
+
+    my @msg_lines = split('\n',git_commit_msg($commit));
+    my @coreqs = ();
+
+    foreach my $line (@msg_lines)
+    {
+        if ($line =~ m/CMVC-Coreq:\s*([0-9]+)/i)  # Case-insensitive tag match.
+        {
+            push @coreqs, $1;
+        }
+    }
+    return @coreqs;  # Possibly empty list.
+}
+
+# sub prereq_workitem_num
+#
+# Search through a git commit for all preReq instances.
+#
+# @param[in] commit - The git commit.
+#
+# @return array of strings - CMVC-Prereq numbers or "".
+#
+
+sub prereq_workitem_num  # Collect every "CMVC-Prereq: <num>" in the commit message.
+{
+    my $commit = shift;
+
+    my @msg_lines = split('\n',git_commit_msg($commit));
+    my @prereqs = ();
+
+    foreach my $line (@msg_lines)
+    {
+        if($line =~ m/CMVC-Prereq:\s*([0-9]+)/i)  # Case-insensitive tag match.
+        {
+            push @prereqs, $1;
+        }
+    }
+    return @prereqs;  # Possibly empty list.
+}
+
+# sub gerrit_changeid_num
+#
+# Determine the Gerrit Change-Id associated with a git commit.
+#
+# @param[in] commit - The git commit.
+#
+# @return string - Gerrit Change-Id number (or "").
+#
+sub gerrit_changeid_num  # Extract "Change-Id: I..." from a commit message, or "".
+{
+    my $commit = shift;
+    my $message = git_commit_msg($commit);
+
+    if ($message =~ m/Change-Id:\s*(I[0-9a-z]+)/)  # NOTE(review): [0-9a-z] vs gerrit_is_patch's [0-9a-f] - confirm intended.
+    {
+        return $1;
+    }
+    else
+    {
+        return "";  # No Change-Id footer present.
+    }
+}
+
+
+# sub rtc_hyperlink
+#
+# Turn an RTC WorkItem number into the https:// address to the RTC server.
+#
+# @param[in] workitem - RTC workitem number.
+#
+# @return string - The https:// address of the RTC item on the server.
+#
+sub rtc_hyperlink  # Format an RTC workitem number as its server hover URL.
+{
+    my $workitem = shift;
+    return "https://jazz07.rchland.ibm.com:13443/jazz/oslc/workitems/".
+           "$workitem.hover.html";
+}
+
+# sub cq_hyperlink
+#
+# Turn a CQ WorkItem number into the http:// address to the BQ server.
+#
+# @param[in] workitem - CQ workitem number.
+#
+# @return string - The http:// address of the CQ item on the server.
+#
+sub cq_hyperlink  # Format a CQ workitem number as its BestQuest defect URL.
+{
+    my $workitem = shift;
+    return "http://w3.rchland.ibm.com/projects/bestquest/?defect=$workitem";
+}
+
+# sub json_parse
+#
+# Parse a line of JSON into an hash-object.
+#
+# @param[in] line - The JSON content.
+#
+# @return hash - The parsed object.
+#
+# @note There are perl modules for doing this but they are not installed on
+# the pool machines. The parsing for JSON (at least the content from
+# the Gerrit server) isn't so bad...
+#
+sub json_parse  # Hand-rolled JSON object parser; returns hash-ref.
+{
+    my $line = shift;  # Full JSON object text, "{...}".
+
+    die "Invalid JSON format: $line" unless ($line =~ m/^\{.*\}$/);
+    $line =~ s/^\{(.*)}$/$1/;  # Strip outer braces; parse the body below.
+
+    my %object = ();
+
+    while($line ne "")  # Consume one key:value pair per iteration.
+    {
+        my $key;
+        my $value;
+
+        ($key, $line) = json_get_string($line);
+        $key =~ s/^"(.*)"$/$1/;  # Drop surrounding quotes from the key.
+
+        $line =~ s/^://;
+        if ($line =~ m/^"/)  # String value.
+        {
+            ($value, $line) = json_get_string($line);
+            $value =~ s/^"(.*)"$/$1/;
+        }
+        elsif ($line =~ m/^{/)  # Nested object - recurse.
+        {
+            ($value, $line) = json_get_object($line);
+            $value = json_parse($value);
+        }
+        elsif ($line =~ m/^\[/)  # Array value.
+        {
+            ($value, $line) = json_get_array($line);
+            $value = json_parse_array($value);
+        }
+        else  # Bare scalar (number, true/false, null) up to next comma.
+        {
+            $line =~ s/([^,]*)//;
+            $value = $1;
+        }
+
+        $object{$key} = $value;
+    }
+
+    return \%object;
+}
+
+# sub json_parse_array
+#
+# Utility function for json_parse.
+#
+sub json_parse_array  # Parse a JSON array "[...]" into an array-ref.
+{
+    my $line = shift;
+
+    $line =~ s/^\[(.*)\]$/$1/;  # Strip outer brackets.
+
+    my @array = ();
+
+    while ($line ne "")  # Consume one element per iteration.
+    {
+        my $value;
+
+        if ($line =~ m/^"/)  # String element.
+        {
+            ($value, $line) = json_get_string($line);
+            $value =~ s/^"(.*)"$/$1/;
+        }
+        elsif ($line =~ m/^\{/)  # Object element - recurse via json_parse.
+        {
+            ($value, $line) = json_get_object($line);
+            $value = json_parse($value);
+        }
+        elsif ($line =~ m/^\[/)  # Nested array element.
+        {
+            ($value, $line) = json_get_array($line);
+            $value = json_parse_array($value);
+        }
+        else  # Bare scalar element up to the next comma.
+        {
+            $line =~ s/([^,]*)//;
+            $value = $1;
+        }
+
+        push @array, $value;
+        $line =~ s/^,//;  # Drop the element separator if present.
+    }
+
+    return \@array;
+}
+
+# sub json_get_string
+#
+# Utility function for json_parse.
+#
+sub json_get_string  # Split leading quoted string off $line; returns (string, rest).
+{
+    my $line = shift;
+
+    $line =~ /("[^"]*")(.*)/;  # Greedy-free match of the first quoted run.
+    my $first = $1;
+    my $second = $2;
+
+    if ($first =~ m/\\"$/)  # Ended on an escaped quote - keep consuming.
+    {
+        my ($more, $rest) = json_get_string($second);
+        return ($first.$more , $rest);
+    }
+    else
+    {
+        return ($first, $second);
+    }
+}
+
+# sub json_get_object
+#
+# Utility function for json_parse.
+#
+sub json_get_object  # Split a balanced "{...}" prefix off $line; returns (object, rest).
+{
+    my $line = shift;
+
+    $line =~ s/^{//;
+    my $object = "{";
+    my $frag = "";
+
+    my $found_object = 0;  # Set when the last fragment was a nested object.
+
+    until ((not $found_object) && ($object =~ m/}$/))  # Stop at our own "}".
+    {
+        $found_object = 0;
+
+        if ($line =~ m/^\{/)  # Nested object - recurse and keep scanning.
+        {
+            ($frag, $line) = json_get_object($line);
+            $object = $object.$frag;
+            $found_object = 1;  # Trailing "}" belongs to the nested object.
+        }
+        elsif ($line =~ m/^"/)
+        {
+            ($frag, $line) = json_get_string($line);
+            $object = $object.$frag;
+        }
+        elsif ($line =~ m/^\[/)
+        {
+            ($frag, $line) = json_get_array($line);
+            $object = $object.$frag;
+        }
+        elsif ($line =~ m/^[:,}]/)  # Structural character - copy it over.
+        {
+            $line =~ s/^([:,}])//;
+            $frag = $1;
+            $object = $object.$frag;
+        }
+        else  # Bare scalar run.
+        {
+            $line =~ s/([^,}]*)//;
+            $frag = $1;
+            $object = $object.$frag;
+        }
+    }
+
+    return ($object, $line);
+}
+
+# sub json_get_array
+#
+# Utility function for json_parse.
+#
+sub json_get_array  # Split a balanced "[...]" prefix off $line; returns (array, rest).
+{
+    my $line = shift;
+
+    $line =~ s/^\[//;
+    my $array = "[";
+    my $frag = "";
+
+    my $found_array = 0;  # Set when the last fragment was a nested array.
+
+    until ((not $found_array) && ($array =~ m/]$/))  # Stop at our own "]".
+    {
+        $found_array = 0;
+
+        if ($line =~ m/^\[/)  # Nested array - recurse and keep scanning.
+        {
+            ($frag, $line) = json_get_array($line);
+            $array = $array.$frag;
+            $found_array = 1;  # Was a no-op "$found_array;" - truncated outer arrays.
+        }
+        elsif ($line =~ m/^\{/)
+        {
+            ($frag, $line) = json_get_object($line);
+            $array = $array.$frag;
+        }
+        elsif ($line =~ m/^"/)
+        {
+            ($frag, $line) = json_get_string($line);
+            $array = $array.$frag;
+        }
+        elsif ($line =~ m/^[:,\]]/)  # Structural character - copy it over.
+        {
+            $line =~ s/^([:,\]])//;
+            $frag = $1;
+            $array = $array.$frag;
+        }
+        else  # Bare scalar run.
+        {
+            $line =~ s/([^,]*)//;
+            $frag = $1;
+            $array = $array.$frag;
+        }
+    }
+
+    return ($array, $line);
+}
+
+# sub array_intersect
+#
+# Perform set intersection on two arrays.
+#
+# @param[in] one - The first array.
+# @param[in] two - The second array.
+#
+# @return array - The set intersection.
+#
+sub array_intersect  # Set intersection of two array-refs; returns array-ref.
+{
+    my $one = shift;
+    my $two = shift;
+
+    my %set = ();  # Was "= {}": a hashref is not a valid hash initializer.
+
+    map { $set{$_}++ } (@{$one}, @{$two});  # Count occurrences across both.
+
+    my @result = map { ($set{$_} > 1) ? $_ : () } (keys %set);  # Seen twice => in both lists (assumes each input is duplicate-free - TODO confirm).
+
+    return \@result;
+}
+
+# sub run_system_command
+#
+# Execute a system command, handle printing command and debug info, and return
+# system output for caller processing
+#
+# E.g. Execute a CMVC line command and return the results.
+#
+# @param[in] cmd - system command to be executed.
+#
+# @return string - output returned from running system command.
+#
+sub run_system_command  # Run a shell command; die on failure; return its stdout.
+{
+    my $cmd = shift;
+
+    print "$cmd\n";  # Always echo the command being run.
+    my $output = `$cmd`;
+    die "failed running system command $cmd - $?" if ($?);  # Non-zero status.
+    print $output if $debug;
+
+    return $output;
+}
diff --git a/src/build/tools/perl.modules/gitUtil.pm b/src/build/tools/perl.modules/gitUtil.pm
new file mode 100644
index 00000000..561c15cd
--- /dev/null
+++ b/src/build/tools/perl.modules/gitUtil.pm
@@ -0,0 +1,592 @@
+#!/usr/bin/perl
+# IBM_PROLOG_BEGIN_TAG
+# This is an automatically generated prolog.
+#
+# $Source: src/build/tools/perl.modules/gitUtil.pm $
+#
+# OpenPOWER sbe Project
+#
+# Contributors Listed Below - COPYRIGHT 2016
+#
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied. See the License for the specific language governing
+# permissions and limitations under the License.
+#
+# IBM_PROLOG_END_TAG
+
+package gitUtil;
+
+use strict;
+
+my %globals = ();  # Module-level cache; was "= {}", which stores a bogus hashref key.
+
+# Function : gitRoot
+#
+# @brief : Determine the root of the GIT repository
+#
+# @return root : Root of GIT repository
+#
+sub gitRoot  # Return (and memoize) the top-level directory of the git repo.
+{
+    return $globals{git_root} if (defined $globals{git_root});  # Cached.
+
+    open COMMAND, "git rev-parse --show-toplevel |";
+    my $root = <COMMAND>;
+    close COMMAND;
+    chomp $root;
+
+    die "Unable to determine git_root" if ($root eq "");
+
+    $globals{git_root} = $root;  # Memoize for later callers.
+    return $root;
+}
+
+#################### Begin Gerrit JSON Utility Subroutines #####################
+
+# @note There are perl modules for doing this but they are not installed on
+# the pool machines. The parsing for JSON (at least the content from
+# the Gerrit server) isn't so bad...
+
+# Function : jsonParse
+#
+# @brief : Parse a line of JSON into an hash-object.
+#
+# @param[in] line : The JSON content.
+#
+# @return hash : The parsed object.
+#
+sub jsonParse  # Hand-rolled JSON object parser; returns hash-ref.
+{
+    my $line = shift;  # Full JSON object text, "{...}".
+
+    die "Invalid JSON format: $line" unless ($line =~ m/^\{.*\}$/);
+    $line =~ s/^\{(.*)}$/$1/;  # Strip outer braces; parse the body below.
+
+    my %object = ();
+
+    while($line ne "")  # Consume one key:value pair per iteration.
+    {
+        my $key;
+        my $value;
+
+        ($key, $line) = jsonGetString($line);
+        $key =~ s/^"(.*)"$/$1/;  # Drop surrounding quotes from the key.
+
+        $line =~ s/^://;
+        if ($line =~ m/^"/)  # String value.
+        {
+            ($value, $line) = jsonGetString($line);
+            $value =~ s/^"(.*)"$/$1/;
+        }
+        elsif ($line =~ m/^{/)  # Nested object - recurse.
+        {
+            ($value, $line) = jsonGetObject($line);
+            $value = jsonParse($value);
+        }
+        elsif ($line =~ m/^\[/)  # Array value.
+        {
+            ($value, $line) = jsonGetArray($line);
+            $value = jsonParseArray($value);
+        }
+        else  # Bare scalar (number, true/false, null) up to next comma.
+        {
+            $line =~ s/([^,]*)//;
+            $value = $1;
+        }
+
+        $object{$key} = $value;
+    }
+
+    return \%object;
+}
+
+# Function : jsonGetString
+#
+# @brief : Utility function for jsonParse that extracts
+# the string data in a given object
+#
+# @param[in] line : The JSON line containing the strings.
+#
+# @return strings : The parsed strings.
+#
+sub jsonGetString  # Split leading quoted string off $line; returns (string, rest).
+{
+    my $line = shift;
+
+    $line =~ /("[^"]*")(.*)/;  # Match the first quoted run.
+    my $first = $1;
+    my $second = $2;
+
+    if ($first =~ m/\\"$/)  # Ended on an escaped quote - keep consuming.
+    {
+        my ($more, $rest) = jsonGetString($second);
+        return ($first.$more , $rest);
+    }
+    else
+    {
+        return ($first, $second);
+    }
+}
+
+# Function : jsonGetObject
+#
+# @brief : Utility function for jsonParse that extracts
+# the nested JSON object data in a given object
+#
+# @param[in] line : The JSON line containing the object
+#
+# @return object : The nested object
+#
+sub jsonGetObject  # Split a balanced "{...}" prefix off $line; returns (object, rest).
+{
+    my $line = shift;
+
+    $line =~ s/^{//;
+    my $object = "{";
+    my $frag = "";
+
+    my $found_object = 0;  # Set when the last fragment was a nested object.
+
+    until ((not $found_object) && ($object =~ m/}$/))  # Stop at our own "}".
+    {
+        $found_object = 0;
+
+        if ($line =~ m/^\{/)  # Nested object - recurse and keep scanning.
+        {
+            ($frag, $line) = jsonGetObject($line);
+            $object = $object.$frag;
+            $found_object = 1;  # Trailing "}" belongs to the nested object.
+        }
+        elsif ($line =~ m/^"/)
+        {
+            ($frag, $line) = jsonGetString($line);
+            $object = $object.$frag;
+        }
+        elsif ($line =~ m/^\[/)
+        {
+            ($frag, $line) = jsonGetArray($line);
+            $object = $object.$frag;
+        }
+        elsif ($line =~ m/^[:,}]/)  # Structural character - copy it over.
+        {
+            $line =~ s/^([:,}])//;
+            $frag = $1;
+            $object = $object.$frag;
+        }
+        else  # Bare scalar run.
+        {
+            $line =~ s/([^,}]*)//;
+            $frag = $1;
+            $object = $object.$frag;
+        }
+    }
+
+    return ($object, $line);
+}
+
+# Function : jsonGetArray
+#
+# @brief : Utility function for jsonParse that extracts
+# the array in a given object
+#
+# @param[in] line : The JSON line containing the array
+#
+# @return array : The array object
+#
+sub jsonGetArray  # Split a balanced "[...]" prefix off $line; returns (array, rest).
+{
+    my $line = shift;
+
+    $line =~ s/^\[//;
+    my $array = "[";
+    my $frag = "";
+
+    my $found_array = 0;  # Set when the last fragment was a nested array.
+
+    until ((not $found_array) && ($array =~ m/]$/))  # Stop at our own "]".
+    {
+        $found_array = 0;
+
+        if ($line =~ m/^\[/)  # Nested array - recurse and keep scanning.
+        {
+            ($frag, $line) = jsonGetArray($line);
+            $array = $array.$frag;
+            $found_array = 1;  # Was a no-op "$found_array;" - truncated outer arrays.
+        }
+        elsif ($line =~ m/^\{/)
+        {
+            ($frag, $line) = jsonGetObject($line);
+            $array = $array.$frag;
+        }
+        elsif ($line =~ m/^"/)
+        {
+            ($frag, $line) = jsonGetString($line);
+            $array = $array.$frag;
+        }
+        elsif ($line =~ m/^[:,\]]/)  # Structural character - copy it over.
+        {
+            $line =~ s/^([:,\]])//;
+            $frag = $1;
+            $array = $array.$frag;
+        }
+        else  # Bare scalar run.
+        {
+            $line =~ s/([^,]*)//;
+            $frag = $1;
+            $array = $array.$frag;
+        }
+    }
+
+    return ($array, $line);
+}
+
+# Function : jsonParseArray
+#
+# @brief : Utility function for jsonParse that parses
+# the array object
+#
+# @param[in] line : The array
+#
+# @return array : The parsed array object
+#
+#
+sub jsonParseArray  # Parse a JSON array "[...]" into an array-ref.
+{
+    my $line = shift;
+
+    $line =~ s/^\[(.*)\]$/$1/;  # Strip outer brackets.
+
+    my @array = ();
+
+    while ($line ne "")  # Consume one element per iteration.
+    {
+        my $value;
+
+        if ($line =~ m/^"/)  # String element.
+        {
+            ($value, $line) = jsonGetString($line);
+            $value =~ s/^"(.*)"$/$1/;
+        }
+        elsif ($line =~ m/^\{/)  # Object element - recurse via jsonParse.
+        {
+            ($value, $line) = jsonGetObject($line);
+            $value = jsonParse($value);
+        }
+        elsif ($line =~ m/^\[/)  # Nested array element.
+        {
+            ($value, $line) = jsonGetArray($line);
+            $value = jsonParseArray($value);
+        }
+        else  # Bare scalar element up to the next comma.
+        {
+            $line =~ s/([^,]*)//;
+            $value = $1;
+        }
+
+        push @array, $value;
+        $line =~ s/^,//;  # Drop the element separator if present.
+    }
+
+    return \@array;
+}
+
+#################### End Gerrit JSON Utility Subroutines #######################
+
+# Function : gerritIsPatch
+#
+# @brief : Determine if patch identifier is a Gerrit patch or not.
+#
+# @param[in] i_patch : The patch to make determination about.
+#
+# @retval flag : true/false (patch is/not a valid ID)
+#
+sub gerritIsPatch  # True if the id looks like a Gerrit Change-Id ("I"+hex).
+{
+    my $patch = shift;
+    return 1 if ($patch =~ m/I[0-9a-f]+/);  # NOTE(review): unanchored match - confirm intended.
+    return 0;
+}
+
+# Function : configFilename
+#
+# @brief       : Return the name of the file that stores the GIT server details
+#
+# @return : Location of the config file
+#
+sub configFilename  # Path of the config file holding gerrit server/project.
+{
+    return gitRoot()."/.git/gitRelease.config";
+}
+
+# Function : configInit
+#
+# @brief : Fetch & write server details to the config file
+#
+sub configInit  # Create the config file from the ssh git remote, once per run.
+{
+    return if (defined $globals{configInit});  # Already initialized.
+
+    unless (-e configFilename())
+    {
+        open COMMAND, "git config --list | grep remote.*ssh |";
+        my $url = <COMMAND>;
+        close COMMAND;
+        chomp $url;
+
+        die "Undefined git-remote 'gerrit'" if ($url eq "");
+
+        die "Unexpected url found: $url" if (not ($url =~ m/ssh:\/\/.*\/.*/));
+
+        my $server = $url;
+        my $project = $url;
+
+        # match first occurrence of '/' after ssh://
+        # eg: remote.gerrit.url=ssh://hw.gerrit/hw/ppe
+        #     $2 is 'hw.gerrit'
+        #     $3 is 'hw/ppe'
+        $server =~ s/(.*)ssh:\/\/(.*?)\/(.*)/$2/;
+        $project =~ s/(.*)ssh:\/\/(.*?)\/(.*)/$3/;
+
+        open(UNUSED, ">".configFilename()) || die;  # Touch the config file.
+        close UNUSED;
+
+        system("git config --file ".configFilename().
+               " --add releaseLevels.server $server");
+        system("git config --file ".configFilename().
+               " --add releaseLevels.project $project");
+    }
+    $globals{configInit} = 1;  # Mark done so later calls are no-ops.
+}
+
+# Function : configProject
+#
+# @brief : Fetch the project name of the current configured repository
+#
+# @return : GIT project name
+#
+sub configProject  # Return (and memoize) the configured gerrit project name.
+{
+    return $globals{config_project} if (defined $globals{config_project});
+
+    configInit();  # Ensure the config file exists.
+
+    open COMMAND, "git config --file ".configFilename().
+                  " --get releaseLevels.project |";
+    my $project = <COMMAND>; chomp($project);
+    close COMMAND;
+
+    die "Project config does not exist" if ($project eq "");
+
+    $globals{config_project} = $project;  # Memoize.
+
+    return $project;
+}
+
+# Function : configServer
+#
+# @brief : Fetch the server name of the current configured repository
+#
+# @return : GIT server location
+#
+sub configServer  # Return (and memoize) the configured gerrit server address.
+{
+    return $globals{config_server} if (defined $globals{config_server});
+
+    configInit();  # Ensure the config file exists.
+
+
+    open COMMAND, "git config --file ".configFilename().
+                  " --get releaseLevels.server |";
+    my $server = <COMMAND>; chomp($server);
+    close COMMAND;
+
+    die "Server config does not exist" if ($server eq "");
+
+    $globals{config_server} = $server;  # Memoize.
+    return $server;
+
+}
+
+# Function : gerritSSHCommand
+#
+# @brief : Creates a properly formed ssh command based on the server address
+#
+# @return : The basic ssh command to connect to the server.
+#
+sub gerritSSHCommand  # Build (and memoize) the "ssh ... gerrit" prefix.
+{
+    return $globals{gerrit_ssh_command}
+        if (defined $globals{gerrit_ssh_command});  # Cached from prior call.
+
+    my $server = configServer();
+    my $port = "";
+
+    if ($server =~ m/.*:.*/)  # "host:port" form - split the two parts.
+    {
+        $port = $server;
+        $server =~ s/(.*):.*/$1/;
+        $port =~ s/.*:(.*)/$1/;
+
+        $port = "-p $port";
+    }
+
+    my $command = "ssh -qx $port $server gerrit";
+
+    $globals{gerrit_ssh_command} = $command;  # Memoize for later callers.
+    return $command;
+}
+
+# Function : gerritQuery
+#
+# @brief : Performs a gerrit query and parses the resulting JSON.
+#
+# @param[in] query : The query to perform.
+#
+# @return item : A list of items from the JSON query. Each item is a
+# hash (key-value pair) for the item attributes.
+#
+sub gerritQuery  # Run a gerrit query; return array-ref of parsed JSON rows.
+{
+    my $query = shift;
+    my @items = ();
+
+    $query = gerritSSHCommand()." query $query --current-patch-set --patch-sets --format=JSON |";
+
+    open COMMAND, $query;  # NOTE(review): open result unchecked.
+    while (my $line = <COMMAND>)  # One JSON object per line of output.
+    {
+        chomp $line;
+        push @items, jsonParse($line);
+    }
+
+    return \@items;
+}
+
+# Function : gerritQueryReference
+#
+# @brief : Retrieves reference for a patch id, patchset number
+#
+# @param[in] changeId : Change id of the patch
+# @param[in] patchNumber : Patch set number
+#
+# @return reference : The reference string
+#
+sub gerritQueryReference  # Return the fetch ref for changeId at patchNumber.
+{
+    my $changeId = shift;
+    my $patchNumber = shift;  # Empty string => use the current patch set.
+
+    my $project = configProject();
+
+    my $query_result = gerritQuery("$changeId project:$project");
+
+    foreach my $result (@{$query_result})
+    {
+        if ($result->{id} eq $changeId)
+        {
+            # If all patchsets queried, search all of them for the commit
+            foreach my $patchset (@{$result->{patchSets}})
+            {
+                if ($patchNumber eq "")  # Was " " - inconsistent with gerritQueryCommit.
+                {
+                    return $result->{currentPatchSet}->{ref};  # currentPatchSet lives on the change, not the patchset.
+                }
+                else
+                {
+                    if ($patchset->{number} =~ m/$patchNumber/)
+                    {
+                        return $patchset->{ref};
+                    }
+                }
+            }
+        }
+    }
+    die "Cannot find $changeId in $project";
+}
+
+# Function : gerritQueryCommit
+#
+# @brief : Retrieves commit for a patch id, patchset number
+#
+# @param[in] changeId : Change id of the patch
+# @param[in] patchNumber : Patch set number
+#
+# @return commit : The commit string
+#
+sub gerritQueryCommit  # Return the commit SHA for changeId at patchNumber.
+{
+    my $changeId = shift;
+    my $patchNumber = shift;  # Empty string => use the current patch set.
+
+    my $project = configProject();
+
+    my $query_result = gerritQuery("$changeId project:$project");
+
+    foreach my $result (@{$query_result})
+    {
+        if ($result->{id} eq $changeId)
+        {
+            # If all patchsets queried, search all of them for the commit
+            foreach my $patchset (@{$result->{patchSets}})
+            {
+                if ($patchNumber eq "")
+                {
+                    return $result->{currentPatchSet}->{revision};  # currentPatchSet lives on the change, not the patchset.
+                }
+                else
+                {
+                    if ($patchset->{number} =~ m/$patchNumber/)
+                    {
+                        return $patchset->{revision};
+                    }
+                }
+            }
+        }
+    }
+    die "Cannot find $changeId in $project";
+}
+
+# Function : patchMergeStatus
+#
+# @brief : Check if given patch is merged into repository
+#
+# @param[in] changeId : Change id of the patch
+#
+# @return mergeStatus : 1 if merged; else 0
+#
+sub patchMergeStatus  # 1 if the change is merged in gerrit, else 0; dies if unknown.
+{
+    my $mergeStatus = 1;
+
+    my $changeId = shift;
+
+    my $project = configProject();
+
+    my $query_result = gerritQuery("$changeId project:$project");
+
+    foreach my $result (@{$query_result})
+    {
+        if ($result->{id} eq $changeId)
+        {
+            if ($result->{status} eq "MERGED" || $result->{status} eq "merged")
+            {
+                $mergeStatus = 1;
+            }
+            else
+            {
+                $mergeStatus = 0;  # Open, abandoned, etc.
+            }
+            return $mergeStatus;
+        }
+    }
+    die "Cannot find $changeId in $project";
+}
diff --git a/src/build/tools/sandbox-create b/src/build/tools/sandbox-create
new file mode 100755
index 00000000..2f634358
--- /dev/null
+++ b/src/build/tools/sandbox-create
@@ -0,0 +1,60 @@
+#!/bin/bash
+# IBM_PROLOG_BEGIN_TAG
+# This is an automatically generated prolog.
+#
+# $Source: src/build/tools/sandbox-create $
+#
+# OpenPOWER sbe Project
+#
+# Contributors Listed Below - COPYRIGHT 2016
+#
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied. See the License for the specific language governing
+# permissions and limitations under the License.
+#
+# IBM_PROLOG_END_TAG
+
+echo " Creating ODE sandbox..."
+
+# Ensure backing build is accessible.
+if [ ! -d "$BACKING_BUILD" ];
+then
+    echo " Cannot access the backing build: $BACKING_BUILD"
+    exit -1  # NOTE(review): 'exit -1' yields status 255; consider 'exit 1'.
+fi
+
+SANDBOXRC=${SANDBOXROOT}/sbesandboxrc;
+
+# Delete existing sandbox if it exists.
+if [ -d "$SANDBOXBASE" ];
+then
+    # Prompt the user before removing an existing sandbox.
+    echo -ne " Remove existing sandbox? {y/n}:"
+    read input
+    if [ "$input" == "y" ];then
+        echo " Executing : mksb -undo -auto -rc $SANDBOXRC -dir $SANDBOXROOT -sb $SANDBOXNAME"
+        mksb -undo -auto -rc $SANDBOXRC -dir $SANDBOXROOT -sb $SANDBOXNAME
+    else
+        exit 0
+    fi
+fi
+
+if [ -d "$SANDBOXBASE" ];  # Leftovers after mksb -undo: remove by hand.
+then
+    rm -rf $SANDBOXBASE
+    rm -f $SANDBOXRC
+fi
+
+# Create sandbox.
+echo " Sandbox backing build = $BACKING_BUILD"
+echo " mksb -rc $SANDBOXRC -dir $SANDBOXROOT -back $BACKING_BUILD -sb $SANDBOXNAME -m ppc -auto"
+mksb -rc $SANDBOXRC -dir $SANDBOXROOT -back $BACKING_BUILD -sb $SANDBOXNAME -m ppc -auto || exit -1
diff --git a/src/build/tools/sbeCmvcConstants.py b/src/build/tools/sbeCmvcConstants.py
new file mode 100755
index 00000000..65a2d8a3
--- /dev/null
+++ b/src/build/tools/sbeCmvcConstants.py
@@ -0,0 +1,84 @@
+# IBM_PROLOG_BEGIN_TAG
+# This is an automatically generated prolog.
+#
+# $Source: src/build/tools/sbeCmvcConstants.py $
+#
+# OpenPOWER sbe Project
+#
+# Contributors Listed Below - COPYRIGHT 2016
+#
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied. See the License for the specific language governing
+# permissions and limitations under the License.
+#
+# IBM_PROLOG_END_TAG
+'''
+###########################################################
+# @file sbeCmvcConstants.py
+# @author George Keishing <gkeishin@in.ibm.com>
+# Sangeetha TS <sangeet2@in.ibm.com>
+# @brief Constants support tool operation
+#
+# Created on March 03, 2016
+# ----------------------------------------------------
+# @version Developer Date Description
+# ----------------------------------------------------
+# 1.0 gkeishin 03/03/16 Initial create
+###########################################################
+'''
+
+
+'''
+Generic Failure RC code
+'''
+SUCCESS_EXIT = 0
+SHELL_EXIT = 1 # Shell exited with error
+ERROR_EXIT = 2
+HELP_EXIT = 3
+SUCCESS_DEV_EXIT = 4
+ERROR_SYS_EXIT = 100
+
+
+# Tool-specific error codes
+ERROR_SETTING = 10 # No CMVC env set in the .bashrc, or a generic sandbox-env error
+ERROR_CMVC_LOGIN = 11 # CMVC login session not established
+ERROR_CMVC_CHECKOUT = 12 # Failed file checkout
+ERROR_CMVC_CHECKIN = 13 # Failed file check in
+ERROR_CMVC_FIX_RECORD = 14 # Failed fixing complete record
+ERROR_CMVC_FILE_COPY = 15 # Copying file failed
+ERROR_TRACK_STATE = 16 # Track not in required state
+ERROR_CI_TRIGGER = 17 # CI not started successfully
+
+SUCCESS_CMVC_CHECKOUT = 50
+SUCCESS_CMVC_CHECKIN = 51
+SUCCESS_CMVC_FIX_RECORD = 52
+SUCCESS_TRACK_STATE = 53
+SUCCESS_CI_TRIGGER = 54
+
+# Default comma-separated list of file names to check in
+CMVC_FILE_LIST ="sbe_sp_intf.H,simics.tar,sbe_pibmem.bin,sbe_seeprom.bin"
+CMVC_FILE_UNDO_LIST ="src/sbei/sbfw/sbe_sp_intf.H src/sbei/sbfw/simics.tar src/sbei/sbfw/img/sbe_pibmem.bin src/sbei/sbfw/img/sbe_seeprom.bin"
+
+CMVC_DIR_CREATE ="sandbox_"
+
+
+# Hashing specific
+SUCCESS_HASH_CHECK = 100
+ERROR_HASH_CHECK = 101
+
+
+# For Development
+ERROR_SANDBOX_EXIST = 200 # Sandbox doesn't exist
+ERROR_FILE_INPUT = 201 # User entered no file
+ERROR_BUILD_FAILED = 202 # Compilation failed
+ERROR_HOOKING_FILE = 203 # Error while building shell hooks
+FILE_LOOKUP_LIST ="src/sbei/sbfw/sbe_sp_intf.H,src/sbei/sbfw/simics.tar,src/sbei/sbfw/img/sbe_pibmem.bin,src/sbei/sbfw/img/sbe_seeprom.bin"
diff --git a/src/build/tools/sbeCmvcUtility.py b/src/build/tools/sbeCmvcUtility.py
new file mode 100755
index 00000000..793029af
--- /dev/null
+++ b/src/build/tools/sbeCmvcUtility.py
@@ -0,0 +1,570 @@
+#!/usr/bin/python
+# IBM_PROLOG_BEGIN_TAG
+# This is an automatically generated prolog.
+#
+# $Source: src/build/tools/sbeCmvcUtility.py $
+#
+# OpenPOWER sbe Project
+#
+# Contributors Listed Below - COPYRIGHT 2016
+#
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied. See the License for the specific language governing
+# permissions and limitations under the License.
+#
+# IBM_PROLOG_END_TAG
+'''
+###########################################################
+# @file sbeCmvcUtility.py
+# @author: George Keishing <gkeishin@in.ibm.com>
+# Sangeetha TS <sangeet2@in.ibm.com>
+# @brief   Utility module to support CMVC operations
+#
+# Created on March 03, 2016
+# ----------------------------------------------------
+# @version Developer Date Description
+# ----------------------------------------------------
+# 1.0 gkeishin 03/03/16 Initial create
+###########################################################
+'''
+
+#-------------------------
+# Imports
+#-------------------------
+import os, sys
+import time
+import os.path
+import subprocess
+import shutil
+import hashlib
+from subprocess import Popen, PIPE
+from os.path import expanduser # for getting $HOME PATH
+import stat # for File permission op
+
+# Libraries/utility funcs and user define const
+import sbeCmvcConstants as errorcode
+
+
+##########################################################################
+# Function : utilCmvcChangeDir
+#
+# @param i_cmvcnum : CMVC defect or Feature number
+#
+# @brief Create a directory
+#
+##########################################################################
+def utilCmvcChangeDir(i_cmvcnum):
+ l_home_path= expanduser("~") + "/" + errorcode.CMVC_DIR_CREATE + i_cmvcnum
+
+ print " Sandbox path\t: ",l_home_path
+ cmd='mkdir -p ' + l_home_path
+ os.system(cmd)
+ os.chdir(l_home_path)
+
+##########################################################################
+# Function :utilCmvcRepoPath
+#
+# @param i_cmvcnum : CMVC defect or Feature number
+#
+# @param i_pathname : SBE repo Path location
+#
+# @param i_filename : Files to be copied
+#
+# @brief find the files in repo
+#
+##########################################################################
+def utilCmvcRepoPath(i_pathname, i_cmvcnum, i_filename):
+ l_home_path= expanduser("~") + "/" + errorcode.CMVC_DIR_CREATE + i_cmvcnum
+ # Strip the last string from the file path input
+
+ print " Sandbox path\t: ",l_home_path
+ print " SBE Repo path\t: ",i_pathname
+
+ if i_filename == "None":
+ i_filename = errorcode.CMVC_FILE_LIST
+
+ for l_filename in i_filename.split(","):
+
+ # Find the files and copy
+ l_sb_path = utilFindFile(l_filename,l_home_path)
+ l_repo_path = utilFindFile(l_filename,i_pathname)
+
+ if l_sb_path is None :
+ print " ERROR: Checked out sandbox does not contain " + l_filename
+ return errorcode.ERROR_CMVC_FILE_COPY
+ if l_repo_path is None :
+ print " ERROR: File Not Found in SBE repo " + l_filename
+ return errorcode.ERROR_CMVC_FILE_COPY
+
+ cp_cmd = 'cp ' + l_repo_path + ' ' + l_sb_path
+ rc=os.system(cp_cmd )
+ if rc:
+ return errorcode.ERROR_CMVC_FILE_COPY
+ else:
+ # validate the copied files via hashing
+ l_src = l_repo_path
+ l_dest = l_sb_path
+ hash_err = utilCheckFileHash(l_src,l_dest)
+ if hash_err == errorcode.ERROR_HASH_CHECK:
+ return hash_err
+
+ # Returned success
+ return errorcode.SUCCESS_EXIT
+
+##########################################################################
+# Function :utilFindFile
+#
+# @param i_filename : File Name
+#
+# @param i_path : Directory to search in
+#
+# @brief Finds a given file and returns the absolute path
+#
+##########################################################################
def utilFindFile(i_filename, i_path):
    """Walk i_path recursively and return the absolute path of the first
    file named i_filename, or None when no match exists."""
    for l_dir, _l_subdirs, l_names in os.walk(i_path):
        if i_filename not in l_names:
            continue
        return os.path.join(l_dir, i_filename)
    return None
+
+##########################################################################
+# Function :utilFindFilePPE
+#
+# @param i_filename : File Name
+#
+# @param i_path : Directory to search in
+#
+# @brief Finds a given file in the PPE repo and returns the absolute path
+#
+##########################################################################
def utilFindFilePPE(i_filename, i_path, i_sandbox_name):
    """Locate i_filename under the PPE repo at i_path.

    Hits that live inside a test sandbox directory (i_sandbox_name) are
    skipped, unless the search root itself lies inside that sandbox.
    Returns the absolute path, or None when nothing qualifies.
    """
    for l_dir, _l_subdirs, l_names in os.walk(i_path):
        if i_filename not in l_names:
            continue
        # Accept hits outside the sandbox; accept sandbox hits only when
        # the repository being searched is itself the sandbox copy.
        if (i_sandbox_name not in l_dir) or (i_sandbox_name in i_path):
            return os.path.join(l_dir, i_filename)
    return None
+
+##########################################################################
+# Function :utilCmvcCheckout
+#
+# @param i_filename : File Name
+#
+# @param i_release : Fips FW Release to checkout (ex: fips910 )
+#
+# @param i_cmvcnum : CMVC defect or Feature number
+#
+# @brief Check out a given file
+#
+##########################################################################
+def utilCmvcCheckout(i_filename, i_release, i_cmvcnum):
+ print " File Name\t: ",i_filename
+ print " Release\t: ",i_release
+
+ # The file simics.tar is not unique so provide the relative path
+ if i_filename == "simics.tar":
+ i_filename = 'src/sbei/sbfw/simics.tar'
+
+ print " CMVC #\t: ",i_cmvcnum[1:]
+ l_home_path= expanduser("~") + "/" + errorcode.CMVC_DIR_CREATE + i_cmvcnum
+
+ cmd='File -checkout ' + i_filename + ' -release '+ i_release + ' -relative ' + l_home_path
+ if i_cmvcnum[:1] == "D":
+ cmd += ' -defect ' + i_cmvcnum[1:]
+ else:
+ cmd += ' -feature ' + i_cmvcnum[1:]
+
+ print " Executing\t: ", cmd
+ rc = os.system(cmd)
+ if rc:
+ # rc 256 File not found in CMVC
+ if rc == 256:
+ print " * File was not found or Error operation in CMVC"
+ return errorcode.ERROR_CMVC_CHECKOUT
+ else:
+ return errorcode.SUCCESS_CMVC_CHECKOUT
+
+
+##########################################################################
+# Function :utilCmvcCheckin
+#
+# @param i_filename : Relative Path of the File
+#
+# @param i_release : Fips FW Release to checkout (ex: fips910 )
+#
+# @param i_cmvcnum : CMVC defect or Feature number
+#
+# @brief Check in a given file
+#
+##########################################################################
+def utilCmvcCheckin(i_filename, i_release, i_cmvcnum):
+ print " File Name\t: ",i_filename
+ print " Release\t: ",i_release
+ print " CMVC #\t: ",i_cmvcnum[1:]
+
+ l_home_path= expanduser("~") + "/" + errorcode.CMVC_DIR_CREATE + i_cmvcnum
+ l_base_path = utilFindFile(i_filename,l_home_path)
+ # This will give the ablsolute path, strip it from src
+ for l_var in l_base_path.split("/src"):
+ if i_filename in l_var:
+ l_str = 'src' + l_var
+
+ cmd='File -checkin ' + l_str + ' -release '+ i_release + ' -relative ' + l_home_path
+ if i_cmvcnum[:1] == "D":
+ cmd += ' -defect ' + i_cmvcnum[1:]
+ else:
+ cmd += ' -feature ' + i_cmvcnum[1:]
+
+ print " Executing\t: ", cmd
+ rc = os.system(cmd)
+ if rc:
+ return errorcode.ERROR_CMVC_CHECKIN
+ else:
+ return errorcode.SUCCESS_CMVC_CHECKIN
+
+
+##########################################################################
+# Function :utilCmvcFixComplete
+#
+# @param i_cmvcnum : CMVC defect or Feature number
+#
+# @param i_release : Fips Release string
+#
+# @brief Fix the record to complete
+#
+##########################################################################
+def utilCmvcFixComplete(i_cmvcnum, i_release):
+ print " CMVC #\t: ",i_cmvcnum
+ l_cmvcnum =i_cmvcnum[1:]
+
+ if i_cmvcnum[:1] == "D":
+ cmd='Fix -complete ' + ' -defect ' + l_cmvcnum + ' -r ' + i_release + ' -component esw_sbei'
+ else:
+ cmd='Fix -complete ' + ' -feature ' + l_cmvcnum + ' -r ' + i_release + ' -component esw_sbei'
+
+ print " Executing\t: ", cmd
+ rc = os.system(cmd)
+ if rc:
+ return errorcode.ERROR_CMVC_FIX_RECORD
+ else:
+ return errorcode.SUCCESS_CMVC_FIX_RECORD
+
+##########################################################################
+# Function :utilCheckTrackState
+#
+# @param i_cmvcnum : CMVC defect or Feature number
+#
+# @param i_release : Fips Release string
+#
+# @brief Check if the track is in fix state or not
+#
+##########################################################################
+def utilCheckTrackState(i_cmvcnum, i_release):
+ print " CMVC #\t: ",i_cmvcnum
+ print " Release\t: ",i_release
+
+ l_cmvcnum =i_cmvcnum[1:]
+ cmd = 'Track -view -release ' + i_release
+ if i_cmvcnum[:1] == "D":
+ cmd += ' -defect ' + l_cmvcnum
+ else:
+ cmd += ' -feature ' + l_cmvcnum
+ cmd += ' | grep state '
+
+ print " Executing\t: ", cmd
+ ex_cmd = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
+ for line in ex_cmd.stdout :
+ if not line: continue
+ line = line.strip()
+ if "fix" in line :
+ return errorcode.SUCCESS_TRACK_STATE
+ return errorcode.ERROR_TRACK_STATE
+
+##########################################################################
+# Function :utilCheckFileHash
+#
+# @param i_src : Source location of the file to be copy from
+#
+# @param i_dest : Destination location of the file to be copied to
+#
+# @brief Make sure after copying files, the hash matches
+#
+##########################################################################
+def utilCheckFileHash(i_src, i_dest):
+ print " Source\t: ",i_src
+ print " Destination\t: ",i_dest
+
+ sha_orig = hashlib.sha256()
+ sha_orig.update(file(i_src).read())
+ orig_hash=sha_orig.hexdigest()
+ print " * Orig Hash\t [ %s : %s ] "% (os.path.basename(i_src),orig_hash)
+
+ sha_copy = hashlib.sha256()
+ sha_copy.update(file(i_dest).read())
+ copied_hash=sha_copy.hexdigest()
+ print " * Copied Hash\t [ %s : %s ] "% (os.path.basename(i_dest),copied_hash)
+
+ if orig_hash == copied_hash:
+ print " -> Hash Match .. Continue\n"
+ return errorcode.SUCCESS_HASH_CHECK
+ else:
+ print " Mismatch Hash.. Abort"
+ return errorcode.ERROR_HASH_CHECK
+
+##########################################################################
+# Function :utilRollBack
+#
+# @param i_action : Undo Checkout or checkin
+#
+# @param i_cmvcnum : CMVC defect or Feature number
+#
+# @param i_release : Fips FW Release to checkout (ex: fips910 )
+#
+# @brief Brute force undo of all files
+#
+##########################################################################
+def utilRollBack(i_action, i_cmvcnum, i_release):
+ print "\n * Action request\t: ",i_action
+ if i_action == "checkin":
+ print " * Undoing All default File list"
+ if i_cmvcnum[:1] == "D":
+ cmd = 'File -undo ' + errorcode.CMVC_FILE_UNDO_LIST + ' -release ' + i_release + ' -defect ' + i_cmvcnum[1:] + ' >/dev/null 2>&1'
+ else:
+ cmd = 'File -undo ' + errorcode.CMVC_FILE_UNDO_LIST + ' -release ' + i_release + ' -feature ' + i_cmvcnum[1:] + ' >/dev/null 2>&1'
+ else:
+ print " * Unlocking All default Files list"
+ if i_cmvcnum[:1] == "D":
+ cmd = 'File -unlock ' + errorcode.CMVC_FILE_UNDO_LIST + ' -release ' + i_release + ' >/dev/null 2>&1'
+ else:
+ cmd = 'File -unlock ' + errorcode.CMVC_FILE_UNDO_LIST + ' -release ' + i_release + ' >/dev/null 2>&1'
+
+ #print " Executing : ", cmd
+ rc = os.system(cmd)
+ # CMVC throws this rc even if its successfull
+ if rc != 1024 and rc !=0:
+ print "\n Error [ %s ] in Undoing/Unlocking Files.. Please check manually"%rc
+ else:
+ print "\n Roll Back Successfull.. Please Revisit your inputs [ OK ] "
+
+
+##########################################################################
+# Function :utilTriggerJenkins
+#
+# @param i_cmvcnum : CMVC defect or Feature number
+#
+# @param i_release : Fips FW Release to checkout (ex: fips910 )
+#
+# @param i_bvt : Fips BVT xml file
+#
+# @brief Trigger Jenkins CI job
+#
+##########################################################################
+def utilTriggerJenkins(i_cmvcnum,i_release,i_bvt):
+ print " Jenkins job for %s" % i_cmvcnum
+ #PATH : /afs/austin.ibm.com/projects/esw/bin/fsp-CI-jenkins
+ cmd = "fsp-CI-jenkins -r " + i_release + " -t " + i_cmvcnum[1:] + " --test_on_hardware=y"
+ if not i_bvt == "None" :
+ cmd += " -b " + i_bvt
+ rc = os.system(cmd)
+ if rc :
+ return errorcode.ERROR_CI_TRIGGER
+ else :
+ return errorcode.SUCCESS_CI_TRIGGER
+
+
+##########################################################################
+# Function :utilCopyFileToSandbox
+#
+# @param i_pathname : SBE repo Path location
+#
+# @param i_sandboxname : Sandbox repo Path location
+#
+# @param i_filename : Files to be copied
+#
+# @brief find the files in repo and copy to the sandbox
+#
+##########################################################################
+def utilCopyFileToSandbox(i_pathname,i_sandboxname,i_filename):
+
+ print "\n *** For Development *** \n"
+ print " Sandbox path\t: ",i_sandboxname
+ print " SBE Repo path\t: ",i_pathname
+ print "\n"
+
+ if i_filename == "None":
+ i_filename = errorcode.CMVC_FILE_LIST
+ #print " No User supplied Files to copy"
+ #return errorcode.ERROR_FILE_INPUT
+
+ for l_filename in i_filename.split(","):
+ # Find the files and copy
+ l_sandbox_name = os.path.basename(i_sandboxname)
+ l_repo_path = utilFindFilePPE(l_filename,i_pathname,l_sandbox_name)
+ if l_repo_path is None :
+ print " File [ %s ] not found in Repo..\t Check your repo and retry "%l_filename
+ return errorcode.ERROR_CMVC_FILE_COPY
+ else:
+ for files in errorcode.FILE_LOOKUP_LIST.split(","):
+ if l_filename in files:
+ break
+
+ # Append this src path and to the sanbox base and copy the file
+ sb_cp_path = i_sandboxname + '/' + files
+
+ # Check if this path exist in the sandbox , if not create and copy
+ if os.path.exists(sb_cp_path) == False:
+ cmd='mkdir -p ' + os.path.dirname(sb_cp_path)
+ #print " Creating dir %s "%cmd
+ os.system(cmd)
+
+ # Copy the file
+ copy_cmd = 'cp -rf ' + l_repo_path + ' ' + sb_cp_path
+ rc = os.system(copy_cmd)
+ if rc:
+ print " RC code :",rc
+ print " ERROR : Copying file : ",copy_cmd
+ return errorcode.ERROR_CMVC_FILE_COPY
+ else:
+ print " Copied file : ",copy_cmd
+
+ return errorcode.SUCCESS_DEV_EXIT
+
+
+##########################################################################
+# Function :utilppeSbENV
+#
+# @param i_env_key : ENV paramter
+#
+# @brief find the PPE Repo path from ENV and returns the path string
+#
+##########################################################################
+def utilppeSbENV(i_env_key):
+ #-----------------------------------------------------------
+ # SBEROOT=/gsa/ausgsa/projects/i/indiateam04/gkeishin/PPE_CHANGES
+ #-----------------------------------------------------------
+ l_found_ppe_conf = False
+ env_ppe_path="None"
+ for key in os.environ.keys():
+ if i_env_key in key:
+ #print "\t %s : %s" % (key,os.environ[key])
+ env_ppe_path = os.environ[key]
+ l_found_ppe_conf = True
+
+ if l_found_ppe_conf == False:
+ print " ---------------------------------------------------------"
+ print " | [ ERROR SETTING ] : The PPE Repository ENV is not set |"
+ print " | Please do ./sb workon to set/load the PPE repo ENV |"
+ print " ---------------------------------------------------------"
+
+ return env_ppe_path
+
+##########################################################################
+# Function : utilFind_sb_base
+#
+# @param i_sb_name : Sandbox name
+#
+# @brief find the sandbox base path
+#
+##########################################################################
def utilFind_sb_base(i_sb_name):
    """Return the SANDBOXBASE path of the named fips sandbox, or "None".

    Runs 'workon' in command mode and scrapes SANDBOXBASE from its env.
    """
    # workon -m ppc <sandbox> -c 'env | grep SANDBOXBASE'
    find_sb_base = 'workon -m ppc ' + i_sb_name + " -c 'env | grep SANDBOXBASE ' | grep SANDBOXBASE"

    # Expected output: SANDBOXBASE=/abs/path/to/sandbox
    out_str = os.popen(find_sb_base).read()

    if not out_str:
        return "None"
    # BUGFIX: the old out_str.strip('SANDBOXBASE=') treated the prefix as
    # a character SET (stripping those chars from BOTH ends) and kept the
    # trailing newline. Split on the first '=' and trim instead.
    return out_str.split('=', 1)[1].strip()
+
+##########################################################################
+# Function : utilFind_sb_rc
+#
+# @param i_sb_name : Sandbox RC path
+#
+# @brief find the sandbox RC path
+#
+##########################################################################
def utilFind_sb_rc(i_sb_name):
    """Return the directory holding the named sandbox's rc file, or "None".

    Runs 'workon' in command mode and scrapes SANDBOXRC from its env.
    """
    # workon -m ppc <sandbox> -c 'env | grep SANDBOXRC'
    find_sb_rc = 'workon -m ppc ' + i_sb_name + " -c 'env | grep SANDBOXRC ' | grep SANDBOXRC"

    # Expected output: SANDBOXRC=/abs/path/.sandboxrc
    out_str = os.popen(find_sb_rc).read()

    if not out_str:
        return "None"
    # BUGFIX: str.strip(chars) is a character-set strip, not a prefix
    # strip; split on the first '=' to extract the value safely, then
    # return the containing directory.
    return os.path.dirname(out_str.split('=', 1)[1].strip())
+
+##########################################################################
+# Function : utilFind_ENV_string
+#
+# @param i_env_name : ENV string
+#
+# @brief find the ENV string set in the env
+#
+##########################################################################
def utilFind_ENV_string(i_env_name):
    """Return the value of i_env_name from the environment, or "None".

    Keeps the original 'env | grep' approach (so substring matches still
    select a line) but extracts the value by splitting on '='.
    """
    find_env_name = "env | grep " + i_env_name

    out_str = os.popen(find_env_name).read()

    if not out_str:
        return "None"

    # BUGFIX: the old out_str.strip(i_env_name + "=") treated the prefix
    # as a character SET, mangling values (e.g. MACHINE=NIMBUS -> "BUS").
    # Prefer the line that starts with the exact name.
    l_prefix = i_env_name + "="
    for l_line in out_str.splitlines():
        if l_line.startswith(l_prefix):
            return l_line[len(l_prefix):]
    # Fall back to the first matching line's value
    return out_str.splitlines()[0].split('=', 1)[-1]
+
+##########################################################################
+# Function : utilWriteShell_hooks
+#
+# @param i_sandbox_path : Sandbox full path
+#
+# @brief find the ENV string set in the env
+#
+##########################################################################
def utilWriteShell_hooks(i_sandbox_path):
    """Write the compile hook script <sandbox>/src/compilesb, mark it
    executable, and return its path.

    The generated script cd's into src/sbei/sbfw and runs 'mk -a' then
    'mk install_all', aborting on the first failure.
    """
    hook_file = i_sandbox_path + '/src/compilesb'

    # Generated script:
    #   #!/bin/sh
    #   cd <sandbox>/src/sbei/sbfw/
    #   mk -a || exit -1
    #   mk install_all || exit -1
    l_script = ('#!/bin/sh \n' +
                'cd ' + i_sandbox_path + '/src/sbei/sbfw/' + '\n' +
                'mk -a || exit -1 \n' +
                'mk install_all || exit -1\n')
    f = open(hook_file, 'w')
    f.write(l_script)
    f.close()

    # Add the execute bit for the owner
    perm_st = os.stat(hook_file)
    os.chmod(hook_file, perm_st.st_mode | stat.S_IEXEC)

    return hook_file  # path of the shell file
+
diff --git a/src/build/tools/sbeGitTool.pl b/src/build/tools/sbeGitTool.pl
new file mode 100755
index 00000000..4bb70348
--- /dev/null
+++ b/src/build/tools/sbeGitTool.pl
@@ -0,0 +1,332 @@
+#! /usr/bin/perl
+# IBM_PROLOG_BEGIN_TAG
+# This is an automatically generated prolog.
+#
+# $Source: src/build/tools/sbeGitTool.pl $
+#
+# OpenPOWER sbe Project
+#
+# Contributors Listed Below - COPYRIGHT 2016
+#
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied. See the License for the specific language governing
+# permissions and limitations under the License.
+#
+# IBM_PROLOG_END_TAG
+
use strict;
#use lib "$ENV{'PERLMODULES'}";
use lib "sbe/build/tools/perl.modules";
use Getopt::Long qw(:config pass_through);
use Data::Dumper;
use Cwd;
use gitUtil;

# Global variables & constants
my $debug = 0;        # --debug flag: enables verbose tracing
my $help = 0;         # --help flag: print usage and exit
my %globals = ();     # sandbox / git-repo paths filled in by git_environ_init
my @references = ();  # gerrit fetch refs resolved from the patch list
my @patchList = ();   # parsed changeId:patch-set pairs
my @commits = ();     # commit ids resolved from the patch list
my $patches = "";     # --patches CSV input
my $sbbase = "";      # --sbbase sandbox base path
my $sbname = "";      # --sbname sandbox name
my $sbrc = "";        # --rc optional sandbox rc file
my $release = "";     # NOTE(review): never assigned or read below - confirm before removing

# Subcommand dispatch table: name -> handler
my %commands = ( "extract" => \&execute_extract,
                 "get-commits" => \&execute_get_commits);

# Handle input command (pass_through keeps unknown options in @ARGV)
GetOptions("debug!" => \$debug,
           "help" => \$help);

if ($help)
{
    execute_help();
}
else
{
    # First bare argument selects the subtool; unknown names show help
    my $command = shift @ARGV;
    if ($commands{$command})
    {
        &{$commands{$command}}();
    }
    else
    {
        execute_help();
    }
}

# Echo any options GetOptions passed through, for debugging
foreach my $arg (@ARGV)
{
    print "Unprocessed arg: $arg\n" if $debug;
}
+
+############################## Begin Actions ##################################
+
# Print overall usage (no argument), die on an unknown subcommand, or
# print the per-subtool help text.
sub execute_help
{
    my $command = shift @ARGV;

    if ($command eq "")
    {
        print "sbeGitTool\n";
        print "  Run FSP-CI from a set of git commits\n";
        print "\n";
        print "  Syntax:\n";
        print "  sbeGitTool <tool> [options]\n";
        print "\n";
        print "  Available subtools:\n";
        foreach my $key (sort keys %commands)
        {
            print "      $key\n";
        }
        print "\n";
        print "  Global options:\n";
        print "      --debug Enable debug mode.\n";
        print "      --help Display help on a specific tool.\n";
        print "\n";
        print "  Note: Generally a <commit> can be any git or gerrit\n";
        print "  reference. A git commit number, tag, branch, or\n";
        print "  a gerrit change-id are all valid.\n";
    }
    elsif (not defined $commands{$command})
    {
        die "Unknown subcommand: $command.\n";
    }
    else
    {
        # Per-subtool help bodies (q() keeps the layout literal)
        my %help = (
            "extract" =>
q(
  Create the SBE binaries with the patch and its CMVC/GIT dependencies.

  Options:
    --patches=<changeId:patch-set> CSV of changeId:patch-set's [required].
    --sbbase=<full-path-to-sb-base> Sandbox base for FSP-CI [required].
    --sbname=<name> Sandbox name [required].
    --rc=<rc file name> RC file for the sandbox with absolute path [optional].
),
            "get-commits" =>
q(
  Given a patch, find the corresponding commit id.

  Options:
    --patches=<changeId:patch-set> CSV of changeId:patch-set's [required].
    --sbbase=<full-path-to-sb-base> Sandbox base for FSP-CI [required].
    --sbname=<name> Sandbox name [required].
)
    );

        print "sbeGitTool $command:";
        print $help{$command};
    }
}
+
# Subtool 'get-commits': map each changeId:patch-set to its commit id
# and print/return them as one comma-separated string.
sub execute_get_commits
{

    # Set GIT environment
    git_environ_init();

    # Obtain the list of patches
    retrivePatchList();

    # Fetch the commits for the patches
    fetchCommits();

    # Prepare commit string
    my $commitStr = prepareCommitStr();

    print "The set of commits: $commitStr" if $debug;
    print $commitStr;
    return $commitStr;
}
+
# Subtool 'extract': apply the requested gerrit patches inside the
# sandbox's git-ci repo, rebuild the SBE, and prime the sandbox.
sub execute_extract
{
    # Set GIT environment
    git_environ_init();

    # Obtain the list of patches
    retrivePatchList();

    # Fetch the references for the patches
    fetchRefs();

    # Apply the patches on the GIT repo
    applyRefs();

    # Compile the SBE and copy binaries to sandbox
    compileAndCopy();
}
+
# Parse the subtool options into the file-scoped globals, derive the
# sandbox and git-ci paths, and chdir into the git repository.
# Dies when a required option is missing or the repo path is invalid.
sub git_environ_init
{
    # Handle the i/p to the function
    GetOptions("patches:s" => \$patches,
               "sbbase:s" => \$sbbase,
               "sbname:s" => \$sbname,
               "rc:s" => \$sbrc);

    die "Missing patch list" if ($patches eq "");
    die "Missing sandbox base path" if ($sbbase eq "");
    die "Missing sandbox name" if ($sbname eq "");

    # Set global variables
    $globals{sandbox} = $sbbase."/".$sbname;
    $globals{sbe_git_root} = $globals{sandbox}."/git-ci";

    print "Sandbox: $globals{sandbox}\n" if $debug;
    print "GIT repository path: $globals{sbe_git_root}\n" if $debug;

    # BUGFIX: chdir() does not set $?, so the old 'die ... if $?' check
    # never fired on a bad path; test chdir's return value directly.
    chdir($globals{sbe_git_root})
        or die "ERROR $!: Invalid GIT repository path in the sandbox";
}
+
# Split the --patches CSV into @patchList; /,+/ collapses runs of commas
# so empty fields from ",," are skipped.
sub retrivePatchList
{
    # Parse out the CSV patch list
    @patchList = split(/,+/, $patches);

    print ">>>Patches\n" if $debug;
    print Dumper @patchList if $debug;
    print "<<<End of Patches\n" if $debug;
}
+
# Resolve each valid, not-yet-merged changeId:patch-set into a gerrit
# fetch reference (appended to the file-scoped @references).
# Dies when no valid patch is found at all.
sub fetchRefs
{
    my $currentRef = "";   # NOTE(review): unused - shadowed by the 'my' inside the loop
    my $validPatchCount = 0;

    foreach my $patch (@patchList)
    {
        my ($changeId,$patchSet) = split(":",$patch);
        if (gitUtil::gerritIsPatch($changeId))
        {
            $validPatchCount = $validPatchCount + 1;
            print "Fetching reference for the patch : $patch \n" if $debug;
            # Only fetch refs for patches that are not already merged
            if (gitUtil::patchMergeStatus($changeId) == 0)
            {
                my $currentRef = gitUtil::gerritQueryReference($changeId, $patchSet);
                push @references, $currentRef;
                print "(patchset -> reference) = $patch -> $currentRef\n" if $debug;
            }
        }
        else
        {
            print "\n Warning : Patchset $patch is invalid.. Continuing to check if there is any other valid patch \n";
        }
    }
    die "ERROR: No valid patches given..\n" if ($validPatchCount == 0);
}
+
# Fetch and check out each gerrit reference inside an './sb workon'
# subshell, confirming progress via marker lines written to a status
# file. Dies on the first reference that fails to check out.
sub applyRefs
{
    my $statusFile = $globals{sbe_git_root}."/patchApply.status";

    foreach my $ref (@references)
    {
        print "Cherrypicking reference $ref \n" if $debug;
        # Commands are piped into the sandbox shell session
        open SBWORKON, " | ./sb workon";
        print SBWORKON "git fetch gerrit $ref && echo \"Fetch Done \" > $statusFile \n";
        #print SBWORKON "git cherry-pick FETCH_HEAD && echo \"Cherry-pick Done \" >> $statusFile \n"; // will be reused once appropriate support is there
        print SBWORKON "git checkout FETCH_HEAD && echo \"Checkout Done \" >> $statusFile \n";
        print SBWORKON "exit \n";
        close SBWORKON;

        print "\nChecking cherrypick status for $ref...\n" if $debug;
        # Both the Fetch and Checkout markers must appear for success
        my $ch_status = `cat $statusFile`;
        if( ($ch_status =~ m/Fetch/) && ($ch_status =~ m/Checkout/))
        {
            print "Checkout successful\n";
        }
        else
        {
            die "ERROR: Checkout of $ref failed\n";
        }
    }
}
+
# Build the SBE inside the sandbox shell ('make install') and prime the
# fips sandbox with the resulting binaries ('./sb prime'); progress is
# tracked through marker lines in a status file. Dies on either failure.
sub compileAndCopy
{
    my $statusFile = $globals{sbe_git_root}."/compile.status";
    my $compile_path = $globals{sbe_git_root};

    print "Compiling and copying the generated binaries to sandbox\n" if $debug;
    open SBWORKON, " | ./sb workon";
    print SBWORKON "cd $compile_path \n";
    print SBWORKON "make install && echo \"Compile Passed\" > $statusFile \n";
    print SBWORKON "export SANDBOXBASE=$globals{sandbox} \n";
    print SBWORKON "cd $globals{sbe_git_root} \n";
    print SBWORKON "./sb prime --sb $sbname --no_build && echo \"Prime Passed\" >> $statusFile \n";
    print SBWORKON "exit \n";
    close SBWORKON;

    print "\nChecking compile status...\n" if $debug;
    my $compile_status = `cat $statusFile | grep "Compile"`;
    if ($compile_status =~ m/Compile/)
    {
        print "SBE compile successful\n";
    }
    else
    {
        die "ERROR: SBE compile failed\n";
    }

    print "\nChecking sandbox status...\n" if $debug;
    my $sb_status = `cat $statusFile | grep "Prime"`;
    if ($sb_status =~ m/Prime/)
    {
        print "SBE prime successful\n";
    }
    else
    {
        die "ERROR: SBE prime failed\n";
    }
}
+
# Resolve each valid changeId:patch-set into a commit id (appended to
# the file-scoped @commits). Dies when no patch resolved to a commit.
sub fetchCommits
{
    my $currentCommit = "";   # NOTE(review): unused - shadowed by the 'my' inside the loop

    foreach my $patch (@patchList)
    {
        my ($changeId,$patchSet) = split(":",$patch);
        if (gitUtil::gerritIsPatch($changeId))
        {
            print "Fetching commit for the patch : $patch \n" if $debug;
            my $currentCommit = gitUtil::gerritQueryCommit($changeId, $patchSet);
            push @commits, $currentCommit;
            print "(patchset -> commit) = $patch -> $currentCommit\n" if $debug;
        }
        else
        {
            print "\n Warning : Patchset $patch is invalid.. Continuing to check if there is any other valid patch \n";
        }
    }
    die "ERROR: No valid patches given..\n" if (scalar @commits == 0);
}
+
# Return the fetched commits as one comma-separated string (equivalent
# to concatenating with "," and trimming the leading separator).
sub prepareCommitStr
{
    return join(",", @commits);
}
diff --git a/src/build/tools/sbePatchUtility.py b/src/build/tools/sbePatchUtility.py
new file mode 100644
index 00000000..b5103610
--- /dev/null
+++ b/src/build/tools/sbePatchUtility.py
@@ -0,0 +1,179 @@
+#!/usr/bin/python
+# IBM_PROLOG_BEGIN_TAG
+# This is an automatically generated prolog.
+#
+# $Source: src/build/tools/sbePatchUtility.py $
+#
+# OpenPOWER sbe Project
+#
+# Contributors Listed Below - COPYRIGHT 2016
+#
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied. See the License for the specific language governing
+# permissions and limitations under the License.
+#
+# IBM_PROLOG_END_TAG
+'''
+###########################################################
+# @file sbePatchUtility.py
+# @author: George Keishing <gkeishin@in.ibm.com>
+# @brief   Utility module for patching files for SBE simics
+#
+# Created on March 23, 2016
+# ----------------------------------------------------
+# @version Developer Date Description
+# ----------------------------------------------------
+# 1.0 gkeishin 23/03/16 Initial create
+###########################################################
+'''
+
+#-------------------------
+# Imports
+#-------------------------
+import os, sys
+import time
+import os.path
+import subprocess
+import shutil
+import hashlib
+from subprocess import Popen, PIPE
+from os.path import expanduser # for getting $HOME PATH
+import stat # for File permission op
+
+# Libraries/utility funcs and user define const
+import sbeCmvcConstants as errorcode
+import sbeCmvcUtility as utilcode
+
+##########################################################################
+# Function : utilPatchSimics
+#
+# @param i_sandbox_path : Sandbox full path
+#
+# @param i_sandbox_root : Sandbox RC root path
+#
+# @brief Patch pre-req patches for simics
+#
+##########################################################################
+def utilPatchSimics(i_sandbox_path, i_sandbox_root):
+ print "\n ... Patching simics files "
+
+ sb_name=os.path.basename(i_sandbox_path)
+
+ # Write the hooks for sim setup
+ l_sim_file = utilShell_hooks(i_sandbox_path)
+
+ if not l_sim_file:
+ return errorcode.ERROR_HOOKING_FILE
+
+ l_sim_cmd = "workon -m ppc " + sb_name + " -c " + l_sim_file + " -rc " + i_sandbox_root +"/sbesandboxrc"
+ print " ", l_sim_cmd
+
+ os.system(l_sim_cmd)
+
+ # Copy action files. As we are taking actions files from ppe, copy them here
+ # so that any workaround necessary can be applied over them in pre-simsetup path
+ # mkdir -p $SANDBOXBASE/src/simu/data/cec-chip
+ # cp $SBEROOT/import/chips/p9/sw_simulation/* $SANDBOXBASE/src/simu/data/cec-chip || exit -1
+
+ print " [ Copying action files to fips Sandbox ]"
+ # Ge the Sandbox base
+ sandbox_base = utilcode.utilFind_ENV_string("SANDBOXBASE").rstrip('\n')
+ sandbox_path = sandbox_base + "/src/simu/data/cec-chip"
+ cmd = "mkdir -p " + sandbox_path
+ print " * Executing : ",cmd
+ os.system(cmd)
+
+ # Ge the ppe root
+ ppe_base = utilcode.utilFind_ENV_string("SBEROOT").rstrip('\n')
+ ppe_path = ppe_base + "/import/chips/p9/sw_simulation/"
+ p_cmd = "cp -f " + ppe_path + "* " + sandbox_path
+ print " * Executing : ",p_cmd
+ rc = os.system(p_cmd)
+ if rc:
+ print " ERROR rc :",rc
+ return rc
+
+ return errorcode.SUCCESS_EXIT
+
+##########################################################################
+# Function : utilExecuteShell
+#
+# @param i_ppe_root : Root folder for PPE. This script must be
+# from PPE repo.
+#
+# @param i_sandbox_path : fips Sandbox path
+#
+# @param i_shell_file : User defined shell script name
+#
+# @brief Apply the simics patches pre define in shell script.
+#
+##########################################################################
+def utilExecuteShell(i_ppe_root, i_sandbox_path, i_shell_file):
+ print "\n ... Executing shell : ",i_shell_file
+
+ # Sanbox name
+ if i_sandbox_path != "None":
+ sb_name=os.path.basename(i_sandbox_path)
+
+ # Find the file and execute
+ l_path_name = i_ppe_root + '/src/build/'
+ l_shell_path=utilcode.utilFindFile(i_shell_file, l_path_name)
+ print " [ %s ]"%l_shell_path
+
+ if i_sandbox_path != "None":
+ # Load the shell onto the Sandbox env and execute
+ l_shell_exec = "workon -m ppc " + sb_name + " -c " + l_shell_path + " -rc " + i_sandbox_path.replace(sb_name,"") +"/sbesandboxrc"
+ else:
+ # Execute a stand alone script
+ l_shell_exec = l_shell_path
+
+ #rc = subprocess.call([l_shell_exec])
+ rc = os.system(l_shell_exec)
+ if rc :
+ return rc
+
+ return errorcode.SUCCESS_EXIT
+
+##########################################################################
+# Function : utilShell_hooks
+#
+# @param i_sandbox_path : Sandbox full path
+#
+# @brief find the ENV string set in the env
+#
+##########################################################################
+def utilShell_hooks(i_sandbox_path):
+ # Find the simics machine from ENV
+ l_machine = os.environ['MACHINE'].rstrip('\n')
+ print " Machine : ",l_machine
+ l_cmd_exec = 'start_simics -no_start -machine ' + l_machine + ' -batch_mode '
+
+ # Write the compile shell hook on the fips sandbox location
+ hook_file=i_sandbox_path + '/src/simsb'
+ f = open(hook_file,'w')
+
+ # simsb: This hook schell script will look like this
+ #
+ # #!/bin/sh
+ # start_simics -no_start -machine NIMBUS -batch_mode
+
+ f.write('#!/bin/sh \n')
+ f.write('\n')
+ f.write(l_cmd_exec)
+ f.close()
+
+ # Change the file permission for execute
+ perm_st = os.stat(hook_file)
+ os.chmod(hook_file, perm_st.st_mode | stat.S_IEXEC)
+
+ return hook_file # path of the shell file
+
diff --git a/src/build/tools/sbePrime.py b/src/build/tools/sbePrime.py
new file mode 100755
index 00000000..479ee051
--- /dev/null
+++ b/src/build/tools/sbePrime.py
@@ -0,0 +1,296 @@
+#!/usr/bin/python
+# IBM_PROLOG_BEGIN_TAG
+# This is an automatically generated prolog.
+#
+# $Source: src/build/tools/sbePrime.py $
+#
+# OpenPOWER sbe Project
+#
+# Contributors Listed Below - COPYRIGHT 2016
+#
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied. See the License for the specific language governing
+# permissions and limitations under the License.
+#
+# IBM_PROLOG_END_TAG
+'''
+###########################################################
+# @file sbePrime.py
+# @author: George Keishing <gkeishin@in.ibm.com>
+# @brief Main Module to support developer compilation
+# and patching.
+#
+# Created on March 03, 2016
+# ----------------------------------------------------
+# @version Developer Date Description
+# ----------------------------------------------------
+# 1.0 gkeishin 022/03/16 Initial create
+###########################################################
+'''
+
+#-------------------------
+# Imports
+#-------------------------
+import getopt
+import os, sys, glob
+import shutil
+import stat # for File permission op
+
+# Libraries/utility funcs and user define const
+import sbeCmvcConstants as errorcode
+import sbeCmvcUtility as utilcode
+import sbePatchUtility as utilpatch
+
+#-------------------------
+# Main Function
+#-------------------------
+def main():
+
+ #------------------------------------------
+ # Usage tool option
+ #------------------------------------------
+ def usage():
+ print " \n"
+ print " :: Command line USAGE options for Copying SBE FW files for compilation :: \n"
+ print " sbeDistribute.py -s <Sandbox Name> -i <file1,file2...>"
+
+ print " \n"
+ print " +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
+ print " | By default NO argument is needed as an input . |"
+ print " +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
+ print " \n ***** Options Supported *****"
+ print " \t -p,--patch = [ Optional ] Patch Simics related files on Sandbox "
+ print " \t -s,--sb = [ Optional ] Sandbox base name"
+ print " \t By default it picks up the ppe Repo test_sb sandbox"
+ print " \t but if you have created a sandbox of your own manually "
+ print " \t and you want to compile, use this option then only"
+ print " \t -i,--files = [ Optional ] Firmware Files coma ',' separated input file1,file2"
+ print " \t Only the pre-define listed files bellow:"
+ print " \t sbe_sp_intf.H,simics.tar,sbe_pibmem.bin,sbe_seeprom.bin"
+ print " \t -r,--rc_file = [ Optional ] The RC file for the sandbox (with absolute path)"
+ print " \t -n,--no_build = [ Optional ] Flag to determine if sbei component should be compiled"
+ print " \t -h,--help = Help"
+ print " ------------------------------------------------------------------------------------"
+
+ #------------------------------------------
+ # Exit from this Main
+ #------------------------------------------
+ def exit_main(rc):
+ if rc == errorcode.HELP_EXIT:
+ print " [ HELP DOCUMENTATION ]\n"
+ sys.exit(0)
+
+ if rc == errorcode.ERROR_BUILD_FAILED:
+ print " Compilation Failed .. Error "
+
+ if rc == errorcode.SUCCESS_DEV_EXIT:
+ print " [ Files Copy done! ]\n"
+ sys.exit(0)
+
+ if rc:
+ print "\n [ ERROR - MAIN ] Exiting with error code = ", rc
+ sys.exit(rc)
+ else:
+ print "\n Fips Sandbox compilation and simics patching completed [ OK ] "
+ sys.exit(0)
+
+ #------------------------------------------
+ # Local var place name holder's
+ #------------------------------------------
+ sim_patch = "None"
+ sandbox_name = "None"
+ path_name = "None" # PPE Repo
+ file_name = "None"
+ rc_file = "None"
+ build = "1"
+
+ #----------------------------
+ # Read command line args
+ #----------------------------
+ opts, args = getopt.getopt(sys.argv[1:],"p:s:i:h:r:n",['patch=', 'sb=', 'files=', 'help', 'rc_file=', 'no_build'])
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage()
+ exit_main(errorcode.HELP_EXIT)
+ elif opt in ('-p', '--patch'):
+ sim_patch = arg
+ elif opt in ('-s', '--sb'):
+ sandbox_name = arg
+ elif opt in ('-i', '--files'):
+ file_name = arg
+ elif opt in ('-r', '--rc_file'):
+ rc_file = arg
+ elif opt in ('--no_build'):
+ build = "0"
+ else:
+ usage()
+ exit_main(errorcode.ERROR_EXIT)
+
+ #------------------------------------------------------
+ # Make sure that it has passed atleast one arg with it
+ #------------------------------------------------------
+ if len(sys.argv)<1:
+ usage()
+ exit_main(errorcode.ERROR_SYS_EXIT)
+
+ #---------------------------------------------
+ # Callling the Func defs in order
+ #---------------------------------------------
+
+ #------------------------------
+ # 1) User input params/ Check ENV
+ #------------------------------
+ print "\n [ Checking SBE user ENV Pre-req ] "
+ # Get it from ENV
+ if path_name == "None":
+ # Get the PPE path
+ l_ppe_path = utilcode.utilppeSbENV("SBEROOT")
+ if l_ppe_path == "None":
+ print "\n Couldn't find PPE repo info from ENV currently set... "
+ print " [ ERROR ] PPE Repo ENV Setting Path : %s " % l_ppe_path
+ exit_main(errorcode.ERROR_SETTING)
+ else:
+ print " PPE Repo path Setting\t : %s "% l_ppe_path
+ path_name = l_ppe_path
+
+ #-----------------------------------
+ # 2) Get the Sanbox and repo paths
+ #-----------------------------------
+ # Get the base path of the fips sandbox
+ if sandbox_name == "None":
+ # Find the sanbox name and base from ENV
+ # User must have done workon fips sandbox to work
+ sandbox_path = utilcode.utilFind_ENV_string("SANDBOXBASE").rstrip('\n')
+ else:
+ sandbox_path = utilcode.utilFind_sb_base(sandbox_name).rstrip('\n')
+ print " Fips Sandbox path\t : ",sandbox_path
+
+ #-----------------------------------
+ # 3) Get the Sanbox root path
+ #-----------------------------------
+ if sandbox_name == "None":
+ sandbox_root = utilcode.utilFind_ENV_string("SANDBOXROOT").rstrip('\n')
+ else:
+# sandbox_root = utilcode.utilFind_ENV_string("SANDBOXRC").rstrip('\n')
+ sandbox_root = utilcode.utilFind_sb_rc(sandbox_name).rstrip('\n')
+
+ if sandbox_root == "None":
+ print " ** [ ERROR ] Something Fishy about the ENV set -OR- Option used.. Please check manually ** "
+ usage()
+ exit_main(errorcode.ERROR_SETTING)
+ else:
+ print " Sandbox root path\t : ",sandbox_root
+
+ #---------------------------------------------
+ # sim setup if user initiates
+ #---------------------------------------------
+ if sim_patch != "None":
+ #---------------------------------------------
+ # Create sandbox for simics
+ #---------------------------------------------
+ rc_sb = utilpatch.utilExecuteShell(path_name,"None","sandbox-create")
+ if rc_sb == errorcode.SUCCESS_EXIT:
+ print " Sandbox Created.. [ OK ] \n"
+ else:
+ print " Sandbox Create.. [ ERROR ]",rc_sb
+ exit_main(rc_sb)
+
+ #----------------------------------------
+ # Patch up the simics patches files
+ #----------------------------------------
+
+ print "\n *** Update Simics patches onto Sandbox *** \n "
+ # Pre sim setup
+ rc_shell = utilpatch.utilExecuteShell(path_name,sandbox_path,"workarounds.presimsetup")
+ if rc_shell == errorcode.SUCCESS_EXIT:
+ print " presimsetup [ OK ]\n"
+ else:
+ print " presimsetup [ ERROR ] : ",rc_shell
+ exit_main(rc_shell)
+
+ # Patch the simics files
+ rc_sim = utilpatch.utilPatchSimics(sandbox_path,sandbox_root)
+ if rc_sim != errorcode.SUCCESS_EXIT:
+ exit_main(rc_sim)
+ else:
+ print " Patch the simics files on Sandbox [ OK ] \n"
+
+ # Post sim setup
+ rc_shell = utilpatch.utilExecuteShell(path_name,sandbox_path,"workarounds.postsimsetup")
+ if rc_shell == errorcode.SUCCESS_EXIT:
+ print " postsimsetup [ OK ]\n"
+ # Clean exit Get out from here
+ exit_main(errorcode.SUCCESS_EXIT)
+ else:
+ print " postsimsetup [ ERROR ] : ",rc_shell
+ exit_main(rc_shell)
+
+
+ #----------------------------------------
+ # 4) Copy the files from repo to sandbox
+ #----------------------------------------
+ # Find the files and copy to the sanbox dir
+ # Just take a quick check if the Sandbox exist or not
+ if sandbox_path != "None":
+ if os.path.isdir(sandbox_path) == True:
+ rc_copy = utilcode.utilCopyFileToSandbox(path_name,sandbox_path,file_name)
+ if rc_copy == errorcode.SUCCESS_DEV_EXIT:
+ print " Files Copied to Fips Sandbox : [ OK ]"
+ else:
+ exit_main(rc_copy)
+ else:
+ print " Sandbox : %s [ Either doesn't exist or do workon to fips sb to load the ENV.. ]" % os.path.basename(sandbox_path)
+ print " - OR - "
+ print " [ Optional ] You can specify your sandbox name as input as well"
+ print " -s <fips_sandbox Name >"
+ usage()
+ exit_main(errorcode.ERROR_SANDBOX_EXIST)
+ else:
+ print " Please Check your fips Sandbox and retry"
+ exit_main(errorcode.ERROR_SANDBOX_EXIST)
+
+ sb_name=os.path.basename(sandbox_path)
+ print "\n Sandbox :",sb_name
+
+ if build == "1":
+ #----------------------------------------
+ # 5) Wite the hook file into shell file
+ #----------------------------------------
+ # Write the compile shell hook on the fips sandbox location
+ hook_file = utilcode.utilWriteShell_hooks(sandbox_path)
+
+ #----------------------------------------
+ # 6) Compile the code
+ #----------------------------------------
+ # Use the hook script to compile the code
+ if sandbox_name == "None":
+ compile_cmd="workon -m ppc " + sb_name + " -c " + hook_file + " -rc " + sandbox_root +"/sbesandboxrc"
+ else:
+ if rc_file == "None":
+ compile_cmd="workon -m ppc " + sb_name + " -c " + hook_file + " -rc " + sandbox_root +"/.sandboxrc"
+ else:
+ print " getting rc file from user \n"
+ compile_cmd="workon -m ppc " + sb_name + " -c " + hook_file + " -rc " + rc_file
+ print "\n [ COMPILE ] Executing :%s \n"%compile_cmd
+ rc = os.system(compile_cmd)
+
+ print " Compilation returned rc :",rc
+ if rc != 0:
+ exit_main(errorcode.ERROR_BUILD_FAILED)
+
+ # Clean exit
+ exit_main(errorcode.SUCCESS_EXIT)
+
+
# Script entry point: run main() only when executed directly,
# not when imported as a module.
if __name__=="__main__":
    main()
+
OpenPOWER on IntegriCloud