diff --git a/tools/plotting/fastnnlo_absolute.py b/tools/plotting/fastnnlo_absolute.py
index f1040d8de7f49532dfaf104580707fc34f943eea..c6926ef7a7df1b2a5aa1daa582c212d53d7b443e 100755
--- a/tools/plotting/fastnnlo_absolute.py
+++ b/tools/plotting/fastnnlo_absolute.py
@@ -81,7 +81,6 @@ import numpy as np
 #from fastnlo import fastNLOLHAPDF
 #from fastnlo import SetGlobalVerbosity
 
-
 # Style settings
 # Location of matplotlibrc
 # print mpl.matplotlib_fname()
@@ -102,6 +101,7 @@ params = {'font.size': 16,
           'mathtext.fontset': "stix",
           'axes.labelsize': 'x-large',
           'axes.titlesize': 'x-large',
+          #'axes.linewidth': 2, #increase default linewidth
           'xtick.labelsize': 'x-large',
           'ytick.labelsize': 'x-large',
           'lines.linewidth': 2,
@@ -295,26 +295,9 @@ xb = np.arange(1, nobs+1.e-6)
 #print 'Using ', ntab, 'table files.'
 #xs_fnla = np.array(xs_fnla)
 
-# Evaluate cross sections from pre-evaluated fastNLO tables
-#for file in [logfile]:
-#    if verb: print('[fastnnlo_absolute]: Reading from fastNLO logfile: {:s}'.format(file))
-#    # Skip all lines starting with "#", "C", or "L" as first non-whitespace character
-#    with open(file, 'r') as f:
-#        data = re.sub(r'\s*[#CLN].*', '', f.read())
-#    all_contrib = np.genfromtxt(StringIO(data), usecols=(ordcol,))
-#    # Calculate number of scale variations (default nscl=1, see above)
-#    nscl = len(all_contrib)/nobs
-#    print "nscl", nscl
-#    # Read the nscl*nobs values into nscl arrays of nobs entries
-#    for i in range(nscl): # consider all scale variations given in logfile
-#        a = []
-#        for j in range(nobs):
-#            ind = i*nobs+j
-#            a.append(all_contrib[ind])
-#        xs_fnll.append(a)
-
 # Evaluate cross sections from pre-evaluated fastNLO tables
 scalename = r'$\bf \mu$\_GeV'
+xsunit = 12
 for file in [logfile]:
     if verb: print('[fastnnlo_absolute]: Reading from fastNLO logfile: {:s}'.format(file))
     # Skip all lines starting with "#", "C", or "L" as first non-whitespace character
@@ -331,8 +314,13 @@ for file in [logfile]:
     murvar = []
     mufvar = []
     for line in lines:
+        # Find header line(s) matching 'Ipublunits'
+        if re.search(r'# Publ. x section', line):
+            # Extract x section publication units from this line
+            xsunit = int(re.sub('# Publ. x section \(10\^-Ipublunits b\)\s+','',line))
+            if verb: print('[fastnnlo_absolute]: Found x section unit: {}'.format(xsunit))
         # Find header line(s) matching '#IObs'
-        if re.search(r'#IObs', line):
+        elif re.search(r'#IObs', line):
             nvar = nvar+1
             # Extract scale name from within <> in this line
             scl = re.search('<(.*)>',line)
@@ -376,9 +364,11 @@ for file in [logfile]:
             a.append(all_contrib[ind])
         xs_fnll.append(a)
 
+# Conversion of NNLOJET fb to publication units
+fbtoxsunit = pow(10,xsunit-15)
 for i in range(nscl):
-    xs_nnlo.append(xs_all[:, 2*i+3]/1000) # Conversion of fb to pb
-    dxs_nnlo.append(xs_all[:, 2*i+4]/1000)
+    xs_nnlo.append(xs_all[:, 2*i+3]*fbtoxsunit)
+    dxs_nnlo.append(xs_all[:, 2*i+4]*fbtoxsunit)
 
 
 xs_fnll = np.array(xs_fnll)
diff --git a/tools/plotting/fastnnlo_kfaccomp.py b/tools/plotting/fastnnlo_kfaccomp.py
index a0bfff82eb856422b5ec4f9fda4be0f1cb6d3f55..003d1ec044c2f0c0dda4ac7ff20c62fcb0bd7d99 100755
--- a/tools/plotting/fastnnlo_kfaccomp.py
+++ b/tools/plotting/fastnnlo_kfaccomp.py
@@ -284,22 +284,14 @@ for order in datfiles.keys():
         xl = xs_all[:, 0]  # Binning must be identical for all orders
         xm = xs_all[:, 1]
         xu = xs_all[:, 2]
-        xs_lo = xs_all[:, 3]/1000.  # Conversion of fb to pb
-        dxs_lo = xs_all[:, 4]/1000.
+        xs_lo = xs_all[:, 3]
+        dxs_lo = xs_all[:, 4]
     elif order == 'NLO':
-        xs_nlo = xs_all[:, 3]/1000.
-        dxs_nlo = xs_all[:, 4]/1000.
+        xs_nlo = xs_all[:, 3]
+        dxs_nlo = xs_all[:, 4]
     else:
-        xs_nnlo = xs_all[:, 3]/1000.
-        dxs_nnlo = xs_all[:, 4]/1000.
-
-xs_lo = np.array(xs_lo)
-dxs_lo = np.array(dxs_lo)
-xs_nlo = np.array(xs_nlo)
-dxs_nlo = np.array(dxs_nlo)
-if max_order == 'NNLO':
-    xs_nnlo = np.array(xs_nnlo)
-    dxs_nnlo = np.array(dxs_nnlo)
+        xs_nnlo = xs_all[:, 3]
+        dxs_nnlo = xs_all[:, 4]
 
 # Determine no. of observable bins
 nobs = xl.size
@@ -311,12 +303,24 @@ xsb = np.arange(1+dx, nobs+dx+1.e-6)
 
 # Read cross sections from pre-evaluated fastNLO tables
 ordcol = {'LO': 6, 'NLO': 7, 'NNLO': 8}
+xsunit = 12
 if logfile:
     # Skip all lines starting with "#", "C", or "L" as first non-whitespace character
     # why don't we start these lines (C, L...) also with a "#" ???
     # Read first nobs values for central scale result
    print('[fastnnlo_kfaccomp]: Reading from fastNLO log file {:s}'.format(logfile))
    with open(logfile, 'r') as f:
+
+        # Read everything line-by-line until x section unit information
+        lines = f.readlines()
+        for line in lines:
+            # Find header line(s) matching 'Ipublunits'
+            if re.search(r'# Publ. x section', line):
+                # Extract x section publication units from this line
+                xsunit = int(re.sub('# Publ. x section \(10\^-Ipublunits b\)\s+','',line))
+                if verb: print('[fastnnlo_kfaccomp]: Found x section unit: {}'.format(xsunit))
+        f.seek(0)  # rewind; the file was fully consumed by readlines() above
+
         data = re.sub(r'\s*[#CLN].*', '', f.read())
    lo = np.genfromtxt(StringIO(data), usecols=ordcol['LO'],)
    nlo = np.genfromtxt(StringIO(data), usecols=ordcol['NLO'],)
@@ -337,6 +340,16 @@ if logfile:
     if max_order == 'NNLO':
         xs_fnnlo = np.array(xs_fnnlo)
 
+# Conversion of NNLOJET fb (15) to publication units
+fbtoxsunit = pow(10,xsunit-15)
+xs_lo = np.array(xs_lo)*fbtoxsunit
+dxs_lo = np.array(dxs_lo)*fbtoxsunit
+xs_nlo = np.array(xs_nlo)*fbtoxsunit
+dxs_nlo = np.array(dxs_nlo)*fbtoxsunit
+if max_order == 'NNLO':
+    xs_nnlo = np.array(xs_nnlo)*fbtoxsunit
+    dxs_nnlo = np.array(dxs_nnlo)*fbtoxsunit
+
 # Successive k factors
 kn_nlo = np.divide(xs_nlo, xs_lo, out=np.ones_like(xs_nlo), where=xs_lo != 0)
 if logfile: kf_nlo = np.divide(xs_fnlo, xs_flo, out=np.ones_like(xs_fnlo), where=xs_flo!=0)
diff --git a/tools/plotting/fastnnlo_multigridclosure.py b/tools/plotting/fastnnlo_multigridclosure.py
index 6bac1fd37f53b81843c8b1c0f950f2b6f765dcae..26fd4cb976fb8eb3d8d3e28347fce691d80733dc 100755
--- a/tools/plotting/fastnnlo_multigridclosure.py
+++ b/tools/plotting/fastnnlo_multigridclosure.py
@@ -363,7 +363,8 @@ def main():
     # Evaluate cross sections from pre-evaluated fastNLO tables
     # assume that corresponding logfiles are located in the same directory as the datfiles
     log_list = []
-
+    nlog = 0
+    xsunit = 12
     nd = 0
     for dfile in datfiles:
         nd += 1
@@ -377,13 +378,21 @@ def main():
     fnlologs.sort()
     if (xs_nnlo is not None):
         # Only one specific scale choice sclind is considered
-        xs_fnll, nlog = Read_logfile(fnlologs, sclind, seeds, nobs, verb)
+        xs_fnll, nlog, xsunit = Read_logfile(fnlologs, sclind, seeds, nobs, verb)
     elif (xs_nnlo_list is not None):
         # Have list with values for all sclind variations
         xs_fnll_list = []
         for fscl in range(1, sclind+1, 1):
-            xs_fnll_item, nlog = Read_logfile(fnlologs, fscl, seeds, nobs, verb)
+            xs_fnll_item, nlog, xsunit = Read_logfile(fnlologs, fscl, seeds, nobs, verb)
             xs_fnll_list.append(xs_fnll_item)
+    # Conversion of NNLOJET fb (15) to publication units
+    fbtoxsunit = pow(10,xsunit-15)
+    if xs_nnlo is not None:
+        xs_nnlo = xs_nnlo*fbtoxsunit
+    if xs_nnlo_list:
+        for i in range(len(xs_nnlo_list)):
+            xs_nnlo_list[i] = xs_nnlo_list[i]*fbtoxsunit
+
     # Check on identical file numbers, either for table or log files, dat files, and weight lines if given
     if ndat == nlog == nlin and ndat*nlog*nlin != 0 and wgtfile:
         print('[fastnnlo_multigridclosure]: OK: Have equal number of dat files, fastNLO results, and weight lines.')
@@ -506,12 +515,13 @@ def hist_stats(x, weights, axis=None):
 # Function to read bin bounds and xs_nnlo from datfiles
 ################################################################################
 def Read_XS(datfiles, fscl, nfil, verb):  # takes list of datfiles (different seeds) and fscl
-    # Prepare result arrays
-    xl = []       # left bin border
-    xu = []       # right bin border
-    ndat = 0
-    xs_nnlo = []  # NNLOJET results
-    seeds = []    # Seed numbers for matching
+    # Prepare return values
+    xl = []       # left bin borders
+    xu = []       # right bin borders
+    ndat = 0      # no. of dat files
+    xs_nnlo = []  # NNLOJET results
+    nobs = 0      # no. of observable bins
+    seeds = []    # seed numbers for matching
 
     print('[fastnnlo_multigridclosure]: Reading datfiles for fscl=%s' % fscl)
     ixscol = 3 + 2 * (fscl-1)
@@ -533,7 +543,11 @@ def Read_XS(datfiles, fscl, nfil, verb): # takes list of datfiles (different se
     print('[fastnnlo_multigridclosure]: Using ', ndat, 'dat files.')
     xl = np.array(xl)
     xu = np.array(xu)
-    xs_nnlo = np.array(xs_nnlo)/1000.  # Conversion of fb to pb
+    # Conversion of NNLOJET fb to publication units
+    # fbtoxsunit = pow(10,xsunit-15)
+    # xs_nnlo = np.array(xs_nnlo)*fbtoxsunit
+    # Only ensure right output as np array here; conversion is done later
+    xs_nnlo = np.array(xs_nnlo)
 
     # Determine no. of observable bins
     nobs = xl.size
@@ -548,8 +562,11 @@ def Read_XS(datfiles, fscl, nfil, verb): # takes list of datfiles (different se
 ###############################################################################
 def Read_logfile(fnlologs, fscl, seeds, nobs, verb):  # takes list of logfiles and fscl,
                                                       # as well as list of seeds and nobs (number of observable bins)
-    nlog = 0
-    xs_fnll = []
+
+    # Prepare return values
+    xs_fnll = []  # fastNLO results
+    nlog = 0      # no. of log files
+    xsunit = 12   # default fastNLO x section unit
     for fnlolog in fnlologs:
         if verb:
             print('[fastnnlo_multigridclosure]: fastNLO file no. ', nlog, ' is ', fnlolog)
@@ -561,9 +578,19 @@ def Read_logfile(fnlologs, fscl, seeds, nobs, verb): # takes list of logfiles a
             if verb:
                 print('[fastnnlo_multigridclosure]: NNLOJET and fastNLO result correctly matched. seed is ', seeds[nlog])
 
-        xs_tmp = np.loadtxt(fnlolog, usecols=(
-            6,), comments=['#', ' #', 'C', 'L', 'N'])
-        #print "xs_tmp \n", xs_tmp
+        # Determine x section unit from first file
+        if nlog == 0:
+            with open(fnlolog, 'r') as f:
+                # Read everything line-by-line
+                lines = f.readlines()
+                for line in lines:
+                    # Find header line(s) matching 'Ipublunits'
+                    if re.search(r'# Publ. x section', line):
+                        # Extract x section publication units from this line
+                        xsunit = int(re.sub('# Publ. x section \(10\^-Ipublunits b\)\s+','',line))
+                        if verb: print('[fastnnlo_multigridclosure]: Found x section unit: {}'.format(xsunit))
+
+        xs_tmp = np.loadtxt(fnlolog, usecols=(6,), comments=['#', ' #', 'C', 'L', 'N'])
 
         indi = (fscl-1)*nobs  # skip lines of lower fscl
         indf = indi + nobs  # last line in logfile which belongs to certain fscl
@@ -574,7 +601,7 @@ def Read_logfile(fnlologs, fscl, seeds, nobs, verb): # takes list of logfiles a
             break
     print('[fastnnlo_multigridclosure]: Using ', nlog, 'pre-evaluated table files.')
     xs_fnll = np.array(xs_fnll)
-    return xs_fnll, nlog
+    return xs_fnll, nlog, xsunit
 ###############################################################################
 # End of function to read XS from logfile
 ###############################################################################
diff --git a/tools/plotting/fastnnlo_scaleclosure.py b/tools/plotting/fastnnlo_scaleclosure.py
index 0877f4e30beb681845e1e1e4f2b9e6c4957a910a..d274f5de0c1c7002b13ade3dced3b75f5fb4b595 100755
--- a/tools/plotting/fastnnlo_scaleclosure.py
+++ b/tools/plotting/fastnnlo_scaleclosure.py
@@ -16,6 +16,7 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 
+# TODO: Adapt to change of genfromtxt in Python 3?
 try:
     from StringIO import StringIO ## for Python 2
 except ImportError:
@@ -89,7 +90,13 @@ import numpy as np
 # plt.style.use('ggplot')
 # plt.style.use('presentation')
 
-params = {'legend.fontsize': 'x-large',
+# Optionally set font to Computer Modern to avoid common missing font errors
+#mpl.rc('font', family='serif', serif='cm10')
+#mpl.rc('text', usetex=True)
+#mpl.rcParams['text.latex.preamble'] = [r'\boldmath']
+
+params = {'font.size': 16,
+          'legend.fontsize': 'x-large',
           'figure.figsize': (16, 12),
           'mathtext.fontset': "stix",
           'axes.labelsize': 'x-large',
@@ -114,6 +121,9 @@ class SplitArgs(argparse.Action):
 # Some global definitions
 _fntrans = str.maketrans({'[': '', ']': '', '(': '', ')': '', ',': ''})  # Filename translation table
 _formats = {'eps': 0, 'pdf': 1, 'png': 2, 'svg': 3}
+#_order_color = {'LO': 'g', 'NLO': 'b', 'NNLO': 'r'}
+_order_color = {'LO': 'olivedrab', 'NLO': 'dodgerblue', 'NNLO': 'orangered'}
+_order_symbol = {'LO': ',', 'NLO': 's', 'NNLO': 'o'}
 
 # Define arguments & options
 parser = argparse.ArgumentParser(epilog='', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
@@ -301,6 +311,7 @@ xb = np.arange(1, nobs+1.e-6)
 
 # Evaluate cross sections from pre-evaluated fastNLO tables
 scalename = r'$\bf \mu$\_GeV'
+xsunit = 12
 for file in [logfile]:
     if verb: print('[fastnnlo_scaleclosure]: Reading from fastNLO logfile: {:s}'.format(file))
     # Skip all lines starting with "#", "C", or "L" as first non-whitespace character
@@ -317,8 +328,13 @@ for file in [logfile]:
     murvar = []
     mufvar = []
     for line in lines:
+        # Find header line(s) matching 'Ipublunits'
+        if re.search(r'# Publ. x section', line):
+            # Extract x section publication units from this line
+            xsunit = int(re.sub('# Publ. x section \(10\^-Ipublunits b\)\s+','',line))
+            if verb: print('[fastnnlo_scaleclosure]: Found x section unit: {}'.format(xsunit))
         # Find header line(s) matching '#IObs'
-        if re.search(r'#IObs', line):
+        elif re.search(r'#IObs', line):
             nvar = nvar+1
             # Extract scale name from within <> in this line
             scl = re.search('<(.*)>',line)
@@ -362,9 +378,11 @@ for file in [logfile]:
             a.append(all_contrib[ind])
         xs_fnll.append(a)
 
+# Conversion of NNLOJET fb to publication units
+fbtoxsunit = pow(10,xsunit-15)
 for i in range(nscl):
-    xs_nnlo.append(xs_all[:, 2*i+3]/1000) # Conversion of fb to pb
-    dxs_nnlo.append(xs_all[:, 2*i+4]/1000)
+    xs_nnlo.append(xs_all[:, 2*i+3]*fbtoxsunit)
+    dxs_nnlo.append(xs_all[:, 2*i+4]*fbtoxsunit)
 
 
 xs_fnll = np.array(xs_fnll)
diff --git a/tools/plotting/fastnnlo_scaleunc.py b/tools/plotting/fastnnlo_scaleunc.py
index fd66d5f60f9e571c3d7a20762b5c31d960895d2f..fa98fed171473b09e125c4c66ce46f2c3cc2ae55 100755
--- a/tools/plotting/fastnnlo_scaleunc.py
+++ b/tools/plotting/fastnnlo_scaleunc.py
@@ -345,6 +345,8 @@ def main():
         print('[fastnnlo_scaleunc]: No statistical uncertainties requested.')
     elif args['datfiles'][0] == '':
         print('[fastnnlo_scaleunc]: Automatic filename matching is used to load statistical uncertainties from NNLOJET.')
+    elif args['datfiles'][0] == '0':
+        print('[fastnnlo_scaleunc]: Get statistical uncertainties from within fastNLO table.')
     else:
         for datfile in args['datfiles']:
             lstat = os.path.isfile(datfile)
diff --git a/v2.5/toolkit/src/fnlo-tk-modify.cc b/v2.5/toolkit/src/fnlo-tk-modify.cc
index d858dfab1480b84350c95243f77420d6c7caf97b..b9a6852796d09c4f814f2a2077b41503c6c4d3f4 100644
--- a/v2.5/toolkit/src/fnlo-tk-modify.cc
+++ b/v2.5/toolkit/src/fnlo-tk-modify.cc
@@ -264,6 +264,14 @@ int main(int argc, char** argv) {
    }
 
    // Block B's: Scenario contributions
+   if ( EXIST(MultCoeffFactor) ) {
+      double fac = DOUBLE(MultCoeffFactor);
+      info["fnlo-tk-modify"]<<"Multiplying all coefficients by factor " << fac << "!" << endl;
+      for (unsigned int iObs=0; iObs<nobs; iObs++) {
+         table.MultiplyBinInTable(iObs,fac);
+      }
+   }
+
    if ( !DOUBLE_ARR(MultCoeff).empty() ) {
      vector<double> fac = DOUBLE_ARR(MultCoeff);
      info["fnlo-tk-modify"]<<"Multiplying by provided factors all coefficients of additive contributions to observable bins!"<<endl;
diff --git a/v2.5/toolkit/src/fnlo-tk-yodaout.cc b/v2.5/toolkit/src/fnlo-tk-yodaout.cc
index 9aa8ac36bb17b0ca4e2b3b95e0e80d5396367b8c..669cfa64b36b8d41b0ba3c5ea1f613b58a57e612 100644
--- a/v2.5/toolkit/src/fnlo-tk-yodaout.cc
+++ b/v2.5/toolkit/src/fnlo-tk-yodaout.cc
@@ -125,6 +125,7 @@ int main(int argc, char** argv) {
      man << " \"kQuadraticSum\", i.e. mur=muf=sqrt(scale1^2+scale2^2)." << endl;
      man << "[np]: Apply nonperturbative corrections if available, def. = no." << endl;
      man << " Alternatives: \"yes\" or \"np\"" << endl;
+     man << "[xscale]: Rescale x axis values, def. = 1" << endl;
      man << "[Verbosity]: Set verbosity level of table evaluation [DEBUG,INFO,WARNING,ERROR], def. = WARNING" << endl;
      yell << " #" << endl;
      man << "Use \"_\" to skip changing a default argument." << endl;
@@ -137,6 +138,7 @@
    }
 
    //! --- PDF choice
+   string chtmp = "X";
    string PDFFile = "X";
    if (argc > 2) {
       PDFFile = (const char*) argv[2];
@@ -273,12 +275,24 @@ int main(int argc, char** argv) {
      shout["fnlo-tk-yodaout"] << "Apply nonperturbative corrections if available." << endl;
    }
 
+   //--- Set x axis rescale value
+   double xrescale = 1;
+   if (argc > 8) {
+      chtmp = (const char*) argv[8];
+   }
+   if (argc <= 8 || chtmp == "_") {
+      shout["fnlo-tk-yodaout"] << "No request given for x-axis rescale factor, using default value of unity." << endl;
+   } else {
+      xrescale = atof(argv[8]);
+      shout["fnlo-tk-yodaout"] << "Using x-axis rescale factor: " << xrescale << endl;
+   }
+
    //--- Set verbosity level of table evaluation
    string VerbosityLevel = "WARNING";
-   if (argc > 8) {
-      VerbosityLevel = (const char*) argv[8];
+   if (argc > 9) {
+      VerbosityLevel = (const char*) argv[9];
    }
-   if (argc <= 8 || VerbosityLevel == "_") {
+   if (argc <= 9 || VerbosityLevel == "_") {
      VerbosityLevel = "WARNING";
      shout["fnlo-tk-yodaout"] << "No request given for verbosity level, using WARNING default." << endl;
    } else {
@@ -286,7 +300,7 @@
    }
 
    //! --- Too many arguments
-   if (argc > 9) {
+   if (argc > 10) {
      error["fnlo-tk-yodaout"] << "Too many arguments, aborting!" << endl;
      exit(1);
    }
@@ -557,9 +571,9 @@
       vector < double > eyplus;
       //! Loop over bins in outer (1st) dimension
       for (unsigned int k =0 ; k<NDimBins[0] ; k++) {
-         x.push_back((bins[iobs].second + bins[iobs].first)/2.0);
-         explus.push_back((bins[iobs].second - bins[iobs].first)/2.0);
-         exminus.push_back((bins[iobs].second - bins[iobs].first)/2.0);
+         x.push_back((bins[iobs].second + bins[iobs].first)/2.0*xrescale);
+         explus.push_back((bins[iobs].second - bins[iobs].first)/2.0*xrescale);
+         exminus.push_back((bins[iobs].second - bins[iobs].first)/2.0*xrescale);
          y.push_back(xs[iobs]);
          eyplus.push_back(dxsu[iobs]);
          eyminus.push_back(std::abs(dxsl[iobs]));
@@ -588,9 +602,9 @@
         //! Loop over bins in inner (2nd) dimension
         NDimBins[1] = fnlo->GetNDim1Bins(j);
         for (unsigned int k = 0; k<NDimBins[1]; k++) {
-            x.push_back((bins[iobs].second + bins[iobs].first)/2.0);
-            explus.push_back((bins[iobs].second - bins[iobs].first)/2.0);
-            exminus.push_back((bins[iobs].second - bins[iobs].first)/2.0);
+            x.push_back((bins[iobs].second + bins[iobs].first)/2.0*xrescale);
+            explus.push_back((bins[iobs].second - bins[iobs].first)/2.0*xrescale);
+            exminus.push_back((bins[iobs].second - bins[iobs].first)/2.0*xrescale);
             y.push_back(xs[iobs]);
             eyplus.push_back(dxsu[iobs]);
             eyminus.push_back(std::abs(dxsl[iobs]));
@@ -635,9 +649,9 @@
            //! Loop over bins in inner (3rd) dimension
            NDimBins[2] = fnlo->GetNDim2Bins(j,k);
            for (unsigned int l = 0; l<NDimBins[2]; l++) {
-               x.push_back((bins[iobs].second + bins[iobs].first)/2.0);
-               explus.push_back((bins[iobs].second - bins[iobs].first)/2.0);
-               exminus.push_back((bins[iobs].second - bins[iobs].first)/2.0);
+               x.push_back((bins[iobs].second + bins[iobs].first)/2.0*xrescale);
+               explus.push_back((bins[iobs].second - bins[iobs].first)/2.0*xrescale);
+               exminus.push_back((bins[iobs].second - bins[iobs].first)/2.0*xrescale);
                y.push_back(xs[iobs]);
                eyplus.push_back(dxsu[iobs]);
                eyminus.push_back(std::abs(dxsl[iobs]));