Skip to content

Commit

Permalink
Merge pull request #150 from gpetruc/development-merged
Browse files Browse the repository at this point in the history
Merge master into development, fix one regression
  • Loading branch information
gpetruc committed Oct 13, 2014
2 parents 4a5a6bd + 115cca0 commit 84d3d5a
Show file tree
Hide file tree
Showing 5 changed files with 119 additions and 36 deletions.
12 changes: 0 additions & 12 deletions python/VEVandEpsilon.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,22 +74,10 @@ def setup(self):
# # Ellis cv == v (mv^(2 e)/M^(1 + 2 e))
self.modelBuilder.factory_(
'expr::C%(name)s("@0 * TMath::Power(@3,2*@2) / TMath::Power(@1,1+2*@2)", SM_VEV, M, eps, M%(name)s_MSbar)' % locals() )
# # AD k = (M/v) eps m^(N(eps-1))
# self.modelBuilder.factory_(
# 'expr::C%(name)s("@1 * @2 * TMath::Power(@3,2*(@2-1)) / @0", SM_VEV, M, eps, M%(name)s_MSbar)' % locals() )
# # GP k = (v/M)^2 (m/M)^(2eps)
#self.modelBuilder.factory_(
# 'expr::C%(name)s("TMath::Power(@0/@1,2) * TMath::Power(@3/@1,2*@2)", SM_VEV, M, eps, M%(name)s_MSbar)' % locals() )
else:
# # Ellis cf == v (mf^e/M^(1 + e))
self.modelBuilder.factory_(
'expr::C%(name)s("@0 * TMath::Power(@3,@2) / TMath::Power(@1,1+@2)", SM_VEV, M, eps, M%(name)s_MSbar)' % locals() )
# AD k = (M/v) eps m^(N(eps-1))
# self.modelBuilder.factory_(
# 'expr::C%(name)s("@1 * @2 * TMath::Power(@3,@2-1) / @0", SM_VEV, M, eps, M%(name)s_MSbar)' % locals() )
# GP k = (v/M) (m/M)^eps
#self.modelBuilder.factory_(
# 'expr::C%(name)s("(@0/@1) * TMath::Power(@3/@1,@2)", SM_VEV, M, eps, M%(name)s_MSbar)' % locals() )

self.productionScaling = {
'ttH':'Ctop',
Expand Down
21 changes: 21 additions & 0 deletions src/MarkovChainMC.cc
Original file line number Diff line number Diff line change
Expand Up @@ -298,11 +298,32 @@ int MarkovChainMC::runOnce(RooWorkspace *w, RooStats::ModelConfig *mc_s, RooStat
mcInt.reset(0);
}
if (mcInt.get() == 0) return false;

// MCMCCalculator calls SetConfidenceLevel on MCMCInterval when creating it
// SetConfidenceLevel calls DetermineInterval, which compute the interval from the Markov Chain
// for a given confidence level. This results is cached, so if we change the number of burn-in steps
// after, it'll have no effect
// Clone the MCMCInterval to reset its state, set the number of burn-in steps before calling SetConfidenceLevel

MCMCInterval* oldInterval = mcInt.get();
RooStats::MarkovChain* clonedChain = slimChain(*mc_s->GetParametersOfInterest(), *oldInterval->GetChain());
MCMCInterval* newInterval = new MCMCInterval(TString("MCMCIntervalCloned_") + TString(mc.GetName()), RooArgSet(*mc_s->GetParametersOfInterest()), *clonedChain);
newInterval->SetUseKeys(oldInterval->GetUseKeys());
newInterval->SetIntervalType(oldInterval->GetIntervalType());
if (newInterval->GetIntervalType() == MCMCInterval::kTailFraction) {
newInterval->SetLeftSideTailFraction(0);
}
newInterval->SetNumBurnInSteps(burnInSteps_);

if (adaptiveBurnIn_) {
mcInt->SetNumBurnInSteps(guessBurnInSteps(*mcInt->GetChain()));
} else if (mcInt->GetChain()->Size() * burnInFraction_ > burnInSteps_) {
mcInt->SetNumBurnInSteps(mcInt->GetChain()->Size() * burnInFraction_);
}
newInterval->SetConfidenceLevel(oldInterval->ConfidenceLevel());

mcInt.reset(newInterval);

limit = mcInt->UpperLimit(*r);

if (saveChain_ || mergeChains_) {
Expand Down
1 change: 1 addition & 0 deletions src/ToyMCSamplerOpt.cc
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,7 @@ toymcoptutils::SinglePdfGenInfo::generate(const RooDataSet* protoData, int force
RooDataSet *
toymcoptutils::SinglePdfGenInfo::generateAsimov(RooRealVar *&weightVar, double weightScale)
{
if (mode_ == Counting) return generateCountingAsimov();
static int nPA = runtimedef::get("TMCSO_PseudoAsimov");
if (observables_.getSize() > 1 && runtimedef::get("TMCSO_AdaptivePseudoAsimov")) {
int nbins = 1;
Expand Down
73 changes: 73 additions & 0 deletions test/makeToyBkgdHist.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
#!/usr/bin/env python
########
# Author: Jessica Brinson ([email protected]), The Ohio State University
# Date created: 7 Aug, 2014
##############
# Script to generate a root file with a histogram containing
# the simulated number of background events for a given number of toys
##############
# Usage: First run the Higgs Combine tool, for example:
# combine datacard.txt -S 0 -M GenerateOnly -t 10000 --saveToys
#
# The -M GenerateOnly option generates only toys without running the limits.
# Greatly decreases the processing time: for a single-bin counting experiment,
# can generate 10K toys in ~1 min
#
# Can use the --toysFrequentist option to generate frequentist toys. See:
# https://hypernews.cern.ch/HyperNews/CMS/get/higgs-combination/572/1/2/1/1/1.html
#
##############
# Usage: python makeToyBkgdHist.py name_of_output_root_file_from_combine_command ntoys
##############

import time
import os
import sys
import math
import copy
import re
import subprocess
import glob

from ROOT import TF1, TFile, TH2F, gROOT, gStyle, TH1F, TCanvas, TString, TLegend, TPaveLabel, TH2D, TPave, Double

if len(sys.argv) < 2:
    print("Must specify the root file that you wish to parse.")
    sys.exit()

if len(sys.argv) < 3:
    print("Must specify the number of toys that you ran over.")
    sys.exit()

inputFile = str(sys.argv[1])
nToys = int(sys.argv[2])

# 'inFile' instead of 'file': avoid shadowing the Python builtin.
inFile = TFile.Open(inputFile)
inFile.cd()

# Locate the TDirectoryFile that holds the toys; if several are present
# the last one found wins (same behavior as the original loop).
rootDirectory = None
for key in inFile.GetListOfKeys():
    if key.GetClassName() != "TDirectoryFile":
        continue
    rootDirectory = key.GetName()

# Loop over the toys and extract nToyBkgd for each one.
nToyBkgdList = []
for i in range(1, nToys + 1):
    toyTemp = inFile.Get(rootDirectory + "/toy_" + str(i)).Clone()
    nToyBkgd = toyTemp.get(0).getRealValue("n_obs_binMyChan")
    nToyBkgdList.append(nToyBkgd)

# Fill a histogram with the number of simulated background events per toy.
h = TH1F("nToyBkgd", ";background yield;number of toys", 100,
         float(min(nToyBkgdList)), float(max(nToyBkgdList)))
for item in nToyBkgdList:
    h.Fill(float(item))

# BUG FIX: the original used inputFile.rstrip(".root"), which strips any
# trailing run of the characters '.', 'r', 'o', 't' (so e.g. "toys_rot.root"
# would be mangled). Strip the actual extension instead.
outputFileName = os.path.splitext(inputFile)[0]
outputFile = TFile(outputFileName + "_histToyNBkgd.root", "RECREATE")
h.Write()
outputFile.Write()
# Capture the name before Close(): don't query a closed file object.
finalName = outputFile.GetName()
outputFile.Close()

print("Finished writing output to " + finalName)
48 changes: 24 additions & 24 deletions test/validation/reference.json
Original file line number Diff line number Diff line change
Expand Up @@ -179,46 +179,46 @@
},
"status": "done"
},
"Counting_Sig_2_HybridNew": {
"Counting_Freq_Sig_2_HybridNew": {
"comment": "all 5 jobs done",
"results": {
"counting-B5p5-Obs11-StatOnly.txt": { "comment": "", "limit": 2.2776070514569495, "limitErr": 0.04169333290110222, "status": "done", "t_real": 0.026785735040903091 },
"counting-B5p5-Obs11-Syst30B.txt": { "comment": "", "limit": 1.8200591344181229, "limitErr": 0.027431268744850579, "status": "done", "t_real": 0.037475399672985077 },
"counting-B5p5-Obs11-Syst30C.txt": { "comment": "", "limit": 1.8200591344181229, "limitErr": 0.027431268744850579, "status": "done", "t_real": 0.037102766335010529 },
"counting-B5p5-Obs11-Syst30S.txt": { "comment": "", "limit": 2.3079844749459575, "limitErr": 0.04306045560354832, "status": "done", "t_real": 0.028499431908130646 },
"counting-B5p5-Obs11-Syst30U.txt": { "comment": "", "limit": 1.7866133654934699, "limitErr": 0.026728182601117423, "status": "done", "t_real": 0.049588046967983246 }
"counting-B5p5-Obs11-StatOnly.txt": { "comment": "", "limit": 1.9668474600086239, "limitErr": 0.03094861407777394, "status": "done", "t_real": 0.07762125134468079 },
"counting-B5p5-Obs11-Syst30B.txt": { "comment": "", "limit": 1.7410869889201568, "limitErr": 0.025811484800963624, "status": "done", "t_real": 0.16692283749580383 },
"counting-B5p5-Obs11-Syst30C.txt": { "comment": "", "limit": 1.7410869889201568, "limitErr": 0.025811484800963624, "status": "done", "t_real": 0.2165507823228836 },
"counting-B5p5-Obs11-Syst30S.txt": { "comment": "", "limit": 1.998114060627158, "limitErr": 0.03180700227099753, "status": "done", "t_real": 0.21559813618659973 },
"counting-B5p5-Obs11-Syst30U.txt": { "comment": "", "limit": 1.693241068090133, "limitErr": 0.02492454126755006, "status": "done", "t_real": 0.2478853464126587 }
},
"status": "done"
},
"Counting_Sig_3p5_HybridNew": {
"comment": "all 3 jobs done",
"results": {
"counting-B5p5-Obs20-Syst30B.txt": { "comment": "", "limit": 3.7455485932384569, "limitErr": 0.045518620350335492, "status": "done", "t_real": 2.4166839122772217 },
"counting-B5p5-Obs20-Syst30C.txt": { "comment": "", "limit": 3.7455485932384569, "limitErr": 0.045518620350335492, "status": "done", "t_real": 2.3150513172149658 },
"counting-B5p5-Obs20-Syst30U.txt": { "comment": "", "limit": 3.7319542690588556, "limitErr": 0.04433925796828575, "status": "done", "t_real": 1.9085551500320435 }
},
"status": "done"
},
"Counting_Freq_Sig_2_HybridNew": {
"Counting_Sig_2_HybridNew": {
"comment": "all 5 jobs done",
"results": {
"counting-B5p5-Obs11-StatOnly.txt": { "comment": "", "limit": 2.2776070514569495, "limitErr": 0.04169333290110222, "status": "done", "t_real": 0.10778608173131943 },
"counting-B5p5-Obs11-Syst30B.txt": { "comment": "", "limit": 1.7251524629226296, "limitErr": 0.025522303734902163, "status": "done", "t_real": 0.29485902190208435 },
"counting-B5p5-Obs11-Syst30C.txt": { "comment": "", "limit": 1.7251524629226296, "limitErr": 0.025522303734902163, "status": "done", "t_real": 0.29696539044380188 },
"counting-B5p5-Obs11-Syst30S.txt": { "comment": "", "limit": 2.2693014570431722, "limitErr": 0.04133141935148954, "status": "done", "t_real": 0.28087267279624939 },
"counting-B5p5-Obs11-Syst30U.txt": { "comment": "", "limit": 1.6993672959502422, "limitErr": 0.025047505122447689, "status": "done", "t_real": 0.3449137806892395 }
"counting-B5p5-Obs11-StatOnly.txt": { "comment": "", "limit": 1.9664209967178816, "limitErr": 0.030953136229774803, "status": "done", "t_real": 0.036718133836984634 },
"counting-B5p5-Obs11-Syst30B.txt": { "comment": "", "limit": 1.5412744685252837, "limitErr": 0.02249015174635627, "status": "done", "t_real": 0.04498303309082985 },
"counting-B5p5-Obs11-Syst30C.txt": { "comment": "", "limit": 1.5412744685252837, "limitErr": 0.02249015174635627, "status": "done", "t_real": 0.04984373226761818 },
"counting-B5p5-Obs11-Syst30S.txt": { "comment": "", "limit": 1.9557042822322015, "limitErr": 0.030668216699310413, "status": "done", "t_real": 0.04352619871497154 },
"counting-B5p5-Obs11-Syst30U.txt": { "comment": "", "limit": 1.5240348730572568, "limitErr": 0.02224443031938117, "status": "done", "t_real": 0.04982691630721092 }
},
"status": "done"
},
"Counting_Freq_Sig_3p5_HybridNew": {
"comment": "all 3 jobs done",
"results": {
"counting-B5p5-Obs20-Syst30B.txt": { "comment": "", "limit": 3.7319542690588556, "limitErr": 0.04433925796828575, "status": "done", "t_real": 18.953897476196289 },
"counting-B5p5-Obs20-Syst30C.txt": { "comment": "", "limit": 3.7319542690588556, "limitErr": 0.04433925796828575, "status": "done", "t_real": 18.709600448608398 },
"counting-B5p5-Obs20-Syst30U.txt": { "comment": "", "limit": 3.628565903610768, "limitErr": 0.036522131055911178, "status": "done", "t_real": 21.805641174316406 }
"counting-B5p5-Obs20-Syst30B.txt": { "comment": "", "limit": 3.7319542690588556, "limitErr": 0.04433925796798999, "status": "done", "t_real": 17.05150604248047 },
"counting-B5p5-Obs20-Syst30C.txt": { "comment": "", "limit": 3.7319542690588556, "limitErr": 0.04433925796798999, "status": "done", "t_real": 19.538915634155273 },
"counting-B5p5-Obs20-Syst30U.txt": { "comment": "", "limit": 3.628565903610768, "limitErr": 0.036522131056103024, "status": "done", "t_real": 21.426109313964844 }
},
"status": "done"
},
"Counting_Sig_3p5_HybridNew": {
"comment": "all 3 jobs done",
"results": {
"counting-B5p5-Obs20-Syst30B.txt": { "comment": "", "limit": 3.546759797347558, "limitErr": 0.03153329261659321, "status": "done", "t_real": 2.4946768283843994 },
"counting-B5p5-Obs20-Syst30C.txt": { "comment": "", "limit": 3.546759797347558, "limitErr": 0.03153329261659321, "status": "done", "t_real": 2.8525874614715576 },
"counting-B5p5-Obs20-Syst30U.txt": { "comment": "", "limit": 3.514851473001713, "limitErr": 0.029821150286336362, "status": "done", "t_real": 2.905097484588623 }
},
"status": "done"
},
"Counting_Lower_FeldmanCousins": {
"comment": "not validated",
"status": "done"
Expand Down

0 comments on commit 84d3d5a

Please sign in to comment.