-- SeoKonKang - 02 Feb 2010

[CRAB]

jobtype = cmssw
scheduler = glite
#scheduler = glitecoll
### NOTE: just set the name of the server (pi, lnl, etc.) and
###       CRAB will submit the jobs to that server...
#server_name = bari
#server_name = legnaro
server_name = cern

[CMSSW]

### The data you want to access (to be found on DBS)
datasetpath=/MinimumBias/BeamCommissioning09-SD_AllMinBias-Jan23Skim-v1/RAW-RECO
#datasetpath=none
#dbs_url=http://vocms31.cern.ch/cms_dbs_prod_global/servlet/DBSServlet

### The ParameterSet you want to use
pset=run_JRA_900GeV_MC_cfg.py
runSelection=123596

### Splitting parameters
total_number_of_events=-1
#total_number_of_events=2000000
number_of_jobs = 100

### The output files (comma-separated list)
output_file=DataRun123596Skim01.root
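
The pset named above (run_JRA_900GeV_MC_cfg.py) is not reproduced on this page. As a rough sketch only, a CMSSW configuration of this kind usually looks like the fragment below; the process name, the placeholder source and every other detail are assumptions, not the content of the actual file. The one point it illustrates is that the file name written by the pset has to match output_file above.

import FWCore.ParameterSet.Config as cms

process = cms.Process("JRA")   # process name is only a guess

# CRAB fills in the input files, the run selection and the per-job event
# limits itself, so the source left in the pset is just a placeholder.
process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring()
)
process.maxEvents = cms.untracked.PSet(input = cms.untracked.int32(-1))

# Must match output_file in the [CMSSW] section, otherwise CRAB will not
# find and stage out the produced ROOT file.
process.TFileService = cms.Service("TFileService",
    fileName = cms.string("DataRun123596Skim01.root")
)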

[USER]

### OUTPUT files Management
##  output back into UI
return_data = 0
use_central_bossDB = 0
use_boss_rt = 0

### To use a specific name for the UI directory where CRAB will create the jobs to submit (with full path);
### the default directory will be "crab_0_<date>_<time>"
#ui_working_dir = /full/path/Name_of_Directory

### To specify the UI directory where to store the CMS executable output.
### FULL path is mandatory. Default is <ui_working_dir>/res.
#outputdir= /full/path/yourOutDir

### To specify the UI directory where to store the stderr, stdout and .BrokerInfo of submitted jobs.
### FULL path is mandatory. Default is <ui_working_dir>/res.
#logdir= /full/path/yourLogDir

### OUTPUT files INTO A SE
copy_data = 1

### if you want to copy the data to an "official CMS site"
### you have to specify the site name in its official CMS form, e.g.
#storage_element = T2_IT_Bari
### the user_remote_dir will be created under the SE mountpoint
### in the case of publication this directory is not considered
#user_remote_dir = name_directory_you_want

### if you want to copy your data to the CAF
#storage_element = T2_CH_CAF
### the user_remote_dir will be created under the SE mountpoint
### in the case of publication this directory is not considered
#user_remote_dir = name_directory_you_want

### if you want to copy your data to your CASTOR area at CERN
### or to a "not official CMS site", you have to specify the complete name of the SE
storage_element=srm-cms.cern.ch
### this directory is the mountpoint of the SE
storage_path=/srm/managerv2?SFN=/castor/cern.ch
### directory or tree of directories under the mountpoint
user_remote_dir = /user/s/seogoni/crab/100201/
email=seogoni@cern.ch
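
For clarity, the stage-out destination is roughly the concatenation of the three [USER] settings above (CRAB itself adds the SRM port and transfer options, so treat this small Python sketch as an approximation):

# Approximate composition of the stage-out destination; values copied from
# the [USER] section above.
storage_element = "srm-cms.cern.ch"
storage_path    = "/srm/managerv2?SFN=/castor/cern.ch"
user_remote_dir = "/user/s/seogoni/crab/100201/"

destination = "srm://" + storage_element + storage_path + user_remote_dir
print(destination)
# srm://srm-cms.cern.ch/srm/managerv2?SFN=/castor/cern.ch/user/s/seogoni/crab/100201/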


### To publish produced output in a local instance of DBS set publish_data = 1
#publish_data=0
### Specify the dataset name. The full path will be <primarydataset>/<publish_data_name>/USER
#publish_data_name = name_you_prefer
### Specify the URL of the DBS instance where CRAB has to publish the output files
#dbs_url_for_publication = https://cmsdbsprod.cern.ch:8443/cms_dbs_caf_analysis_01_writer/servlet/DBSServlet

### To specify additional files to be put in the InputSandBox,
### write the full path if the files are not in the current directory
### (wildcards * are allowed): comma-separated list
#additional_input_files = file1, file2, /full/path/file3

# only if using a CRAB server
#thresholdLevel = 100
#eMail = your@Email.address

[GRID]
#
## RB/WMS management:
rb = CERN
proxy_server            = myproxy.cern.ch
virtual_organization    = cms
#retry_count             = 0
#lcg_catalog_type        = lfc
#lfc_host                = lfc-cms-test.cern.ch
#lfc_home                = /grid/cms

##  Black and White Lists management:
## By Storage
se_black_list = T0,T1
#se_white_list =

## By ComputingElement
#ce_black_list =
#ce_white_list =

[CONDORG]

# Set this to condor to override the batchsystem defined in gridcat.
#batchsystem = condor

# Specify additional condor_g requirements
# use this requirement to run on CMS-dedicated hardware
# globus_rsl = (condor_submit=(requirements 'ClusterName == \"CMS\" && (Arch == \"INTEL\" || Arch == \"X86_64\")'))
# use this requirement to run on the new hardware
#globus_rsl = (condor_submit=(requirements 'regexp(\"cms-*\",Machine)'))

