# for emacs: -*- mode: sh; -*-

# Rabbit May 2005 Broad Assembly
#

# DOWNLOAD SEQUENCE

ssh kkstore01
mkdir /cluster/store9/oryCun1
cd /cluster/data
ln -s /cluster/store9/oryCun1 oryCun1
cd /cluster/data/oryCun1
mkdir jkStuff bed
mkdir broad
cd broad
# ftp'ed with password from Broad
# -rw-rw-r--  1 braney protein   63059205 May 20 18:05 assembly.agp
# -rw-rw-r--  1 braney protein 2113025887 May 20 18:28 assembly.bases
# -rw-rw-r--  1 braney protein 1093803999 May 20 19:05 assembly.quals.gz
# -rw-rw-r--  1 braney protein  307971759 May 20 19:20 unplaced.fasta.gz
# -rw-rw-r--  1 braney protein  946443543 May 20 19:47 unplaced.qual.gz

gunzip assembly.bases.gz
faSize assembly.bases
# 2076044328 bases (0 N's 2076044328 real 2076044328 upper 0 lower) in 719158 sequences in 1 files
# Total size: mean 2886.8 sd 2554.1 min 83 (contig_136781) max 174175 (contig_598973) median 2137
# N count: mean 0.0 sd 0.0
# U count: mean 2886.8 sd 2554.1
# L count: mean 0.0 sd 0.0

ssh kolossus
cd /cluster/data/oryCun1/broad
/cluster/bin/x86_64/agpAllToFaFile assembly.agp assembly.bases ../oryCun1.fa
cd ..
faSize oryCun1.fa
# 3464410039 bases (1388365711 N's 2076044328 real 2076044328 upper 0 lower) in 215471 sequences in 1 files
# Total size: mean 16078.3 sd 33569.3 min 1000 (scaffold_79841) max 854852 (scaffold_211151) median 5354
# N count: mean 6443.4 sd 16351.0
# U count: mean 9634.9 sd 19441.8
# L count: mean 0.0 sd 0.0

/cluster/bin/scripts/agpToLift < assembly.agp > ../jkStuff/assembly.lft

# PARTITION SCAFFOLDS FOR REPEATMASKER RUN
# glom the tiny scaffolds up into ~50k collections
ssh kkstore01
cd /cluster/data/oryCun1
mkdir chunks50k
faSplit about broad/assembly.bases 50000 chunks50k/chunk_
cd chunks50k
for i in 0 1 2 3 4 5 6 7 8 9; do mkdir $i; mv *$i.fa $i; done

# RUN REPEAT MASKER
# make the run directory, output directory, and job list
ssh kkstore01
cd /cluster/data/oryCun1
tcsh
cat << '_EOF_' > jkStuff/RMRabbit
#!/bin/csh -fe
cd $1
/bin/mkdir -p /tmp/oryCun1/$2
/bin/cp $3 /tmp/oryCun1/$2/
pushd /tmp/oryCun1/$2
/cluster/bluearc/RepeatMasker/RepeatMasker -s -species rabbit $2
popd
/bin/cp /tmp/oryCun1/$2/$2.out ./
/bin/rm -fr /tmp/oryCun1/$2/*
/bin/rmdir --ignore-fail-on-non-empty /tmp/oryCun1/$2
/bin/rmdir --ignore-fail-on-non-empty /tmp/oryCun1
'_EOF_'
# << this line makes emacs coloring happy
chmod +x jkStuff/RMRabbit

# mkdir RMRun RMOut
# for i in chunks800k/*.fa
# do
#   d="/cluster/data/oryCun1"
#   c=`basename $i`
#   echo "../jkStuff/RMRabbit $d/RMOut $c { check in line+ $d/$i} {check out line+ $d/RMOut/$c.out}"
# done > RMRun/RMJobs

mkdir RMRun RMOut
cd RMOut
mkdir 0 1 2 3 4 5 6 7 8 9
cd ../chunks50k
for i in */*.fa
do
  e=`dirname $i`
  d="/cluster/data/oryCun1"
  c=`basename $i`
  echo "../jkStuff/RMRabbit $d/RMOut/$e $c {check in line+ $d/chunks50k/$i} {check out line+ $d/RMOut/$e/$c.out}"
done > ../RMRun/RMJobs

# do the run
ssh kk
cd /cluster/data/oryCun1/RMRun
para create RMJobs
para try, check, push, check,...
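# When the batch finishes, capture the run summary (a minimal sketch;
# "para time" is used later in this doc for the same purpose and produces
# stats like those pasted below):
para time > run.time
cat run.time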
# Completed: 39516 of 39516 jobs
# CPU time in finished jobs: 17959517s 299325.28m 4988.75h 207.86d 0.569 y
# IO & Wait Time: 144394s 2406.57m 40.11h 1.67d 0.005 y
# Average job time: 458s 7.64m 0.13h 0.01d
# Longest finished job: 2169s 36.15m 0.60h 0.03d
# Submission to last job: 47286s 788.10m 13.13h 0.55d

# Lift up the split-scaffold .out's to scaffold .out's
ssh kkstore01
cd /cluster/data/oryCun1/RMOut
for i in 0 1 2 3 4 5 6 7 8 9; do echo $i; liftUp -nohead $i.out ../jkStuff/assembly.lft warn $i/*.fa.out>/dev/null; done
head -3 0.out > oryCun1.out
for i in 0 1 2 3 4 5 6 7 8 9; do tail +4 $i.out; done >> oryCun1.out

# Load the .out files into the database with:
ssh hgwdev
hgLoadOut oryCun1 /cluster/data/oryCun1/RMOut/oryCun1.out
# Strange perc. field -3.4 line 337578 of /cluster/data/oryCun1/RMOut/oryCun1.out
# Strange perc. field -10216.6 line 667688 of /cluster/data/oryCun1/RMOut/oryCun1.out
# Strange perc. field -280.2 line 667688 of /cluster/data/oryCun1/RMOut/oryCun1.out
# Strange perc. field -0.7 line 1222784 of /cluster/data/oryCun1/RMOut/oryCun1.out
# Strange perc. field -1.4 line 1484447 of /cluster/data/oryCun1/RMOut/oryCun1.out
# Strange perc. field -7901.3 line 1806373 of /cluster/data/oryCun1/RMOut/oryCun1.out
# Strange perc. field -0.2 line 2372596 of /cluster/data/oryCun1/RMOut/oryCun1.out
# Strange perc. field -4.5 line 2658770 of /cluster/data/oryCun1/RMOut/oryCun1.out
# Strange perc. field -1.2 line 3016373 of /cluster/data/oryCun1/RMOut/oryCun1.out
# Loading up table oryCun1_rmsk
# note: 6 records dropped due to repStart > repEnd
hgsql oryCun1 -e 'rename table oryCun1_rmsk to rmsk'
# Fix up the indices too:
hgsql oryCun1 -e 'drop index bin on rmsk; \
    drop index genoStart on rmsk; \
    drop index genoEnd on rmsk; \
    create index bin on rmsk (genoName(16), bin); \
    create index genoStart on rmsk (genoName(16), genoStart); \
    create index genoEnd on rmsk (genoName(16), genoEnd);'

# EXTRACTING GAP INFO FROM BLOCKS OF NS (DONE 11/5/04 angie)
ssh kkstore01
mkdir /cluster/data/oryCun1/bed/fakeAgp
cd /cluster/data/oryCun1/bed/fakeAgp
faGapSizes ../../downloads/scaffolds.fasta \
    -niceSizes=5,10,20,25,30,40,50,100,250,500,1000,10000,100000
# Wow, all blocks of N's seem to be exactly 100bp long.
# hgFakeAgp's default -minContigGap of 25 will be fine.
hgFakeAgp ../../downloads/scaffolds.fasta fake.agp
ssh hgwdev
hgLoadGap -unsplit oryCun1 /cluster/data/oryCun1/bed/fakeAgp/fake.agp

# SIMPLE REPEATS (TRF)
ssh kkstore01
mkdir /cluster/data/oryCun1/bed/simpleRepeat
cd /cluster/data/oryCun1/bed/simpleRepeat
nice trfBig -trf=/cluster/bin/i386/trf ../../oryCun1.fa \
    /dev/null -bedAt=simpleRepeat.bed -tempDir=/tmp \
    |& egrep -v '^(Removed|Tandem|Copyright|Loading|Allocating|Initializing|Computing|Scanning|Freeing)' \
    > trf.log &
# check on this with tail -f trf.log
# Load this into the database as so
ssh hgwdev
hgLoadBed oryCun1 simpleRepeat /cluster/data/oryCun1/bed/simpleRepeat/simpleRepeat.bed -sqlTable=$HOME/kent/src/hg/lib/simpleRepeat.sql

# FILTER SIMPLE REPEATS (TRF) INTO MASK
# make a filtered version of the trf output:
# keep trf's with period <= 12:
ssh kkstore01
cd /cluster/data/oryCun1/bed/simpleRepeat
awk '{if ($5 <= 12) print;}' simpleRepeat.bed > trfMask.bed

# MASK FA USING REPEATMASKER AND FILTERED TRF FILES
ssh kkstore01
cd /cluster/data/oryCun1
maskOutFa -soft oryCun1.fa bed/simpleRepeat/trfMask.bed oryCun1.simple.fa
maskOutFa -softAdd oryCun1.simple.fa RMOut/oryCun1.out oryCun1.masked.fa
# Now clean up the unmasked split scaffolds to avoid confusion later.
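# Before removing anything, a quick sanity check on the masking (a hedged
# sketch, reusing faSize from above; the soft-masked file should now report
# a large lower-case count instead of "0 lower"):
faSize oryCun1.masked.fa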
rm -r chunks500k scaffoldsSplit.fa jkStuff/scaffoldsSplit.lft

# CREATING DATABASE
# Create the database.
ssh hgwdev
# Make sure there is at least 5 gig free for the database
df -h /var/lib/mysql
# Filesystem            Size  Used Avail Use% Mounted on
# /dev/sdc1             1.8T  915G  746G  56% /var/lib/mysql
hgsql '' -e 'create database oryCun1'

# STORE SEQUENCE AND ASSEMBLY INFORMATION
# Translate to 2bit
ssh kkstore01
cd /cluster/data/oryCun1
faToTwoBit oryCun1.masked.fa oryCun1.2bit
# Make chromInfo.tab.
mkdir bed/chromInfo
twoBitInfo oryCun1.2bit stdout | awk '{printf "%s\t%s\t/gbdb/oryCun1/oryCun1.2bit\n",$1,$2;}' \
    | sort > bed/chromInfo/chromInfo.tab

# Make a symbolic link from /gbdb/oryCun1 to the 2bit.
ssh hgwdev
mkdir -p /gbdb/oryCun1
ln -s /cluster/data/oryCun1/oryCun1.2bit /gbdb/oryCun1/
# Load chromInfo table.
hgsql oryCun1 < $HOME/kent/src/hg/lib/chromInfo.sql
hgsql oryCun1 -e 'load data local infile "/cluster/data/oryCun1/bed/chromInfo/chromInfo.tab" into table chromInfo'
# Make chrom.sizes from chromInfo contents and check scaffold count.
hgsql oryCun1 -N -e 'select chrom,size from chromInfo' > /cluster/data/oryCun1/chrom.sizes
wc -l /cluster/data/oryCun1/chrom.sizes
# 215471 /cluster/data/oryCun1/chrom.sizes

# CREATING GRP TABLE FOR TRACK GROUPING
# Copy all the data from the table "grp"
# in an existing database to the new database
ssh hgwdev
hgsql oryCun1 -e 'create table grp (PRIMARY KEY(NAME)) select * from hg17.grp'

# MAKE HGCENTRALTEST ENTRY AND TRACKDB TABLE (DONE 11/4/04 angie)
# Warning: genome and organism fields must correspond
# with defaultDb values
echo 'INSERT INTO dbDb \
    (name, description, nibPath, organism, defaultPos, active, orderKey, genome, scientificName, \
    htmlPath, hgNearOk, hgPbOk, sourceName) values \
    ("oryCun1", "May 2005", "/gbdb/oryCun1", "Rabbit", \
    "", 1, 57, "Rabbit", "Oryctolagus cuniculus", "/gbdb/oryCun1/html/description.html", \
    0, 0, "Broad May 2005");' \
    | hgsql -h genome-testdb hgcentraltest
echo 'INSERT INTO defaultDb (genome, name) values ("Rabbit", "oryCun1");' \
    | hgsql -h genome-testdb hgcentraltest

# Make trackDb table so browser knows what tracks to expect:
ssh hgwdev
cd ~/kent/src/hg/makeDb/trackDb
cvs up -d -P
# Edit trackDb/makefile to add oryCun1 to the DBS variable.
mkdir -p rabbit/oryCun1
# Create a simple rabbit/oryCun1/description.html file.
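# For example, a minimal placeholder (assumed wording -- replace with the real
# assembly details before going public):
cat << '_EOF_' > rabbit/oryCun1/description.html
<P>
The May 2005 rabbit (<em>Oryctolagus cuniculus</em>) draft assembly
(UCSC version oryCun1) was produced by the Broad Institute.
</P>
'_EOF_'
# << this line makes emacs coloring happy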
cvs add rabbit/oryCun1
cvs add rabbit/oryCun1/description.html
make update DBS=oryCun1 ZOO_DBS=

# go public on genome-test
cvs ci makefile
cvs ci rabbit/oryCun1
mkdir /gbdb/oryCun1/html
# in a clean, updated tree's kent/src/hg/makeDb/trackDb:
make alpha

# PUT SEQUENCE ON /ISCRATCH FOR BLASTZ
# First, agglomerate small scaffolds into chunks of ~100k median
# (many scaffolds are larger than that) so we don't have too many
# files for one dir, but keep a reasonably low job run time:
ssh kkstore01
cd /cluster/data/oryCun1
mkdir chunksUnsplit
faSplit about scaffolds.fa 100000 chunksUnsplit/chunk_
ssh kkr1u00
mkdir /iscratch/i/oryCun1
cp -pR /cluster/data/oryCun1/chunksUnsplit /iscratch/i/oryCun1/
cp -p /cluster/data/oryCun1/oryCun1.2bit /iscratch/i/oryCun1/
iSync

# PRODUCING GENSCAN PREDICTIONS (DONE 11/4/04 angie)
ssh kkstore01
# Make hard-masked scaffolds and split up for processing:
cd /cluster/data/oryCun1
maskOutFa scaffolds.fa hard scaffolds.fa.masked
mkdir chunksUnsplitMasked
faSplit about scaffolds.fa.masked 100000 chunksUnsplitMasked/chunk_
mkdir /cluster/data/oryCun1/bed/genscan
cd /cluster/data/oryCun1/bed/genscan
# Check out hg3rdParty/genscanlinux to get latest genscan:
cvs co hg3rdParty/genscanlinux
# Make 3 subdirectories for genscan to put their output files in
mkdir gtf pep subopt
ls -1S ../../chunksUnsplitMasked/chunk*.fa > chunks.list
cat << '_EOF_' > gsub
#LOOP
gsBig {check in line+ $(path1)} {check out line gtf/$(root1).gtf} -trans={check out line pep/$(root1).pep} -subopt={check out line subopt/$(root1).bed} -exe=hg3rdParty/genscanlinux/genscan -par=hg3rdParty/genscanlinux/HumanIso.smat -tmp=/tmp -window=2400000
#ENDLOOP
'_EOF_'
# << this line keeps emacs coloring happy
gensub2 chunks.list single gsub jobList
ssh kki
cd /cluster/data/oryCun1/bed/genscan
para create jobList
para try, check, push, check, ...
#Completed: 463 of 463 jobs
#Average job time: 12s 0.21m 0.00h 0.00d
#Longest job: 317s 5.28m 0.09h 0.00d
#Submission to last job: 445s 7.42m 0.12h 0.01d
# If there are crashes, diagnose with "para problems".
# If a job crashes due to genscan running out of memory, re-run it
# manually with "-window=1200000" instead of "-window=2400000".

# Concatenate scaffold-level results:
ssh kkstore01
cd /cluster/data/oryCun1/bed/genscan
cat gtf/*.gtf > genscan.gtf
cat subopt/*.bed > genscanSubopt.bed
cat pep/*.pep > genscan.pep
# Clean up
rm -r /cluster/data/oryCun1/chunksUnsplitMasked

# Load into the database as so:
ssh hgwdev
cd /cluster/data/oryCun1/bed/genscan
# Reloaded without -genePredExt 1/6/05:
ldHgGene -gtf oryCun1 genscan genscan.gtf
hgPepPred oryCun1 generic genscanPep genscan.pep
hgLoadBed oryCun1 genscanSubopt genscanSubopt.bed

# MAKE DOWNLOADABLE FILES (DONE 11/4/04 angie)
# RECREATE BIGZIPS DOWNLOADS AND README FILE (DONE, 2006-05-05, hartera)
# MOVED LIFTOVER FILES FOR DOWNLOAD (DONE, 2006-05-23, hartera)
# CORRECTED LINKS FOR RABBIT BIGZIPS (DONE, 2006-06-11, hartera)
ssh kkstore01
mkdir /cluster/data/oryCun1/zips
cd /cluster/data/oryCun1
zip -j zips/scaffoldOut.zip RMOut/scaffolds.fa.out
zip -j zips/scaffoldFa.zip scaffolds.fa
zip -j zips/scaffoldFaMasked.zip scaffolds.fa.masked
zip -j zips/scaffoldTrf.zip bed/simpleRepeat/trfMask.bed
foreach f (zips/*.zip)
  echo $f
  unzip -t $f | tail -1
end
ssh hgwdev
mkdir /usr/local/apache/htdocs/goldenPath/oryCun1
cd /usr/local/apache/htdocs/goldenPath/oryCun1
mkdir bigZips database
# Create README.txt files in bigZips/ and database/ to explain the files.
cd bigZips
cp -p /cluster/data/oryCun1/zips/*.zip .
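# Spot-check the copied archives before publishing checksums (a sketch that
# simply repeats the integrity test used above, this time on the copies):
foreach f (*.zip)
  echo $f
  unzip -t $f | tail -1
end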
md5sum *.zip > md5sum.txt

# Add more bigZips downloads. Some of the above downloads don't exist
# anymore in bigZips in /usr/local/apache/htdocs/goldenPath/... on hgwdev
# (2006-05-05, hartera)
ssh kkstore04
mkdir /cluster/data/oryCun1/bigZips
cd /cluster/data/oryCun1
# soft-masked scaffolds sequences
cp -p oryCun1.masked.fa.gz ./bigZips/scaffoldSoftMask.fa.gz
# assembly agp
cp -p ./broad/assembly.agp ./bigZips/
# Simple Repeats
cp -p ./bed/simpleRepeat/trfMask.bed ./bigZips/trf.bed
# RepeatMasker output
cp -p ./RMOut/oryCun1.out ./bigZips/rmsk.out
# unmasked scaffolds sequences
cp -p oryCun1.unmasked.fa.gz ./bigZips/scaffold.fa.gz
cd bigZips
gzip assembly.agp
gzip trf.bed
gzip rmsk.out
# check integrity of files
foreach f (*.gz)
  echo $f
  gunzip -t $f | tail -1
end
md5sum *.gz > md5sum.txt
# link the *.gz and *.txt files to hgwdev:/usr/local/apache/....

# Remake the links to rabbit bigZips - linked to dasNov1 bigZips before
# (2006-06-11, hartera).
ssh hgwdev
set gp=/usr/local/apache/htdocs/goldenPath/oryCun1
rm -r $gp/bigZips
mkdir -p $gp/bigZips
ln -s /cluster/data/oryCun1/bigZips/{*.gz,*.txt} $gp/bigZips
# copy over README.txt and edit for bigZips

# oryCun1.mm7.over.chain.gz liftOver chain is in the md5sum.txt in the
# vsMm7 directory. It should be in the liftOver directory so move it there
# and re-make md5sum.txt for vsMm7 dir (hartera, 2006-05-23)
ssh hgwdev
set gp=/usr/local/apache/htdocs/goldenPath/oryCun1
# copy over from hgwdevold
cd /cluster/data/oryCun1/bed/liftOver
scp \
    hgwdevold:$gp/liftOver/oryCun1ToMm7.over.chain.gz .
# make link for this and oryCun1ToHg18 liftOver chains to the
# goldenPath liftOver directory.
ln -s /cluster/data/oryCun1/bed/liftOver/*.gz $gp/liftOver
# create md5sum.txt
md5sum *.gz > md5sum.txt
# then re-make md5sum.txt for vsMm7 files
cd $gp/vsMm7
rm md5sum.txt
md5sum *.gz > md5sum.txt
# check that README.txt for vsMm7 is correct.

# SWAP DM1-DROANA1 BLASTZ (DONE 11/4/04 angie)
ssh kkstore01
mkdir /cluster/data/oryCun1/bed/blastz.dm1.swap.2004-11-03
ln -s blastz.dm1.swap.2004-11-03 /cluster/data/oryCun1/bed/blastz.dm1
cd /cluster/data/oryCun1/bed/blastz.dm1
set aliDir = /cluster/data/dm1/bed/blastz.oryCun1
cp $aliDir/S1.len S2.len
cp $aliDir/S2.len S1.len
# With 11k scaffolds, we don't want a directory with one file per
# scaffold. So just make one .axt with everything -- not too huge
# anyway, given these little insect genomes.
cat $aliDir/axtChrom/chr*.axt \
    | axtSwap stdin $aliDir/S1.len $aliDir/S2.len stdout \
    | axtSort stdin dm1.axt
du -sh $aliDir/axtChrom dm1.axt
#389M    /cluster/data/dm1/bed/blastz.oryCun1/axtChrom
#389M    dm1.axt

# CHAIN MELANOGASTER BLASTZ (DONE 11/4/04 angie)
# Run axtChain on kolossus (one big dm1.axt input)
ssh kolossus
mkdir /cluster/data/oryCun1/bed/blastz.dm1/axtChain
cd /cluster/data/oryCun1/bed/blastz.dm1/axtChain
axtChain -verbose=0 ../dm1.axt /cluster/data/oryCun1/oryCun1.2bit \
    /cluster/data/dm1/nib stdout \
    | chainAntiRepeat /cluster/data/oryCun1/oryCun1.2bit \
      /cluster/data/dm1/nib stdin stdout \
    | chainMergeSort stdin > all.chain

# Load chains into database
ssh hgwdev
cd /cluster/data/oryCun1/bed/blastz.dm1/axtChain
hgLoadChain -tIndex oryCun1 chainDm1 all.chain

# NET MELANOGASTER BLASTZ (DONE 11/4/04 angie)
ssh kkstore01
cd /cluster/data/oryCun1/bed/blastz.dm1/axtChain
chainPreNet all.chain ../S1.len ../S2.len stdout \
    | chainNet stdin -minSpace=1 ../S1.len ../S2.len stdout /dev/null \
    | netSyntenic stdin noClass.net

# Add classification info using db tables:
ssh hgwdev
cd /cluster/data/oryCun1/bed/blastz.dm1/axtChain
netClass -noAr noClass.net oryCun1 dm1 melanogaster.net \
    |& g -v "table gap doesn't exist"

# Make a 'syntenic' subset:
ssh kkstore01
cd /cluster/data/oryCun1/bed/blastz.dm1/axtChain
rm noClass.net
netFilter -syn melanogaster.net > melanogasterSyn.net

# Load the nets into database
ssh hgwdev
cd /cluster/data/oryCun1/bed/blastz.dm1/axtChain
netFilter -minGap=10 melanogaster.net | hgLoadNet oryCun1 netDm1 stdin
netFilter -minGap=10 melanogasterSyn.net \
    | hgLoadNet oryCun1 netSyntenyDm1 stdin

# MAKE AXTNET (DONE 11/4/04 angie)
ssh kkstore01
cd /cluster/data/oryCun1/bed/blastz.dm1/axtChain
netToAxt melanogaster.net all.chain /cluster/data/oryCun1/oryCun1.2bit \
    /cluster/data/dm1/nib stdout \
    | axtSort stdin melanogasterNet.axt

# MAKE VSDM1 DOWNLOADABLES (DONE 11/4/04 angie)
ssh kkstore01
cd /cluster/data/oryCun1/bed/blastz.dm1/axtChain
nice gzip *.{chain,net,axt}
ssh hgwdev
mkdir /usr/local/apache/htdocs/goldenPath/oryCun1/vsDm1
cd /usr/local/apache/htdocs/goldenPath/oryCun1/vsDm1
cp -p /cluster/data/oryCun1/bed/blastz.dm1/axtChain/all.chain.gz \
    melanogaster.chain.gz
cp -p /cluster/data/oryCun1/bed/blastz.dm1/axtChain/melanogaster.net.gz .
cp -p /cluster/data/oryCun1/bed/blastz.dm1/axtChain/melanogasterNet.axt.gz .
# Make a README.txt which explains the files & formats.
md5sum *.gz */*.gz > md5sum.txt

# MAKE 11.OOC FILE FOR BLAT (DONE 11/4/04 angie)
# Use -repMatch=100 (based on size -- for human we use 1024, and
# fly size is ~4.4% of human judging by gapless dm1 genome size from
# featureBits -- we would use 45, but bump that up a bit to be more
# conservative).
ssh kkr1u00
mkdir /cluster/bluearc/oryCun1
blat /cluster/data/oryCun1/oryCun1.2bit /dev/null /dev/null -tileSize=11 \
    -makeOoc=/cluster/bluearc/oryCun1/11.ooc -repMatch=100
#Wrote 9721 overused 11-mers to /cluster/bluearc/oryCun1/11.ooc
cp -p /cluster/bluearc/oryCun1/*.ooc /iscratch/i/oryCun1/
iSync

# GET GENBANK mRNA AND EST COUNTS
# Go to the latest GenBank full release dir and get an idea of how
# many mRNAs and ESTs there are to align.
ssh eieio
cd /cluster/data/genbank/data/processed/genbank.144.0/full
awk '$4 == "Rabbit" {print $4 " " $5;}' mrna.gbidx | sort | uniq -c
#      9 Drosophila ananassae
#      1 Drosophila mojavensis
#     33 Drosophila virilis
# Wow, questionable whether we should have a native mRNA track here.
awk '$4 == "Drosophila" {print $4 " " $5;}' est*.gbidx | sort | uniq -c # 382439 Drosophila melanogaster # 4105 Drosophila simulans # 779 Drosophila yakuba # And a native EST track isn't even a possibility for the new flies # at this point! # AUTO UPDATE GENBANK MRNA RUN (DONE 11/16/04 angie) ssh hgwdev # Update genbank config and source in CVS: cd ~/kent/src/hg/makeDb/genbank cvsup . # Edit etc/genbank.conf and add these lines (note scaffold-browser settings): # oryCun1 (D. ananassae) oryCun1.genome = /iscratch/i/oryCun1/oryCun1.2bit oryCun1.mondoTwoBitParts = 1000 oryCun1.lift = no oryCun1.refseq.mrna.native.load = no oryCun1.refseq.mrna.xeno.load = yes oryCun1.refseq.mrna.xeno.pslReps = -minCover=0.15 -minAli=0.75 -nearTop=0.005 oryCun1.genbank.mrna.xeno.load = yes # GenBank has no D. ananassae ESTs at this point... that may change. oryCun1.genbank.est.native.load = no oryCun1.genbank.est.xeno.load = no oryCun1.downloadDir = oryCun1 oryCun1.perChromTables = no cvs ci etc/genbank.conf # Since D. ananassae is a new species for us, edit src/lib/gbGenome.c. # Pick some other browser species, & monkey-see monkey-do. cvs diff src/lib/gbGenome.c make cvs ci src/lib/gbGenome.c # Edit src/align/gbBlat to add /iscratch/i/oryCun1/11.ooc cvs diff src/align/gbBlat make cvs ci src/align/gbBlat # Install to /cluster/data/genbank: make install-server ssh `fileServer /cluster/data/genbank/` cd /cluster/data/genbank # This is an -initial run, (xeno) RefSeq only: nice bin/gbAlignStep -srcDb=refseq -type=mrna -initial oryCun1 & tail -f [its logfile] # Load results: ssh hgwdev cd /cluster/data/genbank nice bin/gbDbLoadStep -verbose=1 -drop -initialLoad oryCun1 featureBits oryCun1 xenoRefGene #16520385 bases of 165766797 (9.966%) in intersection # Clean up: rm -rf work/initial.oryCun1 # This is an -initial run, mRNA only: nice bin/gbAlignStep -srcDb=genbank -type=mrna -initial oryCun1 & tail -f [its logfile] # Load results: ssh hgwdev cd /cluster/data/genbank nice bin/gbDbLoadStep -verbose=1 -drop -initialLoad oryCun1 featureBits oryCun1 all_mrna #19602 bases of 165766797 (0.012%) in intersection featureBits oryCun1 xenoMrna #17295487 bases of 165766797 (10.434%) in intersection # Clean up: rm -rf work/initial.oryCun1 # MAKE GCPERCENT ssh hgwdev mkdir /cluster/data/oryCun1/bed/gcPercent cd /cluster/data/oryCun1/bed/gcPercent # create and load gcPercent table hgGcPercent oryCun1 /cluster/data/oryCun1 # MAKE HGCENTRALTEST BLATSERVERS ENTRY (DONE 12/?/04 heather) ssh hgwdev echo 'insert into blatServers values("oryCun1", "blat14", "17780", 1, 0); \ insert into blatServers values("oryCun1", "blat14", "17781", 0, 1);' \ | hgsql -h genome-testdb hgcentraltest # MAKE Drosophila Proteins track (DONE braney 11/17/04) ssh kkstore01 mkdir -p /cluster/data/oryCun1/blastDb cd /cluster/data/oryCun1/blastDb faSplit sequence ../scaffolds.fa 400 x for i in *.fa; do formatdb -i $i -p F 2> /dev/null; done rm *.fa *.log ssh kkr1u00 mkdir -p /iscratch/i/oryCun1/blastDb cp /cluster/data/oryCun1/blastDb/* /iscratch/i/oryCun1/blastDb (iSync) 2>&1 > sync.out mkdir -p /cluster/data/oryCun1/bed/tblastn.dm1FB cd /cluster/data/oryCun1/bed/tblastn.dm1FB ls -1S /iscratch/i/oryCun1/blastDb/*.nsq | sed "s/\.nsq//" > bug.lst exit # back to kkstore01 cd /cluster/data/oryCun1/bed/tblastn.dm1FB mkdir fbfa # calculate a reasonable number of jobs calc `wc /cluster/data/dm1/bed/blat.dm1FB/dm1FB.psl | awk "{print \\\$1}"`/\(150000/`wc bug.lst | awk "{print \\\$1}"`\) # 18735/(150000/396) = 49.460400 split -l 49 
    /cluster/data/dm1/bed/blat.dm1FB/dm1FB.psl fbfa/fb
cd fbfa
for i in *; do pslxToFa $i $i.fa; rm $i; done
cd ..
ls -1S fbfa/*.fa > fb.lst
mkdir blastOut
for i in `cat fb.lst`; do mkdir blastOut/`basename $i .fa`; done

cat << '_EOF_' > blastGsub
#LOOP
blastSome $(path1) {check in line $(path2)} {check out exists blastOut/$(root2)/q.$(root1).psl }
#ENDLOOP
'_EOF_'

cat << '_EOF_' > blastSome
#!/bin/sh
BLASTMAT=/iscratch/i/blast/data
export BLASTMAT
g=`basename $2`
f=/tmp/`basename $3`.$g
for eVal in 0.01 0.001 0.0001 0.00001 0.000001 1E-09 1E-11
do
    if /scratch/blast/blastall -M BLOSUM80 -m 0 -F no -e $eVal -p tblastn -d $1 -i $2 -o $f.8
    then
        mv $f.8 $f.1
        break;
    fi
done
if test -f $f.1
then
    if /cluster/bin/i386/blastToPsl $f.1 $f.2
    then
        liftUp -nosort -type=".psl" -pslQ -nohead $3.tmp /iscratch/i/dm1/protein.lft warn $f.2
        mv $3.tmp $3
        rm -f $f.1 $f.2
        exit 0
    fi
fi
rm -f $f.1 $f.2 $3.tmp
exit 1
'_EOF_'
chmod +x blastSome
gensub2 bug.lst fb.lst blastGsub blastSpec

ssh kk
cd /cluster/data/oryCun1/bed/tblastn.dm1FB
para create blastSpec
para try, push
# Completed: 151668 of 151668 jobs
# CPU time in finished jobs: 2932565s 48876.08m 814.60h 33.94d 0.093 y
# IO & Wait Time: 694006s 11566.77m 192.78h 8.03d 0.022 y
# Average job time: 24s 0.40m 0.01h 0.00d
# Longest job: 2721s 45.35m 0.76h 0.03d
# Submission to last job: 73860s 1231.00m 20.52h 0.85d

cat << '_EOF_' > chainGsub
#LOOP
chainSome $(path1)
#ENDLOOP
'_EOF_'

cat << '_EOF_' > chainSome
(cd $1; cat q.*.psl | simpleChain -prot -outPsl -maxGap=25000 stdin ../c.`basename $1`.psl)
'_EOF_'
chmod +x chainSome

ls -1dS `pwd`/blastOut/fb?? > chain.lst
gensub2 chain.lst single chainGsub chainSpec
para create chainSpec
# should run this on the mini-cluster or with my shove script
# so you can limit the number of jobs starting to 3 or 4
para try, push...
# Completed: 383 of 383 jobs
# CPU time in finished jobs: 327s 5.44m 0.09h 0.00d 0.000 y
# IO & Wait Time: 8218s 136.97m 2.28h 0.10d 0.000 y
# Average job time: 22s 0.37m 0.01h 0.00d
# Longest job: 54s 0.90m 0.01h 0.00d
# Submission to last job: 674s 11.23m 0.19h 0.01d
exit
# back to kkstore01
cd /cluster/data/oryCun1/bed/tblastn.dm1FB/blastOut
for i in fb??
do
    awk "(\$13 - \$12)/\$11 > 0.6 {print}" c.$i.psl > c60.$i.psl
    sort -rn c60.$i.psl | pslUniq stdin u.$i.psl
    awk "((\$1 / \$11) ) > 0.60 { print }" c60.$i.psl > m60.$i.psl
    echo $i
done
sort -T /tmp -k 14,14 -k 16,16n -k 17,17n u.*.psl m60* | uniq > /cluster/data/oryCun1/bed/tblastn.dm1FB/blastDm1FB.psl

ssh hgwdev
cd /cluster/data/oryCun1/bed/tblastn.dm1FB
hgLoadPsl oryCun1 blastDm1FB.psl
# End tblastn

# SWAP CHAINS FROM DM2, BUILD NETS ETC. (DONE 3/2/05 angie)
ssh kkstore01
mkdir /cluster/data/oryCun1/bed/blastz.dm2.swap
cd /cluster/data/oryCun1/bed/blastz.dm2.swap
doBlastzChainNet.pl -swap /cluster/data/dm2/bed/blastz.oryCun1/DEF \
    >& do.log &
tail -f do.log
# Add {chain,net}Dm2 to trackDb.ra if necessary.
# Add /usr/local/apache/htdocs/goldenPath/oryCun1/vsDm2/README.txt

#########################################################################
# BLASTZ NOTE: with the advent of Angie's script to run the
# blastz process through to chains and nets loaded into the
# database and download files prepared, it is now a juggling act
# to see which klusters are available. The particular options to
# the script to make it go to one kluster or another are to be
# determined at run-time. The typical run-times listed here will
# be a factor in your choice of kluster to operate on.
#########################################################################
# BLASTZ HUMAN Hg17
ssh kk
mkdir /cluster/data/oryCun1/bed/blastz.hg17
cd /cluster/data/oryCun1/bed/blastz.hg17

cat << '_EOF_' > DEF
# rabbit vs. human
export PATH=/usr/bin:/bin:/usr/local/bin:/cluster/bin/penn:/cluster/home/angie/schwartzbin:/cluster/home/kent/bin/i386

ALIGN=blastz-run
BLASTZ=blastz
BLASTZ_H=2000
BLASTZ_ABRIDGE_REPEATS=1

# TARGET: Rabbit
SEQ1_DIR=/panasas/store/oryCun1/nib
# not used
SEQ1_RMSK=/panasas/store/oryCun1/rmsk
# not used
SEQ1_FLAG=-rodent
SEQ1_SMSK=/panasas/store/oryCun1/linSpecRep.notInHuman
SEQ1_IN_CONTIGS=0
SEQ1_CHUNK=10000000
SEQ1_LAP=10000

# QUERY: Human
SEQ2_DIR=/scratch/hg/hg17/bothMaskedNibs
# RMSK not currently used
SEQ2_RMSK=
# FLAG not currently used
SEQ2_FLAG=
SEQ2_SMSK=/scratch/hg/hg17/linSpecRep.notInMouse
SEQ2_IN_CONTIGS=0
SEQ2_CHUNK=30000000
SEQ2_LAP=0

BASE=/cluster/data/oryCun1/bed/blastzHg17.2005_03_14

DEF=$BASE/DEF
RAW=$BASE/raw
CDBDIR=$BASE
SEQ1_LEN=$BASE/S1.len
SEQ2_LEN=$BASE/S2.len
'_EOF_'
# << keep emacs coloring happy

cp /cluster/data/oryCun1/chrom.sizes ./S1.len
sort -rn +1 /cluster/data/hg17/chrom.sizes > S2.len
# establish a screen to control this job
screen
time /cluster/bin/scripts/doBlastzChainNet.pl `pwd`/DEF > \
    blast.run.out 2>&1 &
# real    993m28.547s
# user    0m0.198s
# sys     0m0.171s
# detach from screen session: Ctrl-a Ctrl-d
# to reattach to this screen session:
ssh kksilo
screen -d -r
# STARTED - 2005-03-17 21:25
# FINISHED - 2005-03-18 14:00
# Completed: 45347 of 45347 jobs
# CPU time in finished jobs: 16921981s 282033.02m 4700.55h 195.86d 0.537 y
# IO & Wait Time: 2381711s 39695.18m 661.59h 27.57d 0.076 y
# Average job time: 426s 7.09m 0.12h 0.00d
# Longest running job: 0s 0.00m 0.00h 0.00d
# Longest finished job: 9568s 159.47m 2.66h 0.11d
# Submission to last job: 58695s 978.25m 16.30h 0.68d
# Completed: 331 of 331 jobs
# CPU time in finished jobs: 272s 4.54m 0.08h 0.00d 0.000 y
# IO & Wait Time: 1145s 19.08m 0.32h 0.01d 0.000 y
# Average job time: 4s 0.07m 0.00h 0.00d
# Longest job: 24s 0.40m 0.01h 0.00d
# Submission to last job: 265s 4.42m 0.07h 0.00d

# The kki batch doChainRun.csh appears to have failed
# due to underlying changes in the location of hg17 items
# fixup the symlinks which are in a state of flux today, then,
# to recover:
ssh kki
cd /cluster/data/oryCun1/bed/blastzHg17.2005_03_14/axtChain/run
rm -fr chain
time ./doChainRun.csh
# real    22m47.917s
# user    0m0.380s
# sys     0m0.630s
# Completed: 40 of 40 jobs
# CPU time in finished jobs: 6373s 106.22m 1.77h 0.07d 0.000 y
# IO & Wait Time: 552s 9.20m 0.15h 0.01d 0.000 y
# Average job time: 173s 2.89m 0.05h 0.00d
# Longest job: 662s 11.03m 0.18h 0.01d
# Submission to last job: 1200s 20.00m 0.33h 0.01d

# That was the last part of the chainRun step, can now continue:
ssh kksilo
cd /cluster/data/oryCun1/bed/blastzHg17.2005_03_14
time /cluster/bin/scripts/doBlastzChainNet.pl -continue chainMerge `pwd`/DEF > chainMerge.run.out 2>&1 &
# STARTED - 2005-03-18 15:00
# FINISHED 2005-03-18 16:33

# checking the numbers for sanity:
ssh hgwdev
# expect ~ 2m30s for chain measurement
time featureBits oryCun1 chainHg17
# 2596946329 bases of 2597150411 (99.992%) in intersection
time featureBits mm5 chainHg17
# 2507720521 bases of 2615483787 (95.880%) in intersection
# expect ~ 2m30s for net measurement
time featureBits oryCun1 netHg17
# 2579747741 bases of 2597150411 (99.330%) in intersection
time featureBits mm5 netHg17
# 2504056038 bases of 2615483787 (95.740%) in intersection
ssh kolossus
# expect
# ~ 20-22 minutes for the chainLink measurement
HGDB_CONF=~/.hg.conf.read-only /usr/bin/time --portability \
    featureBits oryCun1 chainHg17Link
# 966916309 bases of 2597150411 (37.230%) in intersection
HGDB_CONF=~/.hg.conf.read-only /usr/bin/time --portability \
    featureBits mm5 chainHg17Link
# 1025750185 bases of 2615483787 (39.218%) in intersection

# swap results to place oryCun1 alignments onto Hg17
time /cluster/bin/scripts/doBlastzChainNet.pl -swap `pwd`/DEF > \
    swap.run.out 2>&1 &
# STARTED - 2005-03-29 - 15:58
# FINI - 2005-03-29 - 18:48
# real    171m26.172s
# user    0m2.270s
# sys     0m0.870s
ssh kolossus
time HGDB_CONF=~/.hg.conf.read-only featureBits hg17 chainMm6Link
# 969459954 bases of 2866216770 (33.824%) in intersection
time HGDB_CONF=~/.hg.conf.read-only featureBits hg17 chainMm5Link
# 1020106336 bases of 2866216770 (35.591%) in intersection

# A measurement script to do all featureBits combinations:
cd /cluster/data/oryCun1/jkStuff
cat << '_EOF_' > netChainCheck.sh
#!/bin/sh

usage() {
    echo "usage: netChainCheck.sh <db0> <db1> <targetDb>"
    echo "    does: featureBits <db0> net<TargetDb>"
    echo "          featureBits <db1> net<TargetDb>"
    echo "    as well as the chain and chainLink tables,"
    echo "    and on the targetDb:"
    echo "          featureBits <targetDb> net<Db0>"
    echo "          featureBits <targetDb> net<Db1>"
    echo "    and the chain and chainLink tables."
    echo -e "\texample: netChainCheck.sh oryCun1 mm5 fr1"
}

doOne() {
    db=$1
    tbl=$2
    echo " featureBits $db $tbl"
    echo -en " #\t"
    time featureBits $db $tbl
}

ucFirstLetter() {
    ucString="$1"
    fc=`echo "${ucString}" | sed -e "s/\(.\).*/\1/"`
    rest=`echo "${ucString}" | sed -e "s/.\(.*\)/\1/"`
    FC=`echo "${fc}" | tr '[a-z]' '[A-Z]'`
    echo "${FC}${rest}"
}

if [ "$#" -ne 3 ]; then
    usage
    exit 255
fi

db0=$1
db1=$2
targetDb=$3
targetDB=`ucFirstLetter "${targetDb}"`
DB0=`ucFirstLetter "${db0}"`
DB1=`ucFirstLetter "${db1}"`
export db0 db1 targetDb targetDB DB0 DB1
# echo "${db0} ${db1} ${targetDb} ${targetDB} ${DB0} ${DB1}"

doOne "${db0}" net${targetDB}
doOne "${db1}" net${targetDB}
doOne "${db0}" chain${targetDB}
doOne "${db1}" chain${targetDB}
doOne "${db0}" chain${targetDB}Link
doOne "${db1}" chain${targetDB}Link

doOne ${targetDb} net${DB0}
doOne ${targetDb} net${DB1}
doOne ${targetDb} chain${DB0}
doOne ${targetDb} chain${DB1}
doOne ${targetDb} chain${DB0}Link
doOne ${targetDb} chain${DB1}Link
'_EOF_'
# << keep emacs coloring happy

# CHAIN AND NET SWAPPED HUMAN BLASTZ (WORKING 12/22/05 kate)
# Working in Brian's blastz dir (not doced).
# This procedure follows conventions in doBlastzChainNet.pl,
# so it can be used to complete the processing
ssh kki
cd /cluster/data/oryCun1/bed/zb.hg17
ln -s `pwd` /cluster/data/hg17/bed/blastz.oryCun1
mkdir -p axtChain/run/chain
ls -1S /cluster/data/oryCun1/bed/zb.hg17/axtChrom/*.axt.gz | \
    sed 's/.gz//' > axtChain/run/input.lst
cd axtChain/run

cat > chain.csh << 'EOF'
#!/bin/csh -ef
zcat $1 | \
    axtChain -verbose=0 -minScore=3000 -linearGap=medium stdin \
        /cluster/data/hg17/nib /cluster/data/oryCun1/oryCun1.2bit stdout | \
    chainAntiRepeat /cluster/data/hg17/nib /cluster/data/oryCun1/oryCun1.2bit \
        stdin $2
'EOF'
# << happy emacs

cat > gsub << 'EOF'
#LOOP
chain.csh {check in exists $(path1).gz} {check out line+ chain/$(root1).chain}
#ENDLOOP
'EOF'
# << happy emacs

chmod a+x chain.csh
gensub2 input.lst single gsub jobList
para create jobList
para try
para check
para push
# wait for completion
para time > run.time

# finish pipeline
cd /cluster/data/oryCun1/bed/zb.hg17
/cluster/bin/scripts/doBlastzChainNet.pl -verbose=2 \
    -continue=chainMerge \
    -bigClusterHub=pk \
    `pwd`/DEF >& blastz.out &

# make consistent with newer builds 2006-04-09 markd
cd /cluster/data/oryCun1/bed
mv blastz.hg17 blastz.hg17.2005.12.22
ln -s zb.hg17 blastz.hg17
cd /cluster/data/oryCun1/bed/zb.hg17/
ln -s hg17.oryCun1.all.chain.gz zb.hg17/axtChain/oryCun1.hg17.all.chain.gz
ln -s hg17.oryCun1.net.gz zb.hg17/axtChain/oryCun1.hg17.net.gz

# realigned and reloaded genbank/refseq, since alignment was never enabled
# and the old releases were dropped. (DONE 2006-04-12)

############################################################################
## create missing hg18 nets (DONE 2006-04-12 markd)
## hg18 chains exist, but are not documented in this file.
# create the net file
ssh hgwdev
cd /cluster/data/oryCun1/bed/blastz.hg18.swap/axtChain
nice netClass -verbose=0 -noAr noClass.net oryCun1 hg18 oryCun1.hg18.net
nice gzip oryCun1.hg18.net

############################################################################
## create reciprocal best nets on hg18 (2006-03-09 kate)
ssh kkstore05
cd /cluster/data/hg18/bed/blastz.oryCun1
~/kent/src/hg/utils/automation/doRecipBest.pl hg18 oryCun1 >&! rbest.log &
# 1.25 hrs

############################################################################
## BLASTZ swap from mm9 alignments (2007-11-11 - markd)
ssh hgwdev
mkdir /cluster/data/oryCun1/bed/blastz.mm9.swap
cd /cluster/data/oryCun1/bed/blastz.mm9.swap
ln -s blastz.mm9.swap ../blastz.mm9
/cluster/bin/scripts/doBlastzChainNet.pl \
    -swap /cluster/data/mm9/bed/blastz.oryCun1/DEF >& swap.out&
# fb.oryCun1.chainMm9Link.txt:
# 500397488 bases of 2076044328 (24.103%) in intersection

############################################################################
# Adding Ensembl Genes (DONE - 2008-02-22 - Hiram)
ssh kkstore04
cd /cluster/data/oryCun1
cat << '_EOF_' > oryCun1.ensGene.ra
# required db and ensVersion variables
db oryCun1
ensVersion 48
# do we need to translate geneScaffold coordinates
geneScaffolds yes
'_EOF_'
# << happy emacs
doEnsGeneUpdate.pl oryCun1.ensGene.ra

############################################################################
# TRANSMAP vertebrate.2008-05-20 build (2008-05-24 markd)

vertebrate-wide transMap alignments were built. Tracks are created and
loaded by a single Makefile. This is available from:
   svn+ssh://hgwdev.cse.ucsc.edu/projects/compbio/usr/markd/svn/projs/transMap/tags/vertebrate.2008-05-20

see doc/builds.txt for specific details.
############################################################################
############################################################################
# TRANSMAP vertebrate.2008-06-07 build (2008-06-30 markd)

vertebrate-wide transMap alignments were built. Tracks are created and
loaded by a single Makefile. This is available from:
   svn+ssh://hgwdev.cse.ucsc.edu/projects/compbio/usr/markd/svn/projs/transMap/tags/vertebrate.2008-06-30

see doc/builds.txt for specific details.
############################################################################
# oryCun1 - Rabbit - Ensembl Genes version 51 (DONE - 2008-12-03 - hiram)
ssh kolossus
cd /hive/data/genomes/oryCun1
cat << '_EOF_' > oryCun1.ensGene.ra
# required db variable
db oryCun1
# do we need to translate geneScaffold coordinates
geneScaffolds yes
# ignore genes that do not properly convert to a gene pred, and contig
# names that are not in the UCSC assembly
skipInvalid yes
# ignore the three genes that have invalid structures from Ensembl:
# 24994: ENSOCUT00000009485 no exonFrame on CDS exon 9
# 26897: ENSOCUT00000004627 no exonFrame on CDS exon 3
# 32794: ENSOCUT00000014840 no exonFrame on CDS exon 3
'_EOF_'
# << happy emacs
doEnsGeneUpdate.pl -ensVersion=51 oryCun1.ensGene.ra
ssh hgwdev
cd /hive/data/genomes/oryCun1/bed/ensGene.51
featureBits oryCun1 ensGene
# 22733387 bases of 2076044328 (1.095%) in intersection
#  *** All done!  (through the 'makeDoc' step)
#  *** Steps were performed in /hive/data/genomes/oryCun1/bed/ensGene.51

############################################################################
############################################################################
# TRANSMAP vertebrate.2009-07-01 build (2009-07-21 markd)

vertebrate-wide transMap alignments were built. Tracks are created and
loaded by a single Makefile. This is available from:
   svn+ssh://hgwdev.cse.ucsc.edu/projects/compbio/usr/markd/svn/projs/transMap/tags/vertebrate.2009-07-01

see doc/builds.txt for specific details.
############################################################################
# LIFTOVER TO oryCun2 (DONE - 2010-01-19,22 - Hiram )
cd /hive/data/genomes/oryCun1
# same repMatch as for oryCun2:
blat oryCun1.2bit /dev/null /dev/null -tileSize=11 \
    -makeOoc=jkStuff/oryCun1.11.ooc -repMatch=920
mkdir /hive/data/genomes/oryCun1/bed/blat.oryCun2.2010-01-19
cd /hive/data/genomes/oryCun1/bed/blat.oryCun2.2010-01-19
# -debug run to create run dir, preview scripts...
doSameSpeciesLiftOver.pl -debug \
    -ooc=/hive/data/genomes/oryCun1/jkStuff/oryCun1.11.ooc \
    oryCun1 oryCun2
# Real run:
doSameSpeciesLiftOver.pl -verbose=2 \
    -bigClusterHub=pk -dbHost=hgwdev -workhorse=hgwdev \
    -ooc=/hive/data/genomes/oryCun1/jkStuff/oryCun1.11.ooc \
    oryCun1 oryCun2 > do.log 2>&1
# power failures interrupted the align step, finished manually, then:
doSameSpeciesLiftOver.pl -verbose=2 -buildDir=`pwd` \
    -continue=chain -bigClusterHub=pk -dbHost=hgwdev -workhorse=hgwdev \
    -ooc=/hive/data/genomes/oryCun1/jkStuff/oryCun1.11.ooc \
    oryCun1 oryCun2 > chain.log 2>&1
# real    150m41.538s
# failed due to out of disk space on /scratch/tmp/ on hgwdev
# go to /hive/data/genomes/oryCun1/bed/blat.oryCun2.2010-01-19/run.chain
# and fix the doNet.csh script to use /data/tmp/ instead and run that.
# then:
doSameSpeciesLiftOver.pl -verbose=2 -buildDir=`pwd` \
    -continue=load -bigClusterHub=pk -dbHost=hgwdev -workhorse=hgwdev \
    -ooc=/hive/data/genomes/oryCun1/jkStuff/oryCun1.11.ooc \
    oryCun1 oryCun2 > load.log 2>&1
#############################################################################
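# A quick spot-check of the resulting liftOver chain (a hedged sketch; the
# chain file name and location below follow the usual doSameSpeciesLiftOver.pl
# conventions and are assumed here, not verified):
cd /hive/data/genomes/oryCun1
# build a tiny test BED from the first few scaffolds (all scaffolds are >= 1000 bp)
head -3 chrom.sizes | awk '{printf "%s\t0\t1000\n", $1}' > /tmp/liftTest.bed
liftOver /tmp/liftTest.bed \
    bed/blat.oryCun2.2010-01-19/oryCun1ToOryCun2.over.chain.gz \
    /tmp/liftTest.oryCun2.bed /tmp/liftTest.unmapped
wc -l /tmp/liftTest.oryCun2.bed /tmp/liftTest.unmapped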