
Genomic Data Processing 32: Avocado Run Log (Artificial Dataset)


The main requirement is that the input data be correct: if records are missing in the middle, avocado generally will not succeed.
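To make that requirement concrete, one quick sanity check is to look for stretches of a contig that no read covers before handing the data to avocado. The sketch below is my own illustration rather than anything in avocado; it assumes ADAM 0.18.x's sc.loadAlignments and simply reports gaps between consecutive read intervals on each contig.

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.bdgenomics.adam.rdd.ADAMContext._
import org.bdgenomics.formats.avro.AlignmentRecord

// Illustrative helper, not part of avocado: flags uncovered regions between
// consecutive mapped reads, since such holes usually make the caller fail.
object CheckReadGaps {
  def gaps(reads: RDD[AlignmentRecord]): Array[(String, Long, Long)] = {
    reads.filter(r => r.getReadMapped != null && r.getReadMapped.booleanValue)
      .map(r => (r.getContig.getContigName.toString, (r.getStart.longValue, r.getEnd.longValue)))
      .groupByKey()
      .flatMap { case (contig, intervals) =>
        val sorted = intervals.toSeq.sortBy(_._1)
        sorted.zip(sorted.drop(1)).collect {
          // a gap is any space between the end of one read and the start of the next
          case ((_, prevEnd), (nextStart, _)) if nextStart > prevEnd => (contig, prevEnd, nextStart)
        }
      }
      .collect()
  }

  def main(args: Array[String]) {
    val sc = new SparkContext("local[2]", "checkReadGaps")
    val reads = sc.loadAlignments(args(0)) // e.g. artificial.realigned.sam
    gaps(reads).foreach { case (contig, from, to) =>
      println("uncovered on " + contig + ": [" + from + ", " + to + ")")
    }
    sc.stop()
  }
}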
1. Code:
Modified Avocado.scala:

/**
 * Licensed to Big Data Genomics (BDG) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The BDG licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.bdgenomics.avocado.cli

import java.nio.file.Files
import org.apache.commons.configuration.HierarchicalConfiguration
import org.apache.commons.configuration.plist.PropertyListConfiguration
import org.apache.hadoop.mapreduce.Job
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkContext, Logging}
import org.kohsuke.args4j.{Option => option, Argument}
import org.bdgenomics.adam.models.{VariantContext, ReferenceRegion}
import org.bdgenomics.adam.rdd.ADAMContext._
import org.bdgenomics.avocado.Timers._
import org.bdgenomics.avocado.discovery.Explore
import org.bdgenomics.avocado.genotyping.CallGenotypes
import org.bdgenomics.avocado.input.Input
import org.bdgenomics.avocado.models.Observation
import org.bdgenomics.avocado.preprocessing.Preprocessor
import org.bdgenomics.avocado.postprocessing.Postprocessor
import org.bdgenomics.avocado.stats.AvocadoConfigAndStats
import org.bdgenomics.formats.avro.{
  Variant, AlignmentRecord, NucleotideContigFragment, Genotype
}
import org.bdgenomics.utils.cli.{
  BDGSparkCommand, BDGCommandCompanion, ParquetArgs, Args4j, Args4jBase
}
import org.bdgenomics.utils.instrumentation._

object Avocado extends BDGCommandCompanion {

  val commandName = "Avocado"
  val commandDescription = "Call variants using avocado and the ADAM preprocessing pipeline."

  def apply(args: Array[String]) = {
    new Avocado(Args4j[AvocadoArgs](args))
  }
}

class AvocadoArgs extends Args4jBase with ParquetArgs {
  @Argument(metaVar = "READS",required = true,usage = "ADAM read-oriented data",index = 0)
  var readInput: String = _

  @Argument(metaVar = "REFERENCE",usage = "ADAM or FASTA reference genome data",index = 1)
  var referenceInput: String = _

  @Argument(metaVar = "VARIANTS",usage = "ADAM variant output",index = 2)
  var variantOutput: String = _

  @Argument(metaVar = "CONFIG",usage = "avocado configuration file",index = 3)
  var configFile: String = _

  @option(name = "-debug",usage = "If set,prints a higher level of debug output.")
  var debug = false

  @option(required = false,name = "-fragment_length",usage = "Sets maximum fragment length. Default value is 10,000. Values greater than 1e9 should be avoided.")
  var fragmentLength: Long = 10000L
}

class Avocado(protected val args: AvocadoArgs) extends BDGSparkCommand[AvocadoArgs] with Logging {

  // companion object to this class - needed for BDGCommand framework
  val companion = Avocado
  /** *********add by xubo 20160528 *******************/
  println("companion:" + companion)
  println("companion.commandName:" + companion.commandName)
  println("companion.commandDescription:" + companion.commandDescription)
  // println("companion.commandName:"+companion.)
  println("AvocadoArgs:")
  println("args:" + args)
  println("args.configFile:" + args.configFile)
  println("args.debug:" + args.debug)
  println("args.fragmentLength:" + args.fragmentLength)
  println("args.readInput:" + args.readInput)
  println("args.referenceInput:" + args.referenceInput)
  println("args.variantOutput:" + args.variantOutput)
  println("test endn")
  /** *********add by xubo 20160528 *******************/
  // get config off classpath and load into a temp file...
  val stream = Thread.currentThread.getContextClassLoader.getResourceAsStream(args.configFile)
  val tempPath = Files.createTempDirectory("config")
  val tempFilePath = tempPath.resolve("temp.properties")
  Files.copy(stream, tempFilePath)

  /** *********add by xubo 20160528 *******************/
  println("stream:" + stream)
  println("tempFilePath:" + tempFilePath)
  /** *********add by xubo 20160528 *******************/

  // load config
  val config: HierarchicalConfiguration = new PropertyListConfiguration(tempFilePath.toFile)
  val preprocessorNames = getStringArrayFromConfig("preprocessorNames")
  val preprocessorAlgorithms = getStringArrayFromConfig("preprocessorAlgorithms")
  assert(preprocessorNames.length == preprocessorAlgorithms.length, "Must have a 1-to-1 mapping between preprocessor names and algorithms.")
  val preprocessingStagesZippedWithNames = preprocessorNames.zip(preprocessorAlgorithms)

  val explorerName = config.getString("explorerName")
  val explorerAlgorithm = config.getString("explorerAlgorithm")

  val genotyperName = config.getString("genotyperName")
  val genotyperAlgorithm = config.getString("genotyperAlgorithm")

  val postprocessorNames = getStringArrayFromConfig("postprocessorNames")
  val postprocessorAlgorithms = getStringArrayFromConfig("postprocessorAlgorithms")
  assert(postprocessorNames.length == postprocessorAlgorithms.length, "Must have a 1-to-1 mapping between postprocessor names and algorithms.")
  val postprocessorsZipped = postprocessorNames.zip(postprocessorAlgorithms)

  val debug = args.debug
  /** *********add by xubo 20160528 *******************/
  println("config:" + config)
  println("preprocessorNames:" + preprocessorNames)
  preprocessorNames.foreach(println)
  println("preprocessorAlgorithms:" + preprocessorAlgorithms)
  preprocessorAlgorithms.foreach(println)
  println("preprocessingStagesZippedWithNames:" + preprocessingStagesZippedWithNames)
  preprocessorNames.foreach(println)
  println("explorerName:" + explorerName)
  println("explorerAlgorithm:" + explorerAlgorithm)
  println("genotyperName:" + genotyperName)
  println("genotyperAlgorithm:" + genotyperAlgorithm)
  println("postprocessorNames:" + postprocessorNames)
  postprocessorNames.foreach(println)
  println("postprocessorAlgorithms:" + postprocessorAlgorithms)
  postprocessorAlgorithms.foreach(println)
  println("postprocessorsZipped:" + postprocessorsZipped)
  postprocessorsZipped.foreach(println)
  println("stream:" + stream)
  println("stream:" + stream)
  println("stream:" + stream)
  println("stream:" + stream)

  /** *********add by xubo 20160528 *******************/
  private def getStringArrayFromConfig(name: String): Array[String] = {
    config.getStringArray(name).map(_.toString)
  }

  /**
   * Applies several pre-processing steps to the read pipeline. Currently, these are the default
   * steps in the ADAM processing pipeline.
   *
   * @param reads RDD of reads to process.
   * @return RDD containing reads that have been sorted and deduped.
   */
  def preProcessReads(reads: RDD[AlignmentRecord]): RDD[AlignmentRecord] = PreprocessReads.time {
    var processedReads = reads //.cache

    if (debug) {
      log.info("avocado: Preprocessing " + processedReads.count + " reads.")
    }

    // loop over preprocessing stages and apply
    preprocessingStagesZippedWithNames.foreach(p => {
      val (stageName, stageAlgorithm) = p

      log.info("avocado: Running " + stageName)

      // run this preprocessing stage
      processedReads = Preprocessor(processedReads, stageName, stageAlgorithm, config)
    })

    // return processed reads
    processedReads
  }

  /**
   * Applies variant calling algorithms to reads and pileups. Reduces down and returns called variants.
   *
   * @param reads
   * @param stats
   * @return Joined output of variant calling algorithms.
   */
  def callVariants(reads: RDD[AlignmentRecord], stats: AvocadoConfigAndStats): RDD[VariantContext] = CallVariants.time {
    val discoveries: RDD[Observation] = Explore(explorerAlgorithm, explorerName, reads, stats, config)
    CallGenotypes(genotyperAlgorithm, genotyperName, discoveries, config)
  }

  /**
   * Applies variant post-processing methods to called variants. Post-processing can
   * include methods which modify the information in variant calls, or alternatively,
   * methods that filter out spurious variant calls.
   *
   * @param variants RDD of variants to process.
   * @return Post-processed variants.
   */
  def postProcessVariants(variants: RDD[VariantContext], stats: AvocadoConfigAndStats): RDD[VariantContext] = PostprocessVariants.time {
    var rdd = variants

    // loop over post processing steps
    postprocessorsZipped.foreach(p => {
      val (ppStageName, ppAlgorithm) = p

      rdd = Postprocessor(rdd, ppStageName, ppAlgorithm, config)
    })

    rdd
  }

  /**
   * Main method. Implements body of variant caller. SparkContext and Hadoop Job are provided
   * by the ADAMSparkCommand shell.
   *
   * @param sc SparkContext for RDDs.
   * @param job Hadoop Job container for file I/O.
   */

  def run(sc: SparkContext) {

    log.info("Starting avocado...")

    // load in reference from ADAM file
    val reference: RDD[NucleotideContigFragment] = LoadContigs.time {
      sc.loadSequence(args.referenceInput, fragmentLength = args.fragmentLength)
    }

    log.info("Loading reads in from " + args.readInput)
    // load in reads from ADAM file
    val reads: RDD[AlignmentRecord] = LoadReads.time {
      Input(sc, args.readInput, reference, config)
    }

    /** *********add by xubo 20160528 *******************/
    println("readInput:")
    reads.foreach(println)
    /** *********add by xubo 20160528 *******************/
    // create stats/config item
    val stats = new AvocadoConfigAndStats(sc, args.debug, reference)

    /** *********add by xubo 20160528 *******************/
    println("stats:" + stats)
    println("stats.contigLengths:" + stats.contigLengths)
    // println("stats.coverage:" + stats.coverage)
    println("stats.debug:" + stats.debug)
    println("stats.referenceObservations:" + stats.referenceObservations)
    // println("stats.referenceSeq:" + stats.referenceSeq)
    // stats.referenceSeq.foreach(println)
    println("stats.samplesInDataset:" + stats.samplesInDataset)
    stats.samplesInDataset.foreach(println)
    println("stats.sequenceDict:" + stats.sequenceDict)
    // println("stats.contigLengths:"+stats)
    /** *********add by xubo 20160528 *******************/


    // apply read translation steps
    log.info("Processing reads.")
    var cleanedReads = preProcessReads(reads)

    /** *********add by xubo 20160528 *******************/
    println("cleanedReads:" + cleanedReads)
    println("cleanedReads.count:" + cleanedReads.count())
    cleanedReads.foreach(println)
    val cleanedReads2 = cleanedReads.map { each =>
      each.setRecordGroupSample("hello")
      each
    }
    // cleanedReads.adamGetReadGroupDictionary()
    // cleanedReads.getSequenc
    // cleanedReads.foreach(each => println(each.getSequence + " " + each.referenceLength))
    println("cleanedReads2:")
    cleanedReads2.foreach(println)

    /** *********add by xubo 20160528 *******************/

    // call variants on filtered reads and pileups
    log.info("Calling variants.")
    // val calledVariants = callVariants(cleanedReads, stats)
    val calledVariants = callVariants(cleanedReads2, stats)

    /** *********add by xubo 20160528 *******************/
    println("calledVariants:" + calledVariants)
    println("calledVariants.count:" + calledVariants.count())
    // calledVariants.take(10).foreach(each=>println(each.databases+" "+each.position+" "+each.variant+" "=each.genotypes))
    // calledVariants.take(10).foreach(println)

    /** *********add by xubo 20160528 *******************/


    // post process variants
    log.info("Post-processing variants.")
    val processedGenotypes: RDD[Genotype] = postProcessVariants(calledVariants, stats).flatMap(variantContext => variantContext.genotypes)

    /** *********add by xubo 20160528 *******************/
    println("processedGenotypes:" + calledVariants)
    println("processedGenotypes.count:" + processedGenotypes.count())
    processedGenotypes.foreach(println)

    /** *********add by xubo 20160528 *******************/

    // save variants to output file
    log.info("Writing calls to disk.")
    SaveVariants.time {
      processedGenotypes.adamParquetSave(args.variantOutput, args.blockSize, args.pageSize, args.compressionCodec, args.disableDictionaryEncoding)

      /** ***********add ***************/
      processedGenotypes.foreach { each => println("processedGenotypes:" + each) }
      // processedGenotypes.
    }
  }
}
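For reference, the stage configuration above is read from basic.properties (args.configFile in the log below). Judging from the values the debug output prints (readExplorer / ReadExplorer, biallelicGenotyper / BiallelicGenotyper, nonRef / filterReferenceCalls, with both preprocessor arrays empty), the file presumably contains something along these lines; the exact old-style plist syntax accepted by PropertyListConfiguration is my reconstruction, not a copy of the file shipped with avocado:

{
    explorerName = readExplorer;
    explorerAlgorithm = ReadExplorer;
    genotyperName = biallelicGenotyper;
    genotyperAlgorithm = BiallelicGenotyper;
    postprocessorNames = ( nonRef );
    postprocessorAlgorithms = ( filterReferenceCalls );
}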

AvocadoSuite itself has not been uploaded; it lives in the package org.bdgenomics.avocado.cli of my local IDEA project.
The test data is:

val samFile = "artificial.realigned.sam"
val faFile = "artificial.fa"
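
Since AvocadoSuite is not included, here is a minimal sketch of what such a driver could look like. It assumes the two files sit on the test classpath (which is what the /D:/all/idea/.../test-classes/... paths in the log suggest) and that BDGSparkCommand exposes a no-argument run() that creates the SparkContext; the class name and output-directory naming are purely illustrative.

package org.bdgenomics.avocado.cli

import java.text.SimpleDateFormat
import java.util.Date

// Hypothetical driver; the real AvocadoSuite was not published with this post.
object RunAvocadoArtificial {
  def main(args: Array[String]) {
    val samFile = "artificial.realigned.sam"
    val faFile = "artificial.fa"

    // Resolve the test resources from the classpath, as the logged paths suggest.
    def resource(name: String): String =
      Thread.currentThread.getContextClassLoader.getResource(name).getFile

    // Timestamped output directory, mirroring the var20160528184213248 directory in the log.
    val outputDir = "target/var" + new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date())

    // READS REFERENCE VARIANTS CONFIG, in the order declared by AvocadoArgs.
    val avocadoArgs = Array(resource(samFile), resource(faFile), outputDir, "basic.properties")

    // Run with -Dspark.master=local[2] (or via spark-submit) so the SparkContext can start.
    Avocado(avocadoArgs).run() // assumption: BDGSparkCommand.run() sets up the SparkContext
  }
}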

2. Run log:

D:\1win7\java\jdk\bin\java -Didea.launcher.port=7532 "-Didea.launcher.bin.path=D:\1win7\idea\IntelliJ IDEA Community Edition 15.0.4\bin" -Dfile.encoding=UTF-8 -classpath "<JDK and Scala 2.10 runtime, Hadoop 2.2.0, Spark 1.4.1, ADAM 0.18.2, bdg-utils 0.2.3, the avocado-core/avocado-cli classes and the remaining Maven dependency jars>" com.intellij.rt.execution.application.AppMain org.bdgenomics.avocado.cli.AvocadoSuite
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/D:/1win7/java/apache-maven-3.3.9-bin/Repository/org/slf4j/slf4j-log4j12/1.7.12/slf4j-log4j12-1.7.12.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/D:/1win7/java/apache-maven-3.3.9-bin/Repository/org/bdgenomics/adam/adam-cli_2.10/0.18.2/adam-cli_2.10-0.18.2.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
2016-05-28 18:42:08 WARN  NativeCodeLoader:62 - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
/D:/all/idea/avocado-master/avocado-cli/target/scala-2.10.3/test-classes/artificial.realigned.sam
{"readNum": 0,"contig": {"contigName": "artificial","contigLength": 1120,"contigMD5": null,"referenceURL": null,"assembly": null,"species": null,"referenceIndex": 0},"start": 0,"oldPosition": 10,"end": 70,"mapq": 100,"readName": "read2","sequence": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGGGGGGGGGGAAAAAA","qual": "IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII","cigar": "34M10D26M","oldCigar": "44M10D16M","basesTrimmedFromStart": 0,"basesTrimmedFromEnd": 0,"readPaired": true,"properPair": true,"readMapped": true,"mateMapped": true,"failedVendorQualityChecks": false,"duplicateRead": false,"readNegativeStrand": false,"mateNegativeStrand": false,"primaryAlignment": true,"secondaryAlignment": false,"supplementaryAlignment": false,"mismatchingPositions": null,"origQual": null,"attributes": "XS:i:70tAS:i:70tMQ:i:90tOP:i:11tNM:i:10tRG:Z:read_group_idtOC:Z:44M10D16M","recordGroupName": "read_group_id","recordGroupSequencingCenter": null,"recordGroupDescription": null,"recordGroupRunDateEpoch": null,"recordGroupFlowOrder": null,"recordGroupKeySequence": null,"recordGroupLibrary": "library","recordGroupPredictedMedianInsertSize": null,"recordGroupPlatform": "illumina","recordGroupPlatformUnit": "platform_unit","recordGroupSample": "sequencing_center","mateAlignmentStart": 110,"mateAlignmentEnd": null,"mateContig": {"contigName": "artificial","inferredInsertSize": 111}
{"readNum": 0,"start": 5,"oldPosition": null,"end": 75,"mapq": 90,"readName": "read1","sequence": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA","cigar": "29M10D31M","oldCigar": null,"mismatchingPositions": "29^GGGGGGGGGG10G0G0G0G0G0G0G0G0G0G11","attributes": "XS:i:70tAS:i:70tMQ:i:90tNM:i:20tRG:Z:read_group_id","mateAlignmentStart": 105,"inferredInsertSize": 101}
{"readNum": 0,"start": 10,"oldPosition": 20,"end": 80,"readName": "read4","sequence": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGGGGGGGGGGAAAAAAAAAAAAAAAA","cigar": "24M10D36M","oldCigar": "34M10D26M","attributes": "XS:i:70tAS:i:70tMQ:i:90tOP:i:21tNM:i:10tRG:Z:read_group_idtOC:Z:34M10D26M","mateAlignmentStart": 120,"start": 15,"end": 85,"readName": "read3","cigar": "19M10D41M","mismatchingPositions": "19^GGGGGGGGGG10G0G0G0G0G0G0G0G0G0G21","mateAlignmentStart": 115,"start": 25,"end": 95,"readName": "read5","cigar": "9M10D51M","mismatchingPositions": "9^GGGGGGGGGG10G0G0G0G0G0G0G0G0G0G31","mateAlignmentStart": 125,"inferredInsertSize": 101}
{"readNum": 1,"start": 105,"end": 165,"cigar": "60M","mismatchingPositions": "60","attributes": "XS:i:70tAS:i:70tMQ:i:90tNM:i:0tRG:Z:read_group_id","mateAlignmentStart": 5,"inferredInsertSize": -101}
{"readNum": 1,"start": 110,"end": 170,"attributes": "XS:i:70tAS:i:70tMQ:i:100tNM:i:0tRG:Z:read_group_id","mateAlignmentStart": 0,"inferredInsertSize": -111}
{"readNum": 1,"start": 115,"end": 175,"mateAlignmentStart": 15,"start": 120,"end": 180,"mateAlignmentStart": 10,"start": 125,"end": 185,"mateAlignmentStart": 25,"inferredInsertSize": -101}
faLoad:
/D:/all/idea/avocado-master/avocado-cli/target/scala-2.10.3/test-classes/artificial.fa
{"contig": {"contigName": "artificial","referenceIndex": null},"description": "fasta","fragmentSequence": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGGGGGGGGGGAAAAAAAAAAGGGGGGGGGGAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA","fragmentNumber": 0,"fragmentStartPosition": 0,"fragmentLength": 1120,"numberOfFragmentsInContig": 1}
C:\Users\xubo\AppData\Local\Temp\83178325081869260/var
/D:/all/idea/avocado-master/avocado-cli/target/scala-2.10.3/test-classes/basic.properties
(%s,123)
D:\all\idea\avocado-master\avocado-cli\src\test\resources\output\var20160528184213248
companion:org.bdgenomics.avocado.cli.Avocado$@3e4e92e3
companion.commandName:Avocado
companion.commandDescription:Call variants using avocado and the ADAM preprocessing pipeline.
AvocadoArgs:
args:org.bdgenomics.avocado.cli.AvocadoArgs@188d950a
args.configFile:basic.properties
args.debug:false
args.fragmentLength:10000
args.readInput:/D:/all/idea/avocado-master/avocado-cli/target/scala-2.10.3/test-classes/artificial.realigned.sam
args.referenceInput:/D:/all/idea/avocado-master/avocado-cli/target/scala-2.10.3/test-classes/artificial.fa
args.variantOutput:D:\all\idea\avocado-master\avocado-cli\src\test\resources\output\var20160528184213248
test end

stream:java.io.BufferedInputStream@7733d01d
tempFilePath:C:\Users\xubo\AppData\Local\Temp\config4218631409097485876\temp.properties
config:org.apache.commons.configuration.plist.PropertyListConfiguration@305e0ade
preprocessorNames:[Ljava.lang.String;@6e1b8da0
preprocessorAlgorithms:[Ljava.lang.String;@58424e18
preprocessingStagesZippedWithNames:[Lscala.Tuple2;@6fc8f5d9
explorerName:readExplorer
explorerAlgorithm:ReadExplorer
genotyperName:biallelicGenotyper
genotyperAlgorithm:BiallelicGenotyper
postprocessorNames:[Ljava.lang.String;@65fcd178
nonRef
postprocessorAlgorithms:[Ljava.lang.String;@7230adcb
filterReferenceCalls
postprocessorsZipped:[Lscala.Tuple2;@deacdcc
(nonRef,filterReferenceCalls)
stream:java.io.BufferedInputStream@7733d01d
stream:java.io.BufferedInputStream@7733d01d
stream:java.io.BufferedInputStream@7733d01d
stream:java.io.BufferedInputStream@7733d01d
Loading reads in from /D:/all/idea/avocado-master/avocado-cli/target/scala-2.10.3/test-classes/artificial.realigned.sam
readInput:
{"readNum": 0,"inferredInsertSize": -101}
stats:org.bdgenomics.avocado.stats.AvocadoConfigAndStats@32b9c6f7
stats.contigLengths:Map(artificial -> 1120)
stats.debug:false
stats.referenceObservations:MapPartitionsRDD[26] at flatMap at SliceReference.scala:28
stats.samplesInDataset:[Ljava.lang.String;@551a9433
sequencing_center
stats.sequenceDict:SequenceDictionary{
artificial->1120}
cleanedReads:MapPartitionsRDD[21] at map at ADAMContext.scala:289
cleanedReads.count:10
{"readNum": 0,"inferredInsertSize": -101}
cleanedReads2:
{"readNum": 0,"recordGroupSample": "hello","inferredInsertSize": -101}
calledVariants:InstrumentedRDD[41] at mapPartitions at BiallelicGenotyper.scala:398
calledVariants.count:1110
processedGenotypes:InstrumentedRDD[41] at mapPartitions at BiallelicGenotyper.scala:398
processedGenotypes.count:11
{"variant": {"variantErrorProbability": null,"start": 33,"end": 44,"referenceAllele": "AGGGGGGGGGG","alternateAllele": "A","svAllele": null,"isSomatic": false},"variantCallingAnnotations": {"variantIsPassing": null,"variantFilters": [],"downsampled": null,"baseQRankSum": null,"fisherStrandBiasPValue": "Infinity","rmsMapQ": 94.12757,"mapq0Reads": null,"mqRankSum": null,"readPositionRankSum": null,"genotypePriors": [],"genotypePosteriors": [],"vqslod": null,"culprit": null,"attributes": {}},"sampleId": "hello","sampleDescription": null,"processingDescription": null,"alleles": ["Ref","Alt"],"expectedAlleleDosage": null,"referenceReadDepth": 0,"alternateReadDepth": 5,"readDepth": 5,"minReadDepth": null,"genotypeQuality": 18,"genotypeLikelihoods": [-1.1486835E-6,-3.465736,-77.136604],"nonReferenceLikelihoods": [-1.1486835E-6,"strandBiasComponents": [],"splitFromMultiAllelic": false,"isPhased": false,"phaseSetId": null,"phaseQuality": null}
{"variant": {"variantErrorProbability": null,"start": 54,"end": 55,"referenceAllele": "G","mqRankSum": -1.7320508,"referenceReadDepth": 2,"alternateReadDepth": 3,"genotypeQuality": 2147483647,"genotypeLikelihoods": [-32.23619,-44.90041],"nonReferenceLikelihoods": [-32.23619,"start": 55,"end": 56,"start": 56,"end": 57,"start": 57,"end": 58,"start": 58,"end": 59,"start": 59,"end": 60,"start": 60,"end": 61,"start": 61,"end": 62,"start": 62,"end": 63,"start": 63,"end": 64,"phaseQuality": null}
SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
processedGenotypes:{"variant": {"variantErrorProbability": null,"phaseQuality": null}
processedGenotypes:{"variant": {"variantErrorProbability": null,"phaseQuality": null}
read parquet:
{"variant": {"variantErrorProbability": null,"phaseQuality": null}
*************end*************

Process finished with exit code 0
