htsjdk.samtools.util.Histogram.getSumOfValues()方法的使用及代码示例

x33g5p2x  于2022-01-20 转载在 其他  
字(10.6k)|赞(0)|评价(0)|浏览(148)

本文整理了Java中htsjdk.samtools.util.Histogram.getSumOfValues()方法的一些代码示例,展示了Histogram.getSumOfValues()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Histogram.getSumOfValues()方法的具体详情如下:
包路径:htsjdk.samtools.util.Histogram
类名称:Histogram
方法名:getSumOfValues

Histogram.getSumOfValues介绍

[英]Returns the sum of the number of entries in each bin.
[中]返回每个箱子中条目数的总和。

代码示例

代码示例来源:origin: com.github.broadinstitute/picard

/**
 * Returns the total number of optical duplicate clusters, summed over all libraries.
 */
public long getNumberOfOpticalDuplicateClusters() {
    // The histogram stores counts as doubles; truncate the sum to a long for callers.
    final double total = this.opticalDuplicatesByLibraryId.getSumOfValues();
    return (long) total;
}
}

代码示例来源:origin: broadinstitute/picard

/**
 * Returns the total number of optical duplicate clusters, summed over all libraries.
 */
public long getNumberOfOpticalDuplicateClusters() {
    // The histogram stores counts as doubles; truncate the sum to a long for callers.
    final double total = this.opticalDuplicatesByLibraryId.getSumOfValues();
    return (long) total;
}
}

代码示例来源:origin: broadinstitute/picard

/**
 * Computes the final conversion-rate and coverage summary fields once all
 * reads have been accumulated into the cytosine/CpG counters and histograms.
 */
public void finish() {
  // Non-CpG cytosine conversion rate; guard the zero-denominator case.
  if (nCytoTotal == 0) {
    cytoConversionRate = 0;
  } else {
    cytoConversionRate = nCytoConverted / (double) nCytoTotal;
  }

  // CpG counts are the summed bin values of the respective histograms
  // (getSumOfValues() returns a double; truncate to int as before).
  nCpgSeen = (int) cpgTotal.getSumOfValues();
  nCpgConverted = (int) cpgConverted.getSumOfValues();
  if (nCpgSeen == 0) {
    cpgConversionRate = 0;
  } else {
    cpgConversionRate = nCpgConverted / (double) nCpgSeen;
  }

  // Coverage statistics taken from the CpG total histogram.
  coverageMean = cpgTotal.getMeanBinSize();
  coverageMedian = (int) cpgTotal.getMedianBinSize();
}

代码示例来源:origin: broadinstitute/picard

/**
 * Returns the total number of optical duplicates recorded across all libraries.
 * package-visible for testing
 */
long numOpticalDuplicates() {
  // getSumOfValues() returns a double; truncate to long for the caller.
  final double sum = this.libraryIdGenerator.getOpticalDuplicatesByLibraryIdMap().getSumOfValues();
  return (long) sum;
}

代码示例来源:origin: com.github.broadinstitute/picard

/**
 * Returns the total number of optical duplicates recorded across all libraries.
 * package-visible for testing
 */
long numOpticalDuplicates() {
  // getSumOfValues() returns a double; truncate to long for the caller.
  final double sum = this.libraryIdGenerator.getOpticalDuplicatesByLibraryIdMap().getSumOfValues();
  return (long) sum;
}

代码示例来源:origin: com.github.samtools/htsjdk

/**
 * Returns the mean number of entries per bin: the total of all bin values
 * divided by the number of bins.
 * NOTE(review): for an empty histogram this evaluates 0.0/0 and returns NaN —
 * callers appear to check for that; confirm.
 */
public double getMeanBinSize() {
  final double totalEntries = getSumOfValues();
  return totalEntries / size();
}

代码示例来源:origin: org.seqdoop/htsjdk

/**
 * Returns the mean number of entries per bin: the total of all bin values
 * divided by the number of bins.
 * NOTE(review): for an empty histogram this evaluates 0.0/0 and returns NaN —
 * callers appear to check for that; confirm.
 */
public double getMeanBinSize() {
  final double totalEntries = getSumOfValues();
  return totalEntries / size();
}

代码示例来源:origin: samtools/htsjdk

/**
 * Returns the mean number of entries per bin: the total of all bin values
 * divided by the number of bins.
 * NOTE(review): for an empty histogram this evaluates 0.0/0 and returns NaN —
 * callers appear to check for that; confirm.
 */
public double getMeanBinSize() {
  final double totalEntries = getSumOfValues();
  return totalEntries / size();
}

代码示例来源:origin: com.github.broadinstitute/picard

/**
 * Computes the final conversion-rate and coverage summary fields once all
 * reads have been accumulated into the cytosine/CpG counters and histograms.
 */
public void finish() {
  // Non-CpG cytosine conversion rate; guard the zero-denominator case.
  if (nCytoTotal == 0) {
    cytoConversionRate = 0;
  } else {
    cytoConversionRate = nCytoConverted / (double) nCytoTotal;
  }

  // CpG counts are the summed bin values of the respective histograms
  // (getSumOfValues() returns a double; truncate to int as before).
  nCpgSeen = (int) cpgTotal.getSumOfValues();
  nCpgConverted = (int) cpgConverted.getSumOfValues();
  if (nCpgSeen == 0) {
    cpgConversionRate = 0;
  } else {
    cpgConversionRate = nCpgConverted / (double) nCpgSeen;
  }

  // Coverage statistics taken from the CpG total histogram.
  coverageMean = cpgTotal.getMeanBinSize();
  coverageMedian = (int) cpgTotal.getMedianBinSize();
}

代码示例来源:origin: broadinstitute/picard

/**
 * Computes the Shannon entropy (base 4) of the observed base distribution,
 * interpretable as the effective number of distinct DNA bases.
 *
 * @param observations histogram of base observations; bin values are counts
 * @return the entropy converted to log base 4 (dividing nats by ln 4)
 */
private double effectiveNumberOfBases(Histogram<?> observations) {
    final double totalObservations = observations.getSumOfValues();

    // Shannon entropy in nats. Guard p == 0: Math.log(0) is -Infinity, so
    // -0 * Math.log(0) is NaN and would poison the whole sum. By the usual
    // convention, a zero-probability bin contributes 0 to the entropy.
    final double entropyBaseE = observations.values().stream()
        .mapToDouble(v -> {
            final double p = v.getValue() / totalObservations;
            return p > 0 ? -p * Math.log(p) : 0.0;
        })
        .sum();

    // Convert to log base 4 so that the entropy is a measure of the
    // effective number of DNA bases. (Using log(2.0) would give bits.)
    return entropyBaseE / MathUtil.LOG_4_BASE_E;
  }
}

代码示例来源:origin: com.github.broadinstitute/picard

/**
 * Computes the Shannon entropy (base 4) of the observed base distribution,
 * interpretable as the effective number of distinct DNA bases.
 *
 * @param observations histogram of base observations; bin values are counts
 * @return the entropy converted to log base 4 (dividing nats by ln 4)
 */
private double effectiveNumberOfBases(Histogram<?> observations) {
    final double totalObservations = observations.getSumOfValues();

    // Shannon entropy in nats. Guard p == 0: Math.log(0) is -Infinity, so
    // -0 * Math.log(0) is NaN and would poison the whole sum. By the usual
    // convention, a zero-probability bin contributes 0 to the entropy.
    final double entropyBaseE = observations.values().stream()
        .mapToDouble(v -> {
            final double p = v.getValue() / totalObservations;
            return p > 0 ? -p * Math.log(p) : 0.0;
        })
        .sum();

    // Convert to log base 4 so that the entropy is a measure of the
    // effective number of DNA bases. (Using log(2.0) would give bits.)
    return entropyBaseE / MathUtil.LOG_4_BASE_E;
  }
}

代码示例来源:origin: broadinstitute/picard

/**
 * Finalizes the lane-level metrics after all tiles have been processed:
 * per-tile cluster means/SDs, pass-filter (PF) percentages, and the derived
 * cluster/read/base totals.
 */
private void onComplete() {
  // Mean and SD of raw cluster counts per tile, rounded to whole clusters.
  final double meanClustersPerTile = tileToClusterHistogram.getMeanBinSize();
  metrics.MEAN_CLUSTERS_PER_TILE = Math.round(meanClustersPerTile);
  metrics.SD_CLUSTERS_PER_TILE = Math.round(tileToClusterHistogram.getStandardDeviationBinSize(meanClustersPerTile));
  // Same statistics restricted to pass-filter clusters.
  final double meanPfClustersPerTile = tileToPfClusterHistogram.getMeanBinSize();
  metrics.MEAN_PF_CLUSTERS_PER_TILE = Math.round(meanPfClustersPerTile);
  metrics.SD_PF_CLUSTERS_PER_TILE = Math.round(tileToPfClusterHistogram.getStandardDeviationBinSize(meanPfClustersPerTile));
  // Per-tile PF fraction = PF clusters / total clusters, reported as a
  // percentage and rounded to two decimal places via DecimalFormat.
  final DecimalFormat decFormat = new DecimalFormat("#.##");
  final Histogram<Integer> laneToPctPfClusterHistogram = tileToPfClusterHistogram.divideByHistogram(tileToClusterHistogram);
  final double meanPctPfClustersPerTile = laneToPctPfClusterHistogram.getMeanBinSize();
  // The mean is NaN when the histogram is empty; report 0 in that case.
  metrics.MEAN_PCT_PF_CLUSTERS_PER_TILE = (Double.isNaN(meanPctPfClustersPerTile) ? 0 : Double.valueOf(decFormat.format(meanPctPfClustersPerTile * 100)));
  metrics.SD_PCT_PF_CLUSTERS_PER_TILE = Double.valueOf(decFormat.format(laneToPctPfClusterHistogram.getStandardDeviationBinSize(meanPctPfClustersPerTile) * 100));
  // getSumOfValues() returns a double; truncate the totals to long.
  metrics.TOTAL_CLUSTERS = (long) this.tileToClusterHistogram.getSumOfValues();
  metrics.PF_CLUSTERS = (long) this.tileToPfClusterHistogram.getSumOfValues();
  // Count the template bases per cluster from the configured read structure.
  final ReadStructure readStructure = new ReadStructure(READ_STRUCTURE);
  int templateBaseCountPerCluster = 0;
  for (int i = 0; i < readStructure.templates.length(); i++) {
    templateBaseCountPerCluster += readStructure.templates.get(i).length;
  }
  // Derive read and base totals from the cluster totals and read structure.
  metrics.TOTAL_READS = metrics.TOTAL_CLUSTERS * readStructure.templates.length();
  metrics.PF_READS = metrics.PF_CLUSTERS * readStructure.templates.length();
  metrics.TOTAL_BASES = metrics.TOTAL_CLUSTERS * templateBaseCountPerCluster;
  metrics.PF_BASES = metrics.PF_CLUSTERS * templateBaseCountPerCluster;
}

代码示例来源:origin: com.github.broadinstitute/picard

/**
 * Finalizes the lane-level metrics after all tiles have been processed:
 * per-tile cluster means/SDs, pass-filter (PF) percentages, and the derived
 * cluster/read/base totals.
 */
private void onComplete() {
  // Mean and SD of raw cluster counts per tile, rounded to whole clusters.
  final double meanClustersPerTile = tileToClusterHistogram.getMeanBinSize();
  metrics.MEAN_CLUSTERS_PER_TILE = Math.round(meanClustersPerTile);
  metrics.SD_CLUSTERS_PER_TILE = Math.round(tileToClusterHistogram.getStandardDeviationBinSize(meanClustersPerTile));
  // Same statistics restricted to pass-filter clusters.
  final double meanPfClustersPerTile = tileToPfClusterHistogram.getMeanBinSize();
  metrics.MEAN_PF_CLUSTERS_PER_TILE = Math.round(meanPfClustersPerTile);
  metrics.SD_PF_CLUSTERS_PER_TILE = Math.round(tileToPfClusterHistogram.getStandardDeviationBinSize(meanPfClustersPerTile));
  // Per-tile PF fraction = PF clusters / total clusters, reported as a
  // percentage and rounded to two decimal places via DecimalFormat.
  final DecimalFormat decFormat = new DecimalFormat("#.##");
  final Histogram<Integer> laneToPctPfClusterHistogram = tileToPfClusterHistogram.divideByHistogram(tileToClusterHistogram);
  final double meanPctPfClustersPerTile = laneToPctPfClusterHistogram.getMeanBinSize();
  // The mean is NaN when the histogram is empty; report 0 in that case.
  metrics.MEAN_PCT_PF_CLUSTERS_PER_TILE = (Double.isNaN(meanPctPfClustersPerTile) ? 0 : Double.valueOf(decFormat.format(meanPctPfClustersPerTile * 100)));
  metrics.SD_PCT_PF_CLUSTERS_PER_TILE = Double.valueOf(decFormat.format(laneToPctPfClusterHistogram.getStandardDeviationBinSize(meanPctPfClustersPerTile) * 100));
  // getSumOfValues() returns a double; truncate the totals to long.
  metrics.TOTAL_CLUSTERS = (long) this.tileToClusterHistogram.getSumOfValues();
  metrics.PF_CLUSTERS = (long) this.tileToPfClusterHistogram.getSumOfValues();
  // Count the template bases per cluster from the configured read structure.
  final ReadStructure readStructure = new ReadStructure(READ_STRUCTURE);
  int templateBaseCountPerCluster = 0;
  for (int i = 0; i < readStructure.templates.length(); i++) {
    templateBaseCountPerCluster += readStructure.templates.get(i).length;
  }
  // Derive read and base totals from the cluster totals and read structure.
  metrics.TOTAL_READS = metrics.TOTAL_CLUSTERS * readStructure.templates.length();
  metrics.PF_READS = metrics.PF_CLUSTERS * readStructure.templates.length();
  metrics.TOTAL_BASES = metrics.TOTAL_CLUSTERS * templateBaseCountPerCluster;
  metrics.PF_BASES = metrics.PF_CLUSTERS * templateBaseCountPerCluster;
}

代码示例来源:origin: samtools/htsjdk

/** Verifies that getSumOfValues() totals the entries across all bins. */
@Test
public void testGetSumOfValues() {
  // Two entries in bin 4 and three in bin 5: the sum of values must be 5.
  final Histogram<Integer> histogram = new Histogram<>();
  histogram.increment(4);
  histogram.increment(4);
  histogram.increment(5);
  histogram.increment(5);
  histogram.increment(5);
  Assert.assertEquals(histogram.getSumOfValues(), 5.0, 0.000001);
}

代码示例来源:origin: broadinstitute/picard

/**
 * Converts a histogram into an array of probabilities, where each slot holds
 * that bin's value divided by the histogram's total.
 * NOTE(review): assumes bin keys are the contiguous integers 0..size()-1;
 * bins absent from the histogram are left at 0.0 — confirm against callers.
 *
 * @param histogram the histogram to normalize; must not be null
 * @return an array of length histogram.size() with normalized bin values
 */
public static double[] normalizeHistogram(final Histogram<Integer> histogram) {
  if (histogram == null) {
    throw new PicardException("Histogram is null and cannot be normalized");
  }
  final double total = histogram.getSumOfValues();
  final int binCount = histogram.size();
  final double[] normalized = new double[binCount];
  for (int bin = 0; bin < binCount; bin++) {
    if (histogram.get(bin) != null) {
      normalized[bin] = histogram.get(bin).getValue() / total;
    }
  }
  return normalized;
}

代码示例来源:origin: com.github.broadinstitute/picard

/**
 * Converts a histogram into an array of probabilities, where each slot holds
 * that bin's value divided by the histogram's total.
 * NOTE(review): assumes bin keys are the contiguous integers 0..size()-1;
 * bins absent from the histogram are left at 0.0 — confirm against callers.
 *
 * @param histogram the histogram to normalize; must not be null
 * @return an array of length histogram.size() with normalized bin values
 */
public static double[] normalizeHistogram(final Histogram<Integer> histogram) {
  if (histogram == null) {
    throw new PicardException("Histogram is null and cannot be normalized");
  }
  final double total = histogram.getSumOfValues();
  final int binCount = histogram.size();
  final double[] normalized = new double[binCount];
  for (int bin = 0; bin < binCount; bin++) {
    if (histogram.get(bin) != null) {
      normalized[bin] = histogram.get(bin).getValue() / total;
    }
  }
  return normalized;
}

代码示例来源:origin: PapenfussLab/gridss

/**
 * Builds an InsertSizeDistribution from an insert-size histogram by flattening
 * its keys and per-key counts into parallel arrays.
 *
 * @param insertSizeHistogram histogram keyed by insert size; may be null
 * @return the distribution, or null when the histogram is null
 */
public static InsertSizeDistribution create(Histogram<Integer> insertSizeHistogram) {
  if (insertSizeHistogram == null) return null;
  final int binCount = insertSizeHistogram.size();
  final int[] sizes = new int[binCount];
  final double[] counts = new double[binCount];
  // getSumOfValues() returns a double; the constructor takes the truncated long.
  final double total = insertSizeHistogram.getSumOfValues();
  int index = 0;
  for (final Integer key : insertSizeHistogram.keySet()) {
    sizes[index] = key;
    counts[index] = insertSizeHistogram.get(key).getValue();
    index++;
  }
  return new InsertSizeDistribution(sizes, counts, (long) total);
}
public InsertSizeDistribution(int[] singletons, double[] readCounts) {

代码示例来源:origin: broadinstitute/picard

/**
 * Integration test: runs the targeted-PCR metrics tool on a SAM file whose
 * reads contain soft clips, then checks the summary metrics and the second
 * output histogram (sum of values and two individual bins).
 */
@Test()
public void testRawBqDistributionWithSoftClips() throws IOException {
  final String input = TEST_DATA_DIR + "chrMReadsWithClips.sam";
  // Temp output file for the metrics; cleaned up on JVM exit.
  final File outFile = File.createTempFile("test", ".TargetedMetrics_Coverage");
  outFile.deleteOnExit();
  // Command-line arguments for the picard tool under test.
  // NOTE(review): SAMPLE_SIZE=0 — presumably disables sampling; confirm
  // against the tool's documentation.
  final String[] args = new String[] {
      "TARGET_INTERVALS=" + singleIntervals,
      "INPUT=" + input,
      "OUTPUT=" + outFile.getAbsolutePath(),
      "REFERENCE_SEQUENCE=" + referenceFile,
      "LEVEL=ALL_READS",
      "AMPLICON_INTERVALS=" + singleIntervals,
      "SAMPLE_SIZE=" + 0
  };
  // Exit code 0 means the tool ran successfully.
  Assert.assertEquals(runPicardCommandLine(args), 0);
  final MetricsFile<TargetedPcrMetrics, Comparable<Integer>> output = new MetricsFile<>();
  output.read(new FileReader(outFile));
  // Exactly one metrics row is expected, covering the two input reads.
  Assert.assertEquals(output.getMetrics().size(), 1);
  for (final TargetedPcrMetrics metrics : output.getMetrics()) {
    Assert.assertEquals(metrics.TOTAL_READS, 2);
  }
  // The second histogram (index 1) is checked: total of 62 entries,
  // with 52 in bin 32 and 10 in bin 33 (within 0.01 tolerance).
  Assert.assertEquals(output.getNumHistograms(), 2);
  final Histogram<Comparable<Integer>> histogram = output.getAllHistograms().get(1);
  Assert.assertTrue(TestNGUtil.compareDoubleWithAccuracy(histogram.getSumOfValues(), 62,0.01));
  Assert.assertTrue(TestNGUtil.compareDoubleWithAccuracy(histogram.get(32).getValue(), 52D, 0.01));
  Assert.assertTrue(TestNGUtil.compareDoubleWithAccuracy(histogram.get(33).getValue(), 10D, 0.01));
}

代码示例来源:origin: broadinstitute/picard

final Histogram<Integer> baseQHistogram = output.getAllHistograms().get(1);
Assert.assertEquals((long) highQualityDepthHistogram.getSumOfValues(), metrics.GENOME_TERRITORY);
Assert.assertEquals((long) highQualityDepthHistogram.get(1).getValue(), expectedSingletonCoverage);
Assert.assertEquals((long) highQualityDepthHistogram.get(3).getValue(), 2*10);

代码示例来源:origin: broadinstitute/picard

GENOME_TERRITORY = (long) highQualityDepthHistogram.getSumOfValues();
MEAN_COVERAGE    = highQualityDepthHistogram.getMean();
SD_COVERAGE      = highQualityDepthHistogram.getStandardDeviation();

相关文章