本文整理了 Java 中 org.apache.spark.mllib.linalg.Vector.size() 方法的一些代码示例，展示了 Vector.size() 的具体用法。这些代码示例主要来源于 GitHub、Stack Overflow、Maven 等平台，是从一些精选项目中提取出来的代码，具有较强的参考意义，能在一定程度上帮助到你。Vector.size() 方法的具体详情如下：
包路径:org.apache.spark.mllib.linalg.Vector
类名称:Vector
方法名:size
暂无
代码示例来源:origin: mahmoudparsian/data-algorithms-book
/**
 * Computes the component-wise average of a list of vectors.
 *
 * <p>All vectors are assumed to have the same dimension as the first one —
 * TODO confirm with callers; a shorter vector would throw from {@code apply(i)}.
 *
 * @param list non-null, non-empty list of vectors to average
 * @return a new DenseVector holding the per-component mean
 * @throws IllegalArgumentException if {@code list} is null or empty
 */
static Vector average(List<Vector> list) {
    // Fail fast with a clear message instead of the cryptic
    // IndexOutOfBoundsException/NPE that list.get(0) would raise.
    if (list == null || list.isEmpty()) {
        throw new IllegalArgumentException("list of vectors must be non-null and non-empty");
    }
    // Accumulate the component-wise sum; dimension taken from the first vector.
    double[] sum = new double[list.get(0).size()];
    for (Vector v : list) {
        for (int i = 0; i < sum.length; i++) {
            sum[i] += v.apply(i);
        }
    }
    // Divide each component by the number of vectors to get the mean.
    int numOfVectors = list.size();
    for (int i = 0; i < sum.length; i++) {
        sum[i] = sum[i] / numOfVectors;
    }
    return new DenseVector(sum);
}
代码示例来源:origin: mahmoudparsian/data-algorithms-book
/**
 * Divides every component of {@code vec} by {@code numVectors} and returns the
 * result as a new DenseVector. Intended for a vector that already holds a sum.
 *
 * @param vec        the (summed) input vector; not modified
 * @param numVectors divisor applied to each component
 * @return a fresh DenseVector with each component divided by {@code numVectors}
 */
static Vector average(Vector vec, Integer numVectors) {
    final double divisor = (double) numVectors;
    final int dim = vec.size();
    double[] result = new double[dim];
    for (int j = 0; j < dim; j++) {
        result[j] = vec.apply(j) / divisor;
    }
    return new DenseVector(result);
}
代码示例来源:origin: mahmoudparsian/data-algorithms-book
/**
 * Returns the squared Euclidean distance between two vectors: the sum of the
 * squared component-wise differences.
 *
 * <p>Iterates over {@code a.size()} components; assumes {@code b} has at least
 * as many — TODO confirm with callers.
 *
 * @param a first vector
 * @param b second vector
 * @return sum over i of (a[i] - b[i])^2
 */
static double squaredDistance(Vector a, Vector b) {
    final int dim = a.size();
    double total = 0.0;
    for (int idx = 0; idx < dim; idx++) {
        final double delta = a.apply(idx) - b.apply(idx);
        total += delta * delta;
    }
    return total;
}
代码示例来源:origin: mahmoudparsian/data-algorithms-book
/**
 * Component-wise sum of two vectors, returned as a new DenseVector.
 *
 * <p>The result is sized by {@code a}; assumes {@code b} has at least
 * {@code a.size()} components — TODO confirm with callers.
 *
 * @param a first vector (determines the result dimension)
 * @param b second vector
 * @return a fresh DenseVector where result[i] = a[i] + b[i]
 */
static Vector add(Vector a, Vector b) {
    final int dim = a.size();
    double[] total = new double[dim];
    for (int j = 0; j < dim; j++) {
        // The array starts zeroed, so plain '=' matches the original '+='.
        total[j] = a.apply(j) + b.apply(j);
    }
    return new DenseVector(total);
}
代码示例来源:origin: ypriverol/spark-java8
/**
 * Computes the squared Euclidean distance between {@code a} and {@code b}.
 *
 * <p>Loops over the first vector's components; presumably both vectors share
 * the same dimension — verify against callers.
 *
 * @param a first vector
 * @param b second vector
 * @return sum over i of (a[i] - b[i])^2
 */
static double squaredDistance(Vector a, Vector b) {
    double accumulated = 0.0;
    final int components = a.size();
    for (int c = 0; c < components; c++) {
        final double gap = a.apply(c) - b.apply(c);
        accumulated += gap * gap;
    }
    return accumulated;
}
代码示例来源:origin: ypriverol/spark-java8
/**
 * Scales each component of {@code vec} down by {@code numVectors}, producing a
 * new DenseVector. Used to turn an accumulated sum into a mean.
 *
 * @param vec        vector holding the accumulated sums; left untouched
 * @param numVectors the count to divide by
 * @return a new DenseVector of per-component averages
 */
static Vector average(Vector vec, Integer numVectors) {
    final int length = vec.size();
    final double count = numVectors.doubleValue();
    double[] mean = new double[length];
    for (int pos = 0; pos < length; pos++) {
        mean[pos] = vec.apply(pos) / count;
    }
    return new DenseVector(mean);
}
代码示例来源:origin: ypriverol/spark-java8
/**
 * Adds two vectors component by component and wraps the result in a new
 * DenseVector.
 *
 * <p>Result length follows {@code a}; {@code b} is presumed to be at least as
 * long — confirm against callers.
 *
 * @param a first operand (fixes the output length)
 * @param b second operand
 * @return new DenseVector with result[i] = a[i] + b[i]
 */
static Vector add(Vector a, Vector b) {
    final int length = a.size();
    double[] combined = new double[length];
    for (int pos = 0; pos < length; pos++) {
        // Fresh array is all zeros, so assignment is equivalent to the original '+='.
        combined[pos] = a.apply(pos) + b.apply(pos);
    }
    return new DenseVector(combined);
}
代码示例来源:origin: org.apache.spark/spark-mllib_2.11
@Test
@SuppressWarnings("unchecked")
public void testNormalVectorRDD() {
    // Every generated RDD must hold `rows` vectors of length `cols`.
    long rows = 100L;
    int cols = 10;
    int parts = 2;
    long fixedSeed = 1L;
    // Cover all three overloads: defaults, explicit partitions, explicit seed.
    JavaRDD<Vector> withDefaults = normalJavaVectorRDD(jsc, rows, cols);
    JavaRDD<Vector> withParts = normalJavaVectorRDD(jsc, rows, cols, parts);
    JavaRDD<Vector> withSeed = normalJavaVectorRDD(jsc, rows, cols, parts, fixedSeed);
    for (JavaRDD<Vector> rdd : Arrays.asList(withDefaults, withParts, withSeed)) {
        Assert.assertEquals(rows, rdd.count());
        Assert.assertEquals(cols, rdd.first().size());
    }
}
代码示例来源:origin: org.apache.spark/spark-mllib_2.10
@Test
@SuppressWarnings("unchecked")
public void testNormalVectorRDD() {
    // Expected shape of every generated RDD: `expectedRows` vectors, `vecLen` wide.
    long expectedRows = 100L;
    int vecLen = 10;
    int numPartitions = 2;
    long rngSeed = 1L;
    // Exercise the three overloads of normalJavaVectorRDD.
    JavaRDD<Vector> basic = normalJavaVectorRDD(jsc, expectedRows, vecLen);
    JavaRDD<Vector> partitioned = normalJavaVectorRDD(jsc, expectedRows, vecLen, numPartitions);
    JavaRDD<Vector> seeded = normalJavaVectorRDD(jsc, expectedRows, vecLen, numPartitions, rngSeed);
    for (JavaRDD<Vector> rdd : Arrays.asList(basic, partitioned, seeded)) {
        Assert.assertEquals(expectedRows, rdd.count());
        Assert.assertEquals(vecLen, rdd.first().size());
    }
}
代码示例来源:origin: org.apache.spark/spark-mllib_2.10
@Test
@SuppressWarnings("unchecked")
public void testPoissonVectorRDD() {
    // Poisson rate parameter plus the expected RDD dimensions.
    double lambda = 2.0;
    long rows = 100L;
    int cols = 10;
    int parts = 2;
    long fixedSeed = 1L;
    // Check all three overloads produce the requested count and vector size.
    JavaRDD<Vector> withDefaults = poissonJavaVectorRDD(jsc, lambda, rows, cols);
    JavaRDD<Vector> withParts = poissonJavaVectorRDD(jsc, lambda, rows, cols, parts);
    JavaRDD<Vector> withSeed = poissonJavaVectorRDD(jsc, lambda, rows, cols, parts, fixedSeed);
    for (JavaRDD<Vector> rdd : Arrays.asList(withDefaults, withParts, withSeed)) {
        Assert.assertEquals(rows, rdd.count());
        Assert.assertEquals(cols, rdd.first().size());
    }
}
代码示例来源:origin: org.apache.spark/spark-mllib
@Test
@SuppressWarnings("unchecked")
public void testUniformVectorRDD() {
    // Requested RDD shape: `rows` vectors, each `cols` long.
    long rows = 100L;
    int cols = 10;
    int parts = 2;
    long fixedSeed = 1L;
    // Validate all three uniformJavaVectorRDD overloads.
    JavaRDD<Vector> withDefaults = uniformJavaVectorRDD(jsc, rows, cols);
    JavaRDD<Vector> withParts = uniformJavaVectorRDD(jsc, rows, cols, parts);
    JavaRDD<Vector> withSeed = uniformJavaVectorRDD(jsc, rows, cols, parts, fixedSeed);
    for (JavaRDD<Vector> rdd : Arrays.asList(withDefaults, withParts, withSeed)) {
        Assert.assertEquals(rows, rdd.count());
        Assert.assertEquals(cols, rdd.first().size());
    }
}
代码示例来源:origin: org.apache.spark/spark-mllib_2.11
@Test
@SuppressWarnings("unchecked")
public void testUniformVectorRDD() {
    // Expected element count and vector width of each generated RDD.
    long expectedRows = 100L;
    int vecLen = 10;
    int numPartitions = 2;
    long rngSeed = 1L;
    // The three overloads must all honor the requested shape.
    JavaRDD<Vector> basic = uniformJavaVectorRDD(jsc, expectedRows, vecLen);
    JavaRDD<Vector> partitioned = uniformJavaVectorRDD(jsc, expectedRows, vecLen, numPartitions);
    JavaRDD<Vector> seeded = uniformJavaVectorRDD(jsc, expectedRows, vecLen, numPartitions, rngSeed);
    for (JavaRDD<Vector> rdd : Arrays.asList(basic, partitioned, seeded)) {
        Assert.assertEquals(expectedRows, rdd.count());
        Assert.assertEquals(vecLen, rdd.first().size());
    }
}
代码示例来源:origin: org.apache.spark/spark-mllib
@Test
@SuppressWarnings("unchecked")
public void testNormalVectorRDD() {
    // Shape every generated RDD must satisfy.
    long rowCount = 100L;
    int vectorSize = 10;
    int partitionCount = 2;
    long deterministicSeed = 1L;
    // All three normalJavaVectorRDD overloads under test.
    JavaRDD<Vector> first = normalJavaVectorRDD(jsc, rowCount, vectorSize);
    JavaRDD<Vector> second = normalJavaVectorRDD(jsc, rowCount, vectorSize, partitionCount);
    JavaRDD<Vector> third =
        normalJavaVectorRDD(jsc, rowCount, vectorSize, partitionCount, deterministicSeed);
    for (JavaRDD<Vector> rdd : Arrays.asList(first, second, third)) {
        Assert.assertEquals(rowCount, rdd.count());
        Assert.assertEquals(vectorSize, rdd.first().size());
    }
}
代码示例来源:origin: org.apache.spark/spark-mllib_2.11
@Test
@SuppressWarnings("unchecked")
public void testPoissonVectorRDD() {
    // Poisson mean plus the expected RDD dimensions.
    double poissonMean = 2.0;
    long rowCount = 100L;
    int vectorSize = 10;
    int partitionCount = 2;
    long deterministicSeed = 1L;
    // Each overload must yield rowCount vectors of vectorSize components.
    JavaRDD<Vector> first = poissonJavaVectorRDD(jsc, poissonMean, rowCount, vectorSize);
    JavaRDD<Vector> second =
        poissonJavaVectorRDD(jsc, poissonMean, rowCount, vectorSize, partitionCount);
    JavaRDD<Vector> third =
        poissonJavaVectorRDD(jsc, poissonMean, rowCount, vectorSize, partitionCount, deterministicSeed);
    for (JavaRDD<Vector> rdd : Arrays.asList(first, second, third)) {
        Assert.assertEquals(rowCount, rdd.count());
        Assert.assertEquals(vectorSize, rdd.first().size());
    }
}
代码示例来源:origin: org.apache.spark/spark-mllib_2.10
@Test
@SuppressWarnings("unchecked")
public void testUniformVectorRDD() {
    // Requested RDD shape shared by all three overloads.
    long rowCount = 100L;
    int vectorSize = 10;
    int partitionCount = 2;
    long deterministicSeed = 1L;
    JavaRDD<Vector> first = uniformJavaVectorRDD(jsc, rowCount, vectorSize);
    JavaRDD<Vector> second = uniformJavaVectorRDD(jsc, rowCount, vectorSize, partitionCount);
    JavaRDD<Vector> third =
        uniformJavaVectorRDD(jsc, rowCount, vectorSize, partitionCount, deterministicSeed);
    // Each RDD must report the requested row count and vector length.
    for (JavaRDD<Vector> rdd : Arrays.asList(first, second, third)) {
        Assert.assertEquals(rowCount, rdd.count());
        Assert.assertEquals(vectorSize, rdd.first().size());
    }
}
代码示例来源:origin: org.apache.spark/spark-mllib_2.11
@Test
@SuppressWarnings("unchecked")
public void testExponentialVectorRDD() {
    // Exponential mean plus the expected RDD dimensions.
    double expMean = 2.0;
    long rows = 100L;
    int cols = 10;
    int parts = 2;
    long fixedSeed = 1L;
    // Exercise the three exponentialJavaVectorRDD overloads.
    JavaRDD<Vector> withDefaults = exponentialJavaVectorRDD(jsc, expMean, rows, cols);
    JavaRDD<Vector> withParts = exponentialJavaVectorRDD(jsc, expMean, rows, cols, parts);
    JavaRDD<Vector> withSeed =
        exponentialJavaVectorRDD(jsc, expMean, rows, cols, parts, fixedSeed);
    for (JavaRDD<Vector> rdd : Arrays.asList(withDefaults, withParts, withSeed)) {
        Assert.assertEquals(rows, rdd.count());
        Assert.assertEquals(cols, rdd.first().size());
    }
}
代码示例来源:origin: org.apache.spark/spark-mllib
@Test
@SuppressWarnings("unchecked")
public void testExponentialVectorRDD() {
    // Distribution mean and the shape each generated RDD must have.
    double distributionMean = 2.0;
    long rowCount = 100L;
    int vectorSize = 10;
    int partitionCount = 2;
    long deterministicSeed = 1L;
    JavaRDD<Vector> first = exponentialJavaVectorRDD(jsc, distributionMean, rowCount, vectorSize);
    JavaRDD<Vector> second =
        exponentialJavaVectorRDD(jsc, distributionMean, rowCount, vectorSize, partitionCount);
    JavaRDD<Vector> third =
        exponentialJavaVectorRDD(jsc, distributionMean, rowCount, vectorSize, partitionCount, deterministicSeed);
    // All overloads must honor the requested count and vector length.
    for (JavaRDD<Vector> rdd : Arrays.asList(first, second, third)) {
        Assert.assertEquals(rowCount, rdd.count());
        Assert.assertEquals(vectorSize, rdd.first().size());
    }
}
代码示例来源:origin: org.apache.spark/spark-mllib_2.10
@Test
@SuppressWarnings("unchecked")
public void testLogNormalVectorRDD() {
    // Log-normal parameters plus the expected RDD dimensions.
    double logMean = 4.0;
    double logStd = 2.0;
    long rows = 100L;
    int cols = 10;
    int parts = 2;
    long fixedSeed = 1L;
    // Cover all three logNormalJavaVectorRDD overloads.
    JavaRDD<Vector> withDefaults = logNormalJavaVectorRDD(jsc, logMean, logStd, rows, cols);
    JavaRDD<Vector> withParts = logNormalJavaVectorRDD(jsc, logMean, logStd, rows, cols, parts);
    JavaRDD<Vector> withSeed =
        logNormalJavaVectorRDD(jsc, logMean, logStd, rows, cols, parts, fixedSeed);
    for (JavaRDD<Vector> rdd : Arrays.asList(withDefaults, withParts, withSeed)) {
        Assert.assertEquals(rows, rdd.count());
        Assert.assertEquals(cols, rdd.first().size());
    }
}
代码示例来源:origin: org.apache.spark/spark-mllib_2.10
@Test
@SuppressWarnings("unchecked")
public void testGammaVectorRDD() {
    // Gamma distribution parameters plus the expected RDD dimensions.
    double gammaShape = 1.0;
    double gammaScale = 2.0;
    long rows = 100L;
    int cols = 10;
    int parts = 2;
    long fixedSeed = 1L;
    // All three gammaJavaVectorRDD overloads must produce the requested shape.
    JavaRDD<Vector> withDefaults = gammaJavaVectorRDD(jsc, gammaShape, gammaScale, rows, cols);
    JavaRDD<Vector> withParts = gammaJavaVectorRDD(jsc, gammaShape, gammaScale, rows, cols, parts);
    JavaRDD<Vector> withSeed =
        gammaJavaVectorRDD(jsc, gammaShape, gammaScale, rows, cols, parts, fixedSeed);
    for (JavaRDD<Vector> rdd : Arrays.asList(withDefaults, withParts, withSeed)) {
        Assert.assertEquals(rows, rdd.count());
        Assert.assertEquals(cols, rdd.first().size());
    }
}
代码示例来源:origin: org.apache.spark/spark-mllib
@Test
@SuppressWarnings("unchecked")
public void testRandomVectorRDD() {
    // Generator backing the random RDDs, plus the expected dimensions.
    UniformGenerator uniform = new UniformGenerator();
    long rows = 100L;
    int cols = 10;
    int parts = 2;
    long fixedSeed = 1L;
    // Check all three randomJavaVectorRDD overloads.
    JavaRDD<Vector> withDefaults = randomJavaVectorRDD(jsc, uniform, rows, cols);
    JavaRDD<Vector> withParts = randomJavaVectorRDD(jsc, uniform, rows, cols, parts);
    JavaRDD<Vector> withSeed = randomJavaVectorRDD(jsc, uniform, rows, cols, parts, fixedSeed);
    for (JavaRDD<Vector> rdd : Arrays.asList(withDefaults, withParts, withSeed)) {
        Assert.assertEquals(rows, rdd.count());
        Assert.assertEquals(cols, rdd.first().size());
    }
}
}
内容来源于网络,如有侵权,请联系作者删除!