org.deeplearning4j.nn.api.Layer.setParams()方法的使用及代码示例

x33g5p2x  于2022-01-24 转载在 其他  
字(5.1k)|赞(0)|评价(0)|浏览(120)

本文整理了Java中org.deeplearning4j.nn.api.Layer.setParams()方法的一些代码示例,展示了Layer.setParams()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Layer.setParams()方法的具体详情如下:
包路径:org.deeplearning4j.nn.api.Layer
类名称:Layer
方法名:setParams

Layer.setParams介绍

暂无

代码示例

代码示例来源:origin: org.deeplearning4j/deeplearning4j-scaleout-akka

/**
 * Applies a remotely-produced parameter update to the local network.
 * The first vararg must be the flattened parameter vector as an {@link INDArray};
 * any further arguments are ignored.
 */
@Override
public void update(Object... o) {
  // Only the first argument is meaningful: the new flattened parameter vector.
  neuralNetwork.setParams((INDArray) o[0]);
}

代码示例来源:origin: org.deeplearning4j/deeplearning4j-nn

/**
 * Forwards the flattened parameter vector to the wrapped layer; this wrapper
 * keeps no parameter state of its own.
 */
@Override
public void setParams(INDArray params) {
  this.insideLayer.setParams(params);
}

代码示例来源:origin: org.deeplearning4j/cdh4

/**
 * Collect the aggregated parameter update pushed down from the master node and
 * apply it to this worker's local parameter vector, overwriting the local
 * parameters wholesale.
 *
 * TODO: check the state changes of the incoming message!
 *
 * @param masterUpdateUpdateable carrier whose {@code get()} yields the master's
 *                               flattened parameter vector
 */
@Override
public void update(ParameterVectorUpdateable masterUpdateUpdateable) {
  neuralNetwork.setParams(masterUpdateUpdateable.get());
}

代码示例来源:origin: org.deeplearning4j/deeplearning4j-nn

/**
 * Writes the trained parameters of the unfrozen helper network back into the
 * corresponding layers of the original network. Helper-layer index 0 maps to
 * original-layer index {@code frozenInputLayer + 1}.
 */
private void copyParamsFromSubsetMLNToOrig() {
  int offset = frozenInputLayer + 1;
  for (int origIdx = offset; origIdx < origMLN.getnLayers(); origIdx++) {
    origMLN.getLayer(origIdx).setParams(unFrozenSubsetMLN.getLayer(origIdx - offset).params());
  }
}

代码示例来源:origin: org.deeplearning4j/deeplearning4j-nn

/**
 * Seeds the unfrozen subset graph with the original graph's current parameters,
 * matching layers by vertex name. Vertices that carry no layer are skipped.
 */
private void copyOrigParamsToSubsetGraph() {
  for (GraphVertex vertex : unFrozenSubsetGraph.getVertices()) {
    if (vertex.hasLayer()) {
      vertex.getLayer().setParams(origGraph.getLayer(vertex.getVertexName()).params());
    }
  }
}

代码示例来源:origin: org.deeplearning4j/deeplearning4j-nn

/**
 * Pushes the parameters trained in the unfrozen subset graph back into the
 * same-named vertices of the original graph. Vertices without a layer are skipped.
 */
private void copyParamsFromSubsetGraphToOrig() {
  for (GraphVertex vertex : unFrozenSubsetGraph.getVertices()) {
    if (vertex.hasLayer()) {
      origGraph.getVertex(vertex.getVertexName()).getLayer().setParams(vertex.getLayer().params());
    }
  }
}

代码示例来源:origin: CampagneLaboratory/variationanalysis

/**
 * Optionally seeds the input and component layers of {@code computationGraph}
 * with parameters from a previously trained ("pretraining") model, then saves
 * the resulting graph under the configured model prefix.
 *
 * No-op when no pretraining model path was supplied in the arguments.
 *
 * @throws IOException if the pretraining model cannot be read or the graph cannot be saved
 * @throws RuntimeException if the saved model is missing or is not a usable ComputationGraph
 */
private void transferParams() throws IOException {
  if (args().pretrainingModelPath == null) {
    return;
  }
  ModelLoader pretrainingLoader = new ModelLoader(args().pretrainingModelPath);
  Model savedModel = pretrainingLoader.loadModel(args().pretrainingModelName);
  // Only a ComputationGraph can serve as a pretraining source; anything else maps to null.
  ComputationGraph savedGraph =
      savedModel instanceof ComputationGraph ? (ComputationGraph) savedModel : null;
  boolean usable = savedModel != null
      && savedGraph != null
      && savedGraph.getUpdater() != null
      && savedGraph.getLayers() != null;
  if (!usable) {
    throw new RuntimeException(String.format("Unable to load model for pretraining from %s",
        args().pretrainingModelPath));
  }
  // Transfer parameters layer-by-layer for every input layer, then every component layer.
  for (String inputLayer : assembler.getInputNames()) {
    computationGraph.getLayer(inputLayer).setParams(
        savedGraph.getLayer(inputLayer).params());
  }
  for (String componentLayer : assembler.getComponentNames()) {
    computationGraph.getLayer(componentLayer).setParams(
        savedGraph.getLayer(componentLayer).params());
  }
  String modelPrefix = args().modelPrefix != null ? args().modelPrefix : "pretraining";
  ComputationGraphSaver graphSaver = new ComputationGraphSaver(modelPath);
  graphSaver.saveModel(computationGraph, modelPrefix);
}
}

代码示例来源:origin: org.deeplearning4j/deeplearning4j-nn

/**
 * Sets the parameters of every layer in the graph from a single flattened row
 * vector, distributing contiguous slices to layers in topological order.
 *
 * Fast paths: the call is a no-op when {@code params} is the backing flattened
 * array itself, and a bulk copy when it has the same length as the existing
 * flattened view.
 */
@Override
public void setParams(INDArray params) {
  // Caller handed us our own backing array — nothing to do.
  if (params == flattenedParams) {
    return;
  }
  // Same total length: copy in bulk into the existing flattened view.
  if (this.flattenedParams != null && this.flattenedParams.length() == params.length()) {
    this.flattenedParams.assign(params);
    return;
  }
  // Otherwise slice the incoming vector layer by layer in topological order.
  int offset = 0;
  for (int vertexIdx : topologicalOrder) {
    if (!vertices[vertexIdx].hasLayer()) {
      continue;
    }
    Layer layer = vertices[vertexIdx].getLayer();
    int paramCount = layer.numParams();
    if (paramCount <= 0) {
      continue; // some layers (subsampling etc.) have no parameters
    }
    INDArray slice = params.get(NDArrayIndex.point(0), NDArrayIndex.interval(offset, offset + paramCount));
    layer.setParams(slice);
    offset += paramCount;
  }
}

代码示例来源:origin: org.deeplearning4j/deeplearning4j-nn

continue; //Some layers: no parameters (subsampling, etc)
INDArray get = params.get(NDArrayIndex.point(0), NDArrayIndex.interval(idx, range + idx));
layer.setParams(get);
idx += range;

代码示例来源:origin: org.deeplearning4j/deeplearning4j-nn

if (editedVertices.contains(layerName))
  continue; //keep the changed params
layer.setParams(origGraph.getLayer(layerName).params().dup()); //copy over origGraph params

代码示例来源:origin: org.deeplearning4j/deeplearning4j-nn

/**
 * Builds the helper network {@code unFrozenSubsetMLN} containing only the
 * trainable (unfrozen) tail of {@code origMLN}, reusing the original
 * backprop/TBPTT settings, and seeds it with the original network's current
 * parameters. Also records {@code frozenInputLayer}, the index of the last
 * frozen layer in the original network.
 */
private void initHelperMLN() {
  // Wrap every layer up to frozenTill in a FrozenLayer so it is excluded from training.
  if (applyFrozen) {
    org.deeplearning4j.nn.api.Layer[] allLayers = origMLN.getLayers();
    for (int idx = frozenTill; idx >= 0; idx--) {
      //unchecked?
      allLayers[idx] = new FrozenLayer(allLayers[idx]);
    }
    origMLN.setLayers(allLayers);
  }
  // Locate the LAST frozen layer; everything after it is trainable.
  int layerCount = origMLN.getnLayers();
  for (int idx = 0; idx < layerCount; idx++) {
    if (origMLN.getLayer(idx) instanceof FrozenLayer) {
      frozenInputLayer = idx;
    }
  }
  // Gather the per-layer configurations of the trainable tail.
  List<NeuralNetConfiguration> unfrozenConfs = new ArrayList<>();
  for (int idx = frozenInputLayer + 1; idx < layerCount; idx++) {
    unfrozenConfs.add(origMLN.getLayer(idx).conf());
  }
  // Clone the original network's global settings onto the subset configuration.
  MultiLayerConfiguration origConf = origMLN.getLayerWiseConfigurations();
  MultiLayerConfiguration subsetConf = new MultiLayerConfiguration.Builder()
      .backprop(origConf.isBackprop())
      .inputPreProcessors(origConf.getInputPreProcessors())
      .pretrain(origConf.isPretrain())
      .backpropType(origConf.getBackpropType())
      .tBPTTForwardLength(origConf.getTbpttFwdLength())
      .tBPTTBackwardLength(origConf.getTbpttBackLength())
      .confs(unfrozenConfs)
      .build();
  unFrozenSubsetMLN = new MultiLayerNetwork(subsetConf);
  unFrozenSubsetMLN.init();
  // Seed the helper network with the original network's current parameters.
  for (int idx = frozenInputLayer + 1; idx < layerCount; idx++) {
    unFrozenSubsetMLN.getLayer(idx - frozenInputLayer - 1).setParams(origMLN.getLayer(idx).params());
  }
  //unFrozenSubsetMLN.setListeners(origMLN.getListeners());
}

相关文章