Usage of the org.deeplearning4j.nn.api.Layer Class, with Code Examples


This article collects code examples for the Java class org.deeplearning4j.nn.api.Layer and shows how the class is used in practice. The examples are drawn from selected open-source projects published on GitHub, Stack Overflow, Maven, and similar platforms, and should serve as useful reference material. Details of the Layer class:
Package path: org.deeplearning4j.nn.api.Layer
Class name: Layer

About Layer

Interface for a layer of a neural network. A layer has an activation function, an input and output size, weights, and a bias.
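
The examples below mostly operate on Layer instances obtained from a trained network. As a minimal sketch of that starting point (assuming an already initialized MultiLayerNetwork named net; the variable name is illustrative):

Layer[] layers = net.getLayers();
for (Layer layer : layers) {
  //every element implements org.deeplearning4j.nn.api.Layer
  System.out.println("index=" + layer.getIndex()
          + " type=" + layer.type() + " numParams=" + layer.numParams());
}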

Code Examples

Code example source: org.deeplearning4j/deeplearning4j-nn

@Override
public NeuralNetConfiguration conf() {
  return insideLayer.conf();
}

Code example source: deeplearning4j/dl4j-examples

long totalNumParams = 0;
for (int i = 0; i < layers.length; i++) {
  long nParams = layers[i].numParams();
  System.out.println("Number of parameters in layer " + i + ": " + nParams);
  totalNumParams += nParams;
}
Code example source: org.deeplearning4j/deeplearning4j-scaleout-akka

@Override
public void perform(Job job) {
  Serializable work = job.getWork();
  if(work instanceof DataSet) {
    DataSet data = (DataSet) work;
    neuralNetwork.fit(data.getFeatureMatrix());
  }
  else if(work instanceof INDArray) {
    neuralNetwork.fit((INDArray) work);
  }
  job.setResult(neuralNetwork.params());
}
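
Note: DataSet.getFeatureMatrix() is deprecated in more recent ND4J releases in favor of the equivalent DataSet.getFeatures(); both return the feature INDArray.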

Code example source: org.deeplearning4j/deeplearning4j-nn

@Override
public void setBackpropGradientsViewArray(INDArray gradients) {
  int paramsSoFar = 0;
  for (Layer layer : layers) {
    if (layer.numParams() == 0)
      continue;
    layer.setBackpropGradientsViewArray(gradients.get(NDArrayIndex.point(0),
            NDArrayIndex.interval(paramsSoFar, paramsSoFar + layer.numParams())));
    paramsSoFar += layer.numParams();
  }
}
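
The slicing idiom above treats the gradient buffer as a 1 x N row vector and hands each layer a view into it. A minimal standalone sketch of the same pattern, using only ND4J calls that already appear in the excerpt (the sizes are made up for illustration):

INDArray flat = Nd4j.zeros(1, 10);                   //pretend the network has 10 parameters
INDArray layer0View = flat.get(NDArrayIndex.point(0),
        NDArrayIndex.interval(0, 6));                //first 6 parameters belong to layer 0
INDArray layer1View = flat.get(NDArrayIndex.point(0),
        NDArrayIndex.interval(6, 10));               //remaining 4 belong to layer 1
layer0View.assign(1.0);                              //views share memory, so this writes into flat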

Code example source: org.deeplearning4j/deeplearning4j-nn

public Map<String, INDArray> paramTable(boolean backpropParamsOnly) {
  //Get all parameters from all layers
  Map<String, INDArray> allParams = new LinkedHashMap<>();
  for (Layer layer : layers) {
    Map<String, INDArray> paramMap = layer.paramTable(backpropParamsOnly);
    for (Map.Entry<String, INDArray> entry : paramMap.entrySet()) {
      String newKey = layer.conf().getLayer().getLayerName() + "_" + entry.getKey();
      allParams.put(newKey, entry.getValue());
    }
  }
  return allParams;
}
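
For comparison, calling paramTable() directly on a MultiLayerNetwork keys parameters by layer index rather than layer name, producing keys such as "0_W" and "0_b". A short sketch (again assuming the net variable from above):

Map<String, INDArray> table = net.paramTable();
for (Map.Entry<String, INDArray> e : table.entrySet()) {
  //keys have the form layerIndex + "_" + parameterName, e.g. "0_W"
  System.out.println(e.getKey() + " -> " + Arrays.toString(e.getValue().shape()));
}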

Code example source: org.deeplearning4j/deeplearning4j-nn

int frozenParams = 0;
for (Layer currentLayer : layers) {
  String name = String.valueOf(currentLayer.getIndex());
  String paramShape = "-";
  String in = "-";
  String out = "-";
  String[] classNameArr = currentLayer.getClass().getName().split("\\.");
  String className = classNameArr[classNameArr.length - 1];
  String paramCount = String.valueOf(currentLayer.numParams());
  if (currentLayer.numParams() > 0) {
    paramShape = "";
    in = String.valueOf(((FeedForwardLayer) currentLayer.conf().getLayer()).getNIn());
    out = String.valueOf(((FeedForwardLayer) currentLayer.conf().getLayer()).getNOut());
    Set<String> paraNames = currentLayer.conf().getLearningRateByParam().keySet();
    for (String aP : paraNames) {
      String paramS = ArrayUtils.toString(currentLayer.paramTable().get(aP).shape());
      paramShape += aP + ":" + paramS + ", ";
    }
  }
  if (currentLayer instanceof FrozenLayer) { //guard implied by the FrozenLayer cast below
    frozenParams += currentLayer.numParams();
    classNameArr = ((FrozenLayer) currentLayer).getInsideLayer().getClass().getName().split("\\.");
    className = "Frozen " + classNameArr[classNameArr.length - 1];
  }
}

Code example source: neo4j-contrib/neo4j-ml-procedures

for (Layer layer : model.getLayers()) {
  Node node = node("Layer",
      "type", layer.type().name(), "index", layer.getIndex(),
      "pretrainLayer", layer.isPretrainLayer(), "miniBatchSize", layer.getInputMiniBatchSize(),
      "numParams", layer.numParams());
  if (layer instanceof DenseLayer) {
    DenseLayer dl = (DenseLayer) layer;
    //...(excerpt truncated: dense-layer specific properties are added here)
  }
}

Code example source: org.deeplearning4j/deeplearning4j-nn

private void initHelperMLN() {
  if (applyFrozen) {
    org.deeplearning4j.nn.api.Layer[] layers = origMLN.getLayers();
    for (int i = frozenTill; i >= 0; i--) {
      //unchecked?
      layers[i] = new FrozenLayer(layers[i]);
    }
    origMLN.setLayers(layers);
  }
  for (int i = 0; i < origMLN.getnLayers(); i++) {
    if (origMLN.getLayer(i) instanceof FrozenLayer) {
      frozenInputLayer = i;
    }
  }
  List<NeuralNetConfiguration> allConfs = new ArrayList<>();
  for (int i = frozenInputLayer + 1; i < origMLN.getnLayers(); i++) {
    allConfs.add(origMLN.getLayer(i).conf());
  }
  MultiLayerConfiguration c = origMLN.getLayerWiseConfigurations();
  unFrozenSubsetMLN = new MultiLayerNetwork(new MultiLayerConfiguration.Builder().backprop(c.isBackprop())
          .inputPreProcessors(c.getInputPreProcessors()).pretrain(c.isPretrain())
          .backpropType(c.getBackpropType()).tBPTTForwardLength(c.getTbpttFwdLength())
          .tBPTTBackwardLength(c.getTbpttBackLength()).confs(allConfs).build());
  unFrozenSubsetMLN.init();
  //copy over params
  for (int i = frozenInputLayer + 1; i < origMLN.getnLayers(); i++) {
    unFrozenSubsetMLN.getLayer(i - frozenInputLayer - 1).setParams(origMLN.getLayer(i).params());
  }
  //unFrozenSubsetMLN.setListeners(origMLN.getListeners());
}
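
The central trick in initHelperMLN() is wrapping existing layers in FrozenLayer so that their parameters stop receiving updates while the rest of the network keeps training. Isolated, the wrapping step looks like this (a sketch assuming an initialized network):

Layer[] layers = origMLN.getLayers();
layers[0] = new FrozenLayer(layers[0]);   //freeze layer 0: params are kept but no longer trained
origMLN.setLayers(layers);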

Code example source: org.deeplearning4j/deeplearning4j-nn

private void copyParamsFromSubsetMLNToOrig() {
  for (int i = frozenInputLayer + 1; i < origMLN.getnLayers(); i++) {
    origMLN.getLayer(i).setParams(unFrozenSubsetMLN.getLayer(i - frozenInputLayer - 1).params());
  }
}

Code example source: org.deeplearning4j/deeplearning4j-nn

layer.fit(gv.getInputs()[0]);
layer.conf().setPretrain(false);

Code example source: org.deeplearning4j/deeplearning4j-ui_2.11

try {
  info.setName(layer.conf().getLayer().getLayerName());
} catch (Exception e) {
  //ignore: the layer name may be unavailable
}
if (layer.type().equals(Layer.Type.CONVOLUTIONAL)) {
  org.deeplearning4j.nn.conf.layers.ConvolutionLayer layer1 =
          (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) layer.conf().getLayer();
  mainLine.append("K: " + Arrays.toString(layer1.getKernelSize()) + " S: "
          + Arrays.toString(layer1.getStride()) + " P: " + Arrays.toString(layer1.getPadding()));
  fullLine.append("Inputs number: ").append(layer1.getNIn()).append("<br/>");
  fullLine.append("Outputs number: ").append(layer1.getNOut()).append("<br/>");
} else if (layer.conf().getLayer() instanceof SubsamplingLayer) {
  SubsamplingLayer layer1 = (SubsamplingLayer) layer.conf().getLayer();
  fullLine.append("Kernel size: ").append(Arrays.toString(layer1.getKernelSize())).append("<br/>");
  fullLine.append("Stride: ").append(Arrays.toString(layer1.getStride())).append("<br/>");
  fullLine.append("Padding: ").append(Arrays.toString(layer1.getPadding())).append("<br/>");
  fullLine.append("Pooling type: ").append(layer1.getPoolingType().toString()).append("<br/>");
} else if (layer.conf().getLayer() instanceof FeedForwardLayer) {
  org.deeplearning4j.nn.conf.layers.FeedForwardLayer layer1 =
          (org.deeplearning4j.nn.conf.layers.FeedForwardLayer) layer.conf().getLayer();
  mainLine.append("nIn/nOut: [" + layer1.getNIn() + "/" + layer1.getNOut() + "]");
  subLine.append(info.getLayerType());
} else {
  //output-layer branch; structure reconstructed from the truncated excerpt
  if (layer.conf().getLayer() instanceof org.deeplearning4j.nn.conf.layers.BaseOutputLayer) {
    mainLine.append("Outputs: ["
            + ((org.deeplearning4j.nn.conf.layers.BaseOutputLayer) layer.conf().getLayer())
                    .getNOut()
            + "]");
    fullLine.append("Outputs number: ").append(
            ((org.deeplearning4j.nn.conf.layers.BaseOutputLayer) layer.conf().getLayer()).getNOut())
            .append("<br/>");
  }
}

Code example source: org.deeplearning4j/deeplearning4j-nn

protected String layerId() {
  String name = insideLayer.conf().getLayer().getLayerName();
  return "(layer name: " + (name == null ? "\"\"" : name) + ", layer index: " + insideLayer.getIndex() + ")";
}

Code example source: org.deeplearning4j/deeplearning4j-modelimport

/**
 * Copy Keras layer weights to DL4J Layer.
 *
 * @param layer the DL4J layer to copy the stored Keras weights into
 * @throws InvalidKerasConfigurationException if the stored weights do not match the layer's parameters
 */
public void copyWeightsToLayer(org.deeplearning4j.nn.api.Layer layer) throws InvalidKerasConfigurationException {
  if (this.getNumParams() > 0) {
    String dl4jLayerName = layer.conf().getLayer().getLayerName();
    String kerasLayerName = this.getLayerName();
    String msg = "Error when attempting to copy weights from Keras layer " + kerasLayerName + " to DL4J layer "
            + dl4jLayerName;
    if (this.weights == null)
      throw new InvalidKerasConfigurationException(msg + "(weights is null)");
    Set<String> paramsInLayer = new HashSet<String>(layer.paramTable().keySet());
    Set<String> paramsInKerasLayer = new HashSet<String>(this.weights.keySet());
    /* Check for parameters in layer for which we don't have weights. */
    paramsInLayer.removeAll(paramsInKerasLayer);
    for (String paramName : paramsInLayer)
      throw new InvalidKerasConfigurationException(
              msg + "(no stored weights for parameter " + paramName + ")");
    /* Check for parameters NOT in layer for which we DO have weights. */
    paramsInKerasLayer.removeAll(layer.paramTable().keySet());
    for (String paramName : paramsInKerasLayer)
      throw new InvalidKerasConfigurationException(msg + "(found no parameter named " + paramName + ")");
    /* Copy weights. */
    for (String paramName : layer.paramTable().keySet())
      layer.setParam(paramName, this.weights.get(paramName));
  }
}
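
The copy loop at the end relies on Layer.setParam(name, array). As a standalone sketch (parameter names such as "W" and "b" are the usual keys for feed-forward layers; treat them as assumptions for other layer types):

INDArray w = layer.paramTable().get("W");   //current weight matrix
layer.setParam("W", w.mul(0.5));            //replace it with a scaled copy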

Code example source: org.deeplearning4j/deeplearning4j-nn

int range = layer.numParams();
if (range <= 0)
  continue; //some layers have no params
if (editedVertices.contains(layerName))
  continue; //keep the changed params
layer.setParams(origGraph.getLayer(layerName).params().dup()); //copy over origGraph params

Code example source: org.deeplearning4j/deeplearning4j-ui-model

} else if (model instanceof Layer) {
  Layer l = (Layer) model;
  jsonConf = l.conf().toJson();
  numLayers = 1;
  numParams = l.numParams();
} else {
  throw new RuntimeException("Invalid model: Expected MultiLayerNetwork or ComputationGraph. Got: "
          + model.getClass()); //completes the excerpt's truncated message with the model class
}
Code example source: org.deeplearning4j/deeplearning4j-nn

layer.setInput(input);
Nd4j.getRandom().setSeed(rngSeed);
layer.computeGradientAndScore();
Pair<Gradient, Double> gradAndScore = layer.gradientAndScore();
updater.update(layer, gradAndScore.getFirst(), 0, layer.batchSize());
INDArray originalParams = layer.params().dup(); //need dup: params are a *view* of full parameters
Map<String, INDArray> paramTable = layer.paramTable();
List<String> paramNames = new ArrayList<>(paramTable.keySet());
int[] paramEnds = new int[paramNames.size()];
int currParamNameIdx = 0;
INDArray params = layer.params(); //Assumption here: params is a view that we can modify in-place
for (int i = 0; i < nParams; i++) {
  //(excerpt truncated: parameter i is perturbed by +epsilon before this call)
  layer.computeGradientAndScore();
  double scorePlus = layer.score();
  //(excerpt truncated: parameter i is perturbed by -epsilon before this call)
  layer.computeGradientAndScore();
  double scoreMinus = layer.score();
}
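
The excerpt comes from a gradient check: each parameter is nudged by plus/minus epsilon and the resulting scores are compared. The numerical gradient it implies is the standard central difference (epsilon being the perturbation applied before computing scorePlus and scoreMinus):

//central-difference estimate for parameter i (sketch)
double numericalGradient = (scorePlus - scoreMinus) / (2 * epsilon);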

Code example source: org.deeplearning4j/deeplearning4j-nn

if (numParams > 0) { //guard implied by the excerpt's dangling "else" branch
  params = Nd4j.create(1, numParams);
  org.deeplearning4j.nn.api.Layer someLayer = layer.instantiate(layerConf, null, 0, params, true);
  appendParams.add(someLayer.params());
  appendConfs.add(someLayer.conf());
} else {
  appendConfs.add(layerConf);
}
Code example source: org.deeplearning4j/deeplearning4j-nn

if (!layer.isPretrainLayer())
  return;
layer.conf().setPretrain(true);
layer.fit(layerInput);
layer.conf().setPretrain(false);

Code example source: org.deeplearning4j/deeplearning4j-nn

@Override
public void setParams(INDArray params) {
  if (params == flattenedParams)
    return; //No op
  if (this.flattenedParams != null && this.flattenedParams.length() == params.length()) {
    this.flattenedParams.assign(params);
    return;
  }
  int idx = 0;
  for (int i = 0; i < topologicalOrder.length; i++) {
    if (!vertices[topologicalOrder[i]].hasLayer())
      continue;
    Layer layer = vertices[topologicalOrder[i]].getLayer();
    int range = layer.numParams();
    if (range <= 0)
      continue; //Some layers: no parameters (subsampling etc)
    INDArray get = params.get(NDArrayIndex.point(0), NDArrayIndex.interval(idx, range + idx));
    layer.setParams(get);
    idx += range;
  }
}
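
setParams() has a natural counterpart in params(), which returns the same flattened 1 x numParams array. A round-trip sketch (assuming a net variable as before):

INDArray flat = net.params();   //flattened array of all parameters
net.setParams(flat.dup());      //equal length, so the assign() fast path above is taken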
