Caffe2 - Python API
A deep learning, cross-platform ML framework
nomnigraph.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from caffe2.python import core
from caffe2.proto import caffe2_pb2
import caffe2.python._import_c_extension as C  # assumed: the C++ nomnigraph bindings referenced as `C` below
import os
from subprocess import Popen, PIPE
import errno


class NNModule(object):
    def __init__(self, net=None, device_map=None):
        if net is not None:
            serialized_proto = None
            if isinstance(net, core.Net):
                serialized_proto = net.Proto().SerializeToString()
            elif isinstance(net, caffe2_pb2.NetDef):
                serialized_proto = net.SerializeToString()

            # Distributed
            if device_map is not None:
                serialized_device_map = {}
                for k in device_map:
                    serialized_device_map[k] = device_map[k].SerializeToString()
                self._NNModule = C.NNModuleFromProtobufDistributed(serialized_proto,
                                                                   serialized_device_map)
            # Default
            elif serialized_proto:
                self._NNModule, self._OpList = C.NNModuleFromProtobuf(serialized_proto)
            else:
                raise Exception(
                    "NNModule can be constructed with core.Net or caffe2_pb2.NetDef types"
                )
        else:
            self._NNModule = C.NNModule()

    @property
    def dataFlow(self):
        return self._NNModule.dataFlow()

    @property
    def controlFlow(self):
        return self._NNModule.getExecutionOrder()

    @property
    def nodes(self):
        return self._NNModule.dataFlow().nodes

    @property
    def operators(self):
        return self._NNModule.dataFlow().operators

    @property
    def tensors(self):
        return self._NNModule.dataFlow().tensors

    def createNode(self, val):
        return self._NNModule.dataFlow().createNode(val)

    def deleteNode(self, node):
        return self._NNModule.dataFlow().deleteNode(node)

    def createEdge(self, a, b):
        return self._NNModule.dataFlow().createEdge(a, b)

    def deleteEdge(self, a, b=None):
        if b:
            self._NNModule.dataFlow().deleteEdge(a, b)
        else:
            self._NNModule.dataFlow().deleteEdge(a)

    def replaceNode(self, old_node, new_node):
        return self._NNModule.dataFlow().replaceNode(old_node, new_node)

    def replaceProducer(self, tensor, new_producer):
        C.replaceProducer(tensor, new_producer)

    def replaceAllUsesWith(self, old_tensor, new_tensor):
        C.replaceAllUsesWith(old_tensor, new_tensor)

    def replaceAsConsumer(self, old_consumer, new_consumer):
        C.replaceAsConsumer(old_consumer, new_consumer)

    def replaceSubgraph(self, subgraph, new_node, inputs, outputs):
        self._NNModule.replaceSubgraph(subgraph, new_node, inputs, outputs)

    def deleteSubgraph(self, subgraph):
        self._NNModule.deleteSubgraph(subgraph)

    def createUniqueDataNode(self, prefix="_unique"):
        return self._NNModule.createUniqueDataNode(prefix)

    def convertToCaffe2Proto(self, old_proto=None):
        if not old_proto:
            old_proto = caffe2_pb2.NetDef()
        output = self._NNModule.convertToCaffe2Proto(old_proto)
        new_proto = caffe2_pb2.NetDef()
        new_proto.ParseFromString(output)
        return new_proto

    def match(self, pattern):
        for n in self.dataFlow.getMutableNodes():
            m = C.matchSubgraph(n, pattern)
            if m:
                yield m


def render(s):
    s = str(s)
    cmd_exists = lambda x: any(
        os.access(os.path.join(path, x), os.X_OK)
        for path in os.environ["PATH"].split(os.pathsep)
    )
    if cmd_exists("graph-easy"):
        p = Popen("graph-easy", stdin=PIPE)
        try:
            p.stdin.write(s.encode("utf-8"))
        except IOError as e:
            if e.errno == errno.EPIPE or e.errno == errno.EINVAL:
                pass
            else:
                # Raise any other error.
                raise

        p.stdin.close()
        p.wait()
    else:
        print(s)


NeuralNetOperator = C.NeuralNetOperator
Operator = C.NeuralNetOperator
NeuralNetData = C.NeuralNetData
Data = C.NeuralNetData
NNSubgraph = C.NNSubgraph
NNMatchGraph = C.NNMatchGraph
Graph = C.Graph
Annotation = C.Annotation
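
The listing above is the complete module. The sketch below is not part of nomnigraph.py; it is a minimal usage example, assuming a Caffe2 build with the C extension available. The net and blob names ("example", "X", "W", "b", "Y") are illustrative.

from caffe2.python import core
from caffe2.python import nomnigraph as ng

# Build a small net with a single FC operator (names are illustrative).
net = core.Net("example")
net.FC(["X", "W", "b"], ["Y"])

# Wrap the net in an NNModule to obtain a mutable dataflow graph.
nn = ng.NNModule(net)

# Walk the operator nodes discovered in the graph.
for op in nn.operators:
    print(op.name)

# Convert the (possibly rewritten) graph back into a caffe2_pb2.NetDef;
# the original proto is forwarded to the C++ converter.
new_proto = nn.convertToCaffe2Proto(net.Proto())

# render() pipes str(...) of its argument through graph-easy when that
# tool is on PATH, and simply prints the string otherwise.
ng.render(nn.dataFlow)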