Caffe2 - C++ API
A deep learning, cross-platform ML framework
elementwise_ops_schema.cc
#include "caffe2/operators/elementwise_ops.h"

#include "caffe2/core/operator_gradient.h"
#include "caffe2/utils/proto_utils.h"

namespace caffe2 {

namespace {

const char kBroadcastDoc[] = R"DOC(
If necessary, the right-hand-side argument will be broadcast to match the
shape of the left-hand-side argument. When broadcasting is specified, the
second tensor can either be of size 1 (a scalar value) or have its shape be a
contiguous subset of the first tensor's shape. The start of the mutually
equal shape is specified by the argument "axis"; if it is not set, suffix
matching is assumed. 1-dim expansion doesn't work yet.

For example, the following tensor shapes are supported (with broadcast=1):
```
 shape(A) = (2, 3, 4, 5), shape(B) = (,), i.e. B is a scalar
 shape(A) = (2, 3, 4, 5), shape(B) = (5,)
 shape(A) = (2, 3, 4, 5), shape(B) = (4, 5)
 shape(A) = (2, 3, 4, 5), shape(B) = (3, 4), with axis=1
 shape(A) = (2, 3, 4, 5), shape(B) = (2,), with axis=0
```
Argument `broadcast=1` needs to be passed to enable broadcasting.
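
As an illustration, here is a minimal sketch of broadcasting with an explicit
axis, shown with `Add` (an editorial example, not part of the original schema;
it assumes the same `core`, `workspace`, and NumPy setup used in the operator
examples below):
```

workspace.ResetWorkspace()

# With broadcast=1 and axis=1, B's shape (3, 4) is matched against A's
# dimensions starting at axis 1, i.e. (2, [3, 4], 5).
op = core.CreateOperator("Add", ["A", "B"], ["C"], broadcast=1, axis=1)

workspace.FeedBlob("A", np.random.rand(2, 3, 4, 5).astype(np.float32))
workspace.FeedBlob("B", np.random.rand(3, 4).astype(np.float32))
workspace.RunOperatorOnce(op)
print("C shape:", workspace.FetchBlob("C").shape)  # expected: (2, 3, 4, 5)

```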

Github Links:

- https://github.com/pytorch/pytorch/blob/master/caffe2/operators/elementwise_op_schema.cc

)DOC";

const char kAddExample[] = R"DOC(
<details>

<summary> <b>Example</b> </summary>

**Code**

```

workspace.ResetWorkspace()

op = core.CreateOperator(
    "Add",
    ["A", "B"],
    ["C"],
)

workspace.FeedBlob("A", np.array([[1,2],[3,4]]))
workspace.FeedBlob("B", np.array([[5,6],[7,8]]))
print("A:", workspace.FetchBlob("A"))
print("B:", workspace.FetchBlob("B"))
workspace.RunOperatorOnce(op)
print("C:", workspace.FetchBlob("C"))

```

**Result**

```

A:
[[1 2]
 [3 4]]
B:
[[5 6]
 [7 8]]
C:
[[ 6  8]
 [10 12]]

```

</details>

)DOC";

const char kSubExample[] = R"DOC(
<details>

<summary> <b>Example</b> </summary>

**Code**

```

workspace.ResetWorkspace()

op = core.CreateOperator(
    "Sub",
    ["A", "B"],
    ["C"],
)

workspace.FeedBlob("A", np.array([[10,12],[4,14]]))
workspace.FeedBlob("B", np.array([[5,16],[1,19]]))
print("A:", workspace.FetchBlob("A"))
print("B:", workspace.FetchBlob("B"))
workspace.RunOperatorOnce(op)
print("C:", workspace.FetchBlob("C"))

```

**Result**

```

A:
[[10 12]
 [ 4 14]]
B:
[[ 5 16]
 [ 1 19]]
C:
[[ 5 -4]
 [ 3 -5]]

```

</details>

)DOC";

const char kMulExample[] = R"DOC(
<details>

<summary> <b>Example</b> </summary>

**Code**

```

workspace.ResetWorkspace()

op = core.CreateOperator(
    "Mul",
    ["A", "B"],
    ["C"],
)

workspace.FeedBlob("A", np.array([[1,2],[3,4]]))
workspace.FeedBlob("B", np.array([[5,6],[7,8]]))
print("A:", workspace.FetchBlob("A"))
print("B:", workspace.FetchBlob("B"))
workspace.RunOperatorOnce(op)
print("C:", workspace.FetchBlob("C"))

```

**Result**

```

A:
[[1 2]
 [3 4]]
B:
[[5 6]
 [7 8]]
C:
[[ 5 12]
 [21 32]]

```

</details>

)DOC";

const char kDivExample[] = R"DOC(
<details>

<summary> <b>Example</b> </summary>

**Code**

```

workspace.ResetWorkspace()

op = core.CreateOperator(
    "Div",
    ["A", "B"],
    ["C"],
)

workspace.FeedBlob("A", np.array([[18,8],[2,9]]))
workspace.FeedBlob("B", np.array([[9,2],[3,2]]))
print("A:", workspace.FetchBlob("A"))
print("B:", workspace.FetchBlob("B"))
workspace.RunOperatorOnce(op)
print("C:", workspace.FetchBlob("C"))

```

**Result**

```

A:
[[18  8]
 [ 2  9]]
B:
[[9 2]
 [3 2]]
C:
[[2 4]
 [0 4]]

```

Note that the inputs here are integer tensors, so the division is integer
division (e.g. 2 / 3 = 0 and 9 / 2 = 4), as the output shows.

</details>
)DOC";

std::function<void(OpSchema&)> MathDocGenerator(const char* name, const char* extra) {
  return [=](OpSchema& schema) {
    string doc = R"DOC(
Performs element-wise binary {name} (with limited broadcast support).
{broadcast_doc}

{extra}
)DOC";
    // Fill the {name}, {broadcast_doc}, and {extra} placeholders in the
    // template above before installing it as the schema doc.
    c10::ReplaceAll(doc, "{name}", name);
    c10::ReplaceAll(doc, "{broadcast_doc}", kBroadcastDoc);
    c10::ReplaceAll(doc, "{extra}", extra);
    schema.SetDoc(doc);
    schema.Arg("broadcast", "*(type: int; default: 0)* Pass 1 to enable broadcasting.");
    schema.Arg(
        "axis",
        "*(type: int; default: -1)* Axis at which the shapes of `A` and `B` "
        "start to match for broadcasting; by default, suffix matching is used.");
    schema.Input(
        0,
        "A",
        "*(type: Tensor`<float>`)* First operand, should share the type with the second operand.");
    schema.Input(
        1,
        "B",
        "*(type: Tensor`<float>`)* Second operand. With broadcasting can be of smaller size than A. "
        "If broadcasting is disabled it should be of the same size as A.");
    schema.Output(0, "C", "*(type: Tensor`<float>`)* Output tensor with same dimensions and type as A.");
  };
}

std::vector<TensorShape> ElementwiseOpShapeInference(
    const OperatorDef& def,
    const std::vector<TensorShape>& in) {
  std::vector<TensorShape> out(1);
  out[0].set_data_type(in[0].data_type());
  ArgumentHelper helper(def);
  const bool broadcast = helper.GetSingleArgument<bool>("broadcast", false);
  if (broadcast) {
    // Legacy broadcasting: B is broadcast onto A, so the output shape is
    // exactly A's shape.
    out[0].mutable_dims()->CopyFrom(in[0].dims());
  } else {
    // Otherwise, infer the output shape via numpy-style binary broadcasting.
    const std::vector<int> A_dims(in[0].dims().begin(), in[0].dims().end());
    const std::vector<int> B_dims(in[1].dims().begin(), in[1].dims().end());
    const std::vector<int> C_dims =
        elementwise_ops_utils::ComputeBinaryBroadcastForwardDims(
            A_dims, B_dims);
    for (const int dim : C_dims) {
      out[0].add_dims(dim);
    }
  }
  return out;
}
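
// Editorial note (illustrative, not in the original source): with
// in = {(2, 3, 4, 5), (5,)} and no "broadcast" argument, the numpy-style
// branch yields output dims (2, 3, 4, 5); with broadcast=1 the output shape
// is copied directly from A.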

} // namespace

OPERATOR_SCHEMA(Add)
    .NumInputs(2)
    .NumOutputs(1)
    .AllowInplace({{0, 0}, {1, 0}})
    .CostInferenceFunction(PointwiseCostInference<1>)
    .TensorInferenceFunction(ElementwiseOpShapeInference)
    .FillUsing(MathDocGenerator("addition", kAddExample))
    .InheritOnnxSchema();
OPERATOR_SCHEMA(AddGradient)
    .NumInputs(3)
    .NumOutputs(2)
    .AllowInplace({{0, 0}, {0, 1}});

OPERATOR_SCHEMA(Sub)
    .NumInputs(2)
    .NumOutputs(1)
    .AllowInplace({{0, 0}, {1, 0}})
    .CostInferenceFunction(PointwiseCostInference<1>)
    .TensorInferenceFunction(ElementwiseOpShapeInference)
    .FillUsing(MathDocGenerator("subtraction", kSubExample))
    .InheritOnnxSchema();
OPERATOR_SCHEMA(SubGradient)
    .NumInputs(3)
    .NumOutputs(2)
    .AllowInplace({{0, 0}, {0, 1}});

OPERATOR_SCHEMA(Mul)
    .NumInputs(2)
    .NumOutputs(1)
    .AllowInplace({{0, 0}, {1, 0}})
    .CostInferenceFunction(PointwiseCostInference<1>)
    .TensorInferenceFunction(ElementwiseOpShapeInference)
    .FillUsing(MathDocGenerator("multiplication", kMulExample))
    .InheritOnnxSchema();
OPERATOR_SCHEMA(MulGradient)
    .NumInputs(3)
    .NumOutputs(2)
    .AllowInplace({{0, 0}, {0, 1}});

OPERATOR_SCHEMA(Div)
    .NumInputs(2)
    .NumOutputs(1)
    .AllowInplace({{0, 0}})
    .CostInferenceFunction(PointwiseCostInference<1>)
    .TensorInferenceFunction(ElementwiseOpShapeInference)
    .FillUsing(MathDocGenerator("division", kDivExample))
    .InheritOnnxSchema();
OPERATOR_SCHEMA(DivGradient)
    .NumInputs(3, 4)
    .NumOutputs(2)
    .AllowInplace({{0, 0}});

OPERATOR_SCHEMA(SumReduceLike)
    .NumInputs(2)
    .NumOutputs(1)
    .IdenticalTypeAndShapeOfInput(0)
    .SetDoc(R"DOC(
The SumReduceLike operator takes two tensors as input. It sum-reduces the
first input so that the output has the same shape as the second.
It assumes that the first input has more dimensions than the second, and that
the dimensions of the second input are a contiguous subset of the dimensions
of the first.
For example, the following tensor shapes are supported:

 shape(A) = (2, 3, 4, 5), shape(B) = (4, 5)
 shape(A) = (2, 3, 4, 5), shape(B) = (,), i.e. B is a scalar
 shape(A) = (2, 3, 4, 5), shape(B) = (3, 4), with axis=1
 shape(A) = (2, 3, 2, 5), shape(B) = (2,), with axis=0
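
As an illustration, a minimal usage sketch (an editorial example based on the
semantics above, not from the original doc; it assumes the same
`core`/`workspace`/NumPy setup as the other examples in this file):
```

workspace.ResetWorkspace()

op = core.CreateOperator("SumReduceLike", ["A", "B"], ["C"])

# B has shape (2,), which suffix-matches the last dimension of A (2, 2),
# so A is summed over its leading dimension.
workspace.FeedBlob("A", np.array([[1, 2], [3, 4]], dtype=np.float32))
workspace.FeedBlob("B", np.array([1, 1], dtype=np.float32))
workspace.RunOperatorOnce(op)
print("C:", workspace.FetchBlob("C"))  # expected: [4. 6.]

```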
)DOC")
    .Arg(
        "axis",
        "If set, defines the starting dimension for reduction. Args `axis` and "
        "`axis_str` cannot be used simultaneously.")
    .Arg(
        "axis_str",
        "If set, it can only be N, C, H, or W. The `order` arg should also be "
        "provided. It defines the reduction dimensions for the NCHW or NHWC "
        "layout. Args `axis` and `axis_str` cannot be used simultaneously.")
    .Arg("order", "Either NHWC or NCHW.")
    .Input(
        0,
        "A",
        "First operand, should share the type with the second operand.")
    .Input(
        1,
        "B",
        "Second operand. With broadcasting can be of smaller size than A. "
        "If broadcasting is disabled it should be of the same size.")
    .Output(0, "C", "Result, has same dimensions and type as B.");

const char kLTExample[] = R"DOC(
<details>

<summary> <b>Example</b> </summary>

**Code**

```

workspace.ResetWorkspace()

op = core.CreateOperator(
    "LT",
    ["A", "B"],
    ["C"],
)

workspace.FeedBlob("A", np.array([1, 5, 2, 9, 12, 3]))
workspace.FeedBlob("B", np.array([1, 3, 4, 9, 12, 8]))
print("A:", workspace.FetchBlob("A"))
print("B:", workspace.FetchBlob("B"))
workspace.RunOperatorOnce(op)
print("C:", workspace.FetchBlob("C"))

```

**Result**

```

A: [ 1  5  2  9 12  3]
B: [ 1  3  4  9 12  8]
C: [False False  True False False  True]

```

</details>
)DOC";

const char kLEExample[] = R"DOC(
<details>

<summary> <b>Example</b> </summary>

**Code**

```

workspace.ResetWorkspace()

op = core.CreateOperator(
    "LE",
    ["A", "B"],
    ["C"],
)

workspace.FeedBlob("A", np.array([1, 5, 2, 9, 12, 3]))
workspace.FeedBlob("B", np.array([1, 3, 4, 9, 12, 8]))
print("A:", workspace.FetchBlob("A"))
print("B:", workspace.FetchBlob("B"))
workspace.RunOperatorOnce(op)
print("C:", workspace.FetchBlob("C"))

```

**Result**

```

A: [ 1  5  2  9 12  3]
B: [ 1  3  4  9 12  8]
C: [ True False  True  True  True  True]

```

</details>
)DOC";

const char kGTExample[] = R"DOC(
<details>

<summary> <b>Example</b> </summary>

**Code**

```

workspace.ResetWorkspace()

op = core.CreateOperator(
    "GT",
    ["A", "B"],
    ["C"],
)

workspace.FeedBlob("A", np.array([1, 5, 2, 9, 12, 3]))
workspace.FeedBlob("B", np.array([1, 3, 4, 9, 12, 8]))
print("A:", workspace.FetchBlob("A"))
print("B:", workspace.FetchBlob("B"))
workspace.RunOperatorOnce(op)
print("C:", workspace.FetchBlob("C"))

```

**Result**

```

A: [ 1  5  2  9 12  3]
B: [ 1  3  4  9 12  8]
C: [False  True False False False False]

```

</details>
)DOC";

const char kGEExample[] = R"DOC(
<details>

<summary> <b>Example</b> </summary>

**Code**

```

workspace.ResetWorkspace()

op = core.CreateOperator(
    "GE",
    ["A", "B"],
    ["C"],
)

workspace.FeedBlob("A", np.array([1, 5, 2, 9, 12, 3]))
workspace.FeedBlob("B", np.array([1, 3, 4, 9, 12, 8]))
print("A:", workspace.FetchBlob("A"))
print("B:", workspace.FetchBlob("B"))
workspace.RunOperatorOnce(op)
print("C:", workspace.FetchBlob("C"))

```

**Result**

```

A: [ 1  5  2  9 12  3]
B: [ 1  3  4  9 12  8]
C: [ True  True False  True  True False]

```

</details>
)DOC";

const char kEQExample[] = R"DOC(
<details>

<summary> <b>Example</b> </summary>

**Code**

```

workspace.ResetWorkspace()

op = core.CreateOperator(
    "EQ",
    ["A", "B"],
    ["C"],
)

workspace.FeedBlob("A", np.array([1, 5, 2, 9, 12, 3]))
workspace.FeedBlob("B", np.array([1, 3, 4, 9, 12, 8]))
print("A:", workspace.FetchBlob("A"))
print("B:", workspace.FetchBlob("B"))
workspace.RunOperatorOnce(op)
print("C:", workspace.FetchBlob("C"))

```

**Result**

```
A: [ 1  5  2  9 12  3]
B: [ 1  3  4  9 12  8]
C: [ True False False  True  True False]
```

</details>
)DOC";

const char kNEExample[] = R"DOC(
<details>

<summary> <b>Example</b> </summary>

**Code**

```
workspace.ResetWorkspace()

op = core.CreateOperator(
    "NE",
    ["A", "B"],
    ["C"],
)

workspace.FeedBlob("A", np.array([1, 5, 2, 9, 12, 3]))
workspace.FeedBlob("B", np.array([1, 3, 4, 9, 12, 8]))
print("A:", workspace.FetchBlob("A"))
print("B:", workspace.FetchBlob("B"))
workspace.RunOperatorOnce(op)
print("C:", workspace.FetchBlob("C"))

```

**Result**

```
A: [ 1  5  2  9 12  3]
B: [ 1  3  4  9 12  8]
C: [False  True  True False False  True]
```

</details>
)DOC";

std::function<void(OpSchema&)> ComparisonDocGenerator(
    const char* name,
    const char* desc,
    const char* extra) {
  return [=](OpSchema& schema) {
    string doc = R"DOC(
Performs element-wise {desc} comparison **{name}** (with limited broadcast support).

{broadcast_doc}

{extra}
)DOC";
    c10::ReplaceAll(doc, "{name}", name);
    c10::ReplaceAll(doc, "{desc}", desc);
    c10::ReplaceAll(doc, "{broadcast_doc}", kBroadcastDoc);
    c10::ReplaceAll(doc, "{extra}", extra);
    schema.SetDoc(doc);
    schema.Arg("broadcast", "*(type: int; default: 0)* Pass 1 to enable broadcasting.");
    schema.Arg(
        "axis",
        "*(type: int; default: -1)* Axis at which the shapes of `A` and `B` "
        "start to match for broadcasting; by default, suffix matching is used.");
    schema.Input(
        0,
        "A",
        "*(type: Tensor)* First operand, should share the type with the second operand.");
    schema.Input(
        1,
        "B",
        "*(type: Tensor)* Second operand. With broadcasting can be of smaller size than `A`. "
        "If broadcasting is disabled it should be of the same size.");
    schema.Output(0, "C", "*(type: Tensor`<bool>`)* Output tensor with same dimensions as `A`.");
  };
}

#define CAFFE2_SCHEMA_FOR_BINARY_COMPARISON_OP(name, symbol, desc, extra)      \
  OPERATOR_SCHEMA(name)                                                        \
      .NumInputs(2)                                                            \
      .NumOutputs(1)                                                           \
      .TensorInferenceFunction(                                                \
          [](const OperatorDef& def, const vector<TensorShape>& in) {          \
            ArgumentHelper helper(def);                                        \
            const auto broadcasted =                                           \
                helper.GetSingleArgument<bool>("broadcast", false);            \
            if (!broadcasted) {                                                \
              CAFFE_ENFORCE_EQ(in[0].dims().size(), in[1].dims().size());      \
              for (int i = 0; i < in[0].dims().size(); ++i) {                  \
                CAFFE_ENFORCE_EQ(in[0].dims(i), in[1].dims(i));                \
              }                                                                \
            }                                                                  \
            auto output_dims =                                                 \
                std::vector<int64_t>(in[0].dims().begin(), in[0].dims().end()); \
            return vector<TensorShape>{                                        \
                CreateTensorShape(output_dims, TensorProto::BOOL)};            \
          })                                                                   \
      .FillUsing(ComparisonDocGenerator(symbol, desc, extra));                 \
  SHOULD_NOT_DO_GRADIENT(name)

CAFFE2_SCHEMA_FOR_BINARY_COMPARISON_OP(EQ, "==", "equal to", kEQExample);
CAFFE2_SCHEMA_FOR_BINARY_COMPARISON_OP(NE, "!=", "not equal to", kNEExample);
CAFFE2_SCHEMA_FOR_BINARY_COMPARISON_OP(LT, "<", "less than", kLTExample);
CAFFE2_SCHEMA_FOR_BINARY_COMPARISON_OP(LE, "<=", "less than or equal to", kLEExample);
CAFFE2_SCHEMA_FOR_BINARY_COMPARISON_OP(GT, ">", "greater than", kGTExample);
CAFFE2_SCHEMA_FOR_BINARY_COMPARISON_OP(GE, ">=", "greater than or equal to", kGEExample);

const char kAndExample[] = R"DOC(
<details>

<summary> <b>Example</b> </summary>

**Code**

```

workspace.ResetWorkspace()

op = core.CreateOperator(
    "And",
    ["A", "B"],
    ["C"],
)

workspace.FeedBlob("A", (np.random.rand(3, 3) > 0.5))
workspace.FeedBlob("B", (np.random.rand(3, 3) > 0.5))
print("A:", workspace.FetchBlob("A"))
print("B:", workspace.FetchBlob("B"))
workspace.RunOperatorOnce(op)
print("C:", workspace.FetchBlob("C"))

```

**Result**

```

A:
 [[ True False False]
 [False  True False]
 [False False  True]]
B:
 [[ True False  True]
 [False False False]
 [False False False]]
C:
 [[ True False False]
 [False False False]
 [False False False]]

```

</details>
)DOC";

const char kOrExample[] = R"DOC(
<details>

<summary> <b>Example</b> </summary>

**Code**

```

workspace.ResetWorkspace()

op = core.CreateOperator(
    "Or",
    ["A", "B"],
    ["C"],
)

workspace.FeedBlob("A", (np.random.rand(3, 3) > 0.5))
workspace.FeedBlob("B", (np.random.rand(3, 3) > 0.5))
print("A:", workspace.FetchBlob("A"))
print("B:", workspace.FetchBlob("B"))
workspace.RunOperatorOnce(op)
print("C:", workspace.FetchBlob("C"))

```

**Result**

```

A:
[[False  True  True]
 [False  True  True]
 [ True  True  True]]
B:
[[False  True False]
 [ True  True  True]
 [False  True False]]
C:
[[False  True  True]
 [ True  True  True]
 [ True  True  True]]

```

</details>
)DOC";

const char kXorExample[] = R"DOC(
<details>

<summary> <b>Example</b> </summary>

**Code**

```

workspace.ResetWorkspace()

op = core.CreateOperator(
    "Xor",
    ["A", "B"],
    ["C"],
)

workspace.FeedBlob("A", (np.random.rand(3, 3) > 0.5))
workspace.FeedBlob("B", (np.random.rand(3, 3) > 0.5))
print("A:", workspace.FetchBlob("A"))
print("B:", workspace.FetchBlob("B"))
workspace.RunOperatorOnce(op)
print("C:", workspace.FetchBlob("C"))

```

**Result**

```

A:
[[ True  True  True]
 [False False  True]
 [False  True False]]
B:
[[False False False]
 [ True  True  True]
 [False False False]]
C:
[[ True  True  True]
 [ True  True False]
 [False  True False]]

```

</details>
)DOC";

std::function<void(OpSchema&)> LogicalDocGenerator(const char* name, const char* extra) {
  return [=](OpSchema& schema) {
    string doc = R"DOC(
Performs element-wise logical operation **{name}** (with limited broadcast support).
Both input operands should be of type `bool`.

{broadcast_doc}

{extra}
)DOC";
    c10::ReplaceAll(doc, "{name}", name);
    c10::ReplaceAll(doc, "{broadcast_doc}", kBroadcastDoc);
    c10::ReplaceAll(doc, "{extra}", extra);
    schema.SetDoc(doc);
    schema.Arg("broadcast", "*(type: int; default: 0)* Pass 1 to enable broadcasting.");
    schema.Arg(
        "axis",
        "*(type: int; default: -1)* Axis at which the shapes of `A` and `B` "
        "start to match for broadcasting; by default, suffix matching is used.");
    schema.Input(0, "A", "*(type: Tensor`<bool>`)* First operand.");
    schema.Input(
        1,
        "B",
        "*(type: Tensor`<bool>`)* Second operand. With broadcasting can be of smaller size than `A`. "
        "If broadcasting is disabled it should be of the same size.");
    schema.Output(0, "C", "*(type: Tensor`<bool>`)* Output tensor of booleans. Has same dimensions as input `A`.");
  };
}

#define CAFFE2_SCHEMA_FOR_BINARY_LOGICAL_OP(name, symbol, onnx_schema, extra) \
  OPERATOR_SCHEMA(name)                                                       \
      .NumInputs(2)                                                           \
      .NumOutputs(1)                                                          \
      .AllowInplace({{0, 0}})                                                 \
      .FillUsing(LogicalDocGenerator(symbol, extra))                          \
      .TensorInferenceFunction(ElementwiseOpShapeInference)                   \
      .InheritOnnxSchema(onnx_schema);                                        \
  SHOULD_NOT_DO_GRADIENT(name)

CAFFE2_SCHEMA_FOR_BINARY_LOGICAL_OP(Or, "or", "Or", kOrExample);
CAFFE2_SCHEMA_FOR_BINARY_LOGICAL_OP(And, "and", "And", kAndExample);
CAFFE2_SCHEMA_FOR_BINARY_LOGICAL_OP(Xor, "xor", "Xor", kXorExample);

#undef CAFFE2_SCHEMA_FOR_BINARY_LOGICAL_OP

std::function<void(OpSchema&)> BitwiseDocGenerator(const char* name) {
  return [=](OpSchema& schema) {
    string doc = R"DOC(
Performs element-wise bitwise operation `{name}` (with limited broadcast support).
Both input operands should be of an integral type (`bool` included).
{broadcast_doc})DOC";
    c10::ReplaceAll(doc, "{name}", name);
    c10::ReplaceAll(doc, "{broadcast_doc}", kBroadcastDoc);
    schema.SetDoc(doc);
    schema.Arg("broadcast", "*(type: int; default: 0)* Pass 1 to enable broadcasting.");
    schema.Arg(
        "axis",
        "*(type: int; default: -1)* Axis at which the shapes of `A` and `B` "
        "start to match for broadcasting; by default, suffix matching is used.");
    schema.Input(0, "A", "*(type: Tensor)* First operand.");
    schema.Input(
        1,
        "B",
        "*(type: Tensor)* Second operand. With broadcasting can be of smaller size than `A`. "
        "If broadcasting is disabled it should be of the same size.");
    schema.Output(0, "C", "*(type: Tensor)* Output tensor. Has same dimensions as input `A`.");
  };
}

#define CAFFE2_SCHEMA_FOR_BINARY_BITWISE_OP(name, symbol) \
  OPERATOR_SCHEMA(name)                                   \
      .NumInputs(2)                                       \
      .NumOutputs(1)                                      \
      .AllowInplace({{0, 0}})                             \
      .FillUsing(BitwiseDocGenerator(symbol));            \
  SHOULD_NOT_DO_GRADIENT(name)

CAFFE2_SCHEMA_FOR_BINARY_BITWISE_OP(BitwiseOr, "bitwise_or");
CAFFE2_SCHEMA_FOR_BINARY_BITWISE_OP(BitwiseAnd, "bitwise_and");
CAFFE2_SCHEMA_FOR_BINARY_BITWISE_OP(BitwiseXor, "bitwise_xor");

#undef CAFFE2_SCHEMA_FOR_BINARY_BITWISE_OP
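
// Editorial note: unlike the arithmetic, comparison, and logical ops above,
// the bitwise schemas ship without an embedded example. A minimal usage
// sketch (an assumption, mirroring the other examples in this file and
// presuming the kernels accept int32 inputs):
//
//   workspace.ResetWorkspace()
//   op = core.CreateOperator("BitwiseAnd", ["A", "B"], ["C"])
//   workspace.FeedBlob("A", np.array([1, 2, 3], dtype=np.int32))
//   workspace.FeedBlob("B", np.array([3, 3, 3], dtype=np.int32))
//   workspace.RunOperatorOnce(op)
//   print("C:", workspace.FetchBlob("C"))  # expected: [1 2 3]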

OPERATOR_SCHEMA(Not)
    .NumInputs(1)
    .NumOutputs(1)
    .SetDoc(R"DOC(
Performs element-wise logical negation on input tensor `X`.

Github Links:

- https://github.com/pytorch/pytorch/blob/master/caffe2/operators/elementwise_op_schema.cc

<details>

<summary> <b>Example</b> </summary>

**Code**

```

workspace.ResetWorkspace()

op = core.CreateOperator(
    "Not",
    ["X"],
    ["Y"],
)

workspace.FeedBlob("X", (np.random.rand(3, 3) > 0.5))
print("X:", workspace.FetchBlob("X"))
workspace.RunOperatorOnce(op)
print("Y:", workspace.FetchBlob("Y"))

```

**Result**

```

X:
[[ True False False]
 [False False False]
 [ True  True  True]]
Y:
[[False  True  True]
 [ True  True  True]
 [False False False]]

```

</details>

)DOC")
    .Input(0, "X", "*(Tensor`<bool>`)* Input tensor.")
    .Output(0, "Y", "*(Tensor`<bool>`)* Negated output tensor.")
    .InheritOnnxSchema();
SHOULD_NOT_DO_GRADIENT(Not);

OPERATOR_SCHEMA(Sign)
    .NumInputs(1)
    .NumOutputs(1)
    .SetDoc(R"DOC(
Computes the sign of each element of the input: -1, 0, or 1.

Github Link:
- https://github.com/pytorch/pytorch/blob/master/caffe2/operators/elementwise_op_schema.cc

<details>

<summary> <b>Example</b> </summary>

**Code**

```

workspace.ResetWorkspace()

op = core.CreateOperator(
    "Sign",
    ["X"],
    ["Y"],
)

workspace.FeedBlob("X", (np.random.rand(3, 3).astype(np.float32) - np.random.rand(3, 3).astype(np.float32)))
print("X:", workspace.FetchBlob("X"))
workspace.RunOperatorOnce(op)
print("Y:", workspace.FetchBlob("Y"))

```

**Result**

```

X:
[[ 0.02816287  0.22408086 -0.30342305]
 [-0.18481976  0.03948995  0.39698976]
 [-0.63304734 -0.6919183  -0.31524038]]
Y:
[[ 1.  1. -1.]
 [-1.  1.  1.]
 [-1. -1. -1.]]

```

</details>

)DOC")
    .Input(0, "X", "*(type: Tensor`<float>`)* Input data tensor.")
    .Output(0, "Y", "*(type: Tensor`<float>`)* Output tensor.");
SHOULD_NOT_DO_GRADIENT(Sign);

} // namespace caffe2