In TensorFlow, how do you add a tensor/array to a tensor/array that already contains multiple tensors/arrays?












Here is a minimal example of what I'm trying to do. I use plain Python lists here to keep the code small, but I want to do this with TensorFlow tensors.



import tensorflow as tf

c1 = [[5,8], [7,4]]
c2 = [6,9]
c3 = tf.stack([c1, c2])
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run([c3]))


This is the error I get:



---------------------------------------------------------------------------
InvalidArgumentError Traceback (most recent call last)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py in _create_c_op(graph, node_def, inputs, control_inputs)
1658 try:
-> 1659 c_op = c_api.TF_FinishOperation(op_desc)
1660 except errors.InvalidArgumentError as e:

InvalidArgumentError: Shapes must be equal rank, but are 2 and 1
From merging shape 0 with other shapes. for 'stack_38' (op: 'Pack') with input shapes: [2,2], [2].

During handling of the above exception, another exception occurred:

ValueError Traceback (most recent call last)
<ipython-input-96-3acc40ce0738> in <module>()
1 c1 = [[5,8], [7,4]]
2 c2 = [6,9]
----> 3 c3= tf.stack( [c1, c2] )
4 with tf.Session( ) as sess:
5 sess.run(tf.global_variables_initializer())

/usr/local/lib/python3.6/dist-packages/tensorflow/python/util/dispatch.py in wrapper(*args, **kwargs)
178 """Call target, and fall back on dispatchers if there is a TypeError."""
179 try:
--> 180 return target(*args, **kwargs)
181 except (TypeError, ValueError):
182 # Note: convert_to_eager_tensor currently raises a ValueError, not a

/usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/array_ops.py in stack(values, axis, name)
1003 expanded_num_dims))
1004
-> 1005 return gen_array_ops.pack(values, axis=axis, name=name)
1006
1007

/usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/gen_array_ops.py in pack(values, axis, name)
5446 axis = _execute.make_int(axis, "axis")
5447 _, _, _op = _op_def_lib._apply_op_helper(
-> 5448 "Pack", values=values, axis=axis, name=name)
5449 _result = _op.outputs[:]
5450 _inputs_flat = _op.inputs

/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/op_def_library.py in _apply_op_helper(self, op_type_name, name, **keywords)
786 op = g.create_op(op_type_name, inputs, output_types, name=scope,
787 input_types=input_types, attrs=attr_protos,
--> 788 op_def=op_def)
789 return output_structure, op_def.is_stateful, op
790

/usr/local/lib/python3.6/dist-packages/tensorflow/python/util/deprecation.py in new_func(*args, **kwargs)
499 'in a future version' if date is None else ('after %s' % date),
500 instructions)
--> 501 return func(*args, **kwargs)
502
503 doc = _add_deprecated_arg_notice_to_docstring(

/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py in create_op(***failed resolving arguments***)
3298 input_types=input_types,
3299 original_op=self._default_original_op,
-> 3300 op_def=op_def)
3301 self._create_op_helper(ret, compute_device=compute_device)
3302 return ret

/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py in __init__(self, node_def, g, inputs, output_types, control_inputs, input_types, original_op, op_def)
1821 op_def, inputs, node_def.attr)
1822 self._c_op = _create_c_op(self._graph, node_def, grouped_inputs,
-> 1823 control_input_ops)
1824
1825 # Initialize self._outputs.

/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py in _create_c_op(graph, node_def, inputs, control_inputs)
1660 except errors.InvalidArgumentError as e:
1661 # Convert to ValueError for backwards compatibility.
-> 1662 raise ValueError(str(e))
1663
1664 return c_op

ValueError: Shapes must be equal rank, but are 2 and 1
From merging shape 0 with other shapes. for 'stack_38' (op: 'Pack') with input shapes: [2,2], [2].


I have also tried different axis values, and tf.concat as well. It seems they all require equal shapes in order to merge the tensors.



I am looking for a result like this:



c3=[[5,8], [7,4], [6,9] ]









      python tensorflow






asked Jan 1 at 23:25 by SantoshGupta7, edited Jan 2 at 1:59

1 Answer

I think what you want here is tf.concat. With that, the inputs need to have the same rank (number of dimensions), not the same shape, so promoting c2 to a 2-D array fixes this:



c1 = [[5,8], [7,4]]

# change c2 to be 2-D
c2 = [[6,9]]

# use concat
c3 = tf.concat([c1, c2], axis=0)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(c3))
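
Running the snippet above should print the merged array, something like:

[[5 8]
 [7 4]
 [6 9]]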


As a side note, if you are working with tensors (e.g. a placeholder or variable) rather than Python lists, you can use tf.expand_dims(c2, 0) to promote c2 to a 2-D tensor first.
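
For instance, here is a minimal sketch of that tensor case, assuming TensorFlow 1.x graph mode as in the question; the placeholder c2 below is just a hypothetical stand-in for whatever rank-1 tensor you already have:

import tensorflow as tf

c1 = tf.constant([[5, 8], [7, 4]])        # rank-2 tensor, shape [2, 2]
c2 = tf.placeholder(tf.int32, shape=[2])  # rank-1 tensor, shape [2]

# Promote c2 to shape [1, 2] so both inputs have rank 2, then concatenate along axis 0.
c2_2d = tf.expand_dims(c2, 0)
c3 = tf.concat([c1, c2_2d], axis=0)       # shape [3, 2]

with tf.Session() as sess:
    print(sess.run(c3, feed_dict={c2: [6, 9]}))  # [[5 8] [7 4] [6 9]]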






answered Jan 2 at 2:08 by Gerges, edited Jan 2 at 2:13

• Actually, I just tried to run this and got an error: InvalidArgumentError: Dimension 0 in both shapes must be equal, but are 2 and 1. Shapes are [2,2] and [1,2]. From merging shape 0 with other shapes. for 'stack_1' (op: 'Pack') with input shapes: [2,2], [1,2].
  – SantoshGupta7, Jan 2 at 2:12











• Sorry, my mistake: I mentioned you should use tf.concat but kept it as stack in the answer.
  – Gerges, Jan 2 at 2:14











• Ah, I see it now; it works now.
  – SantoshGupta7, Jan 2 at 2:14










