I have a workflow with a ParallelGroup. I instantiate many times the same Component inside it and pass each a different input.
I am using the option prob.setup(vector_class=PETScVector, check=False, mode='fwd') like in the example.
I get the following error:
Traceback (most recent call last):
File "workflow.py", line 73, in <module>
prob.run_model()
File "/usr/local/lib/python2.7/dist-packages/openmdao/core/problem.py", line 282, in run_model
self.final_setup()
File "/usr/local/lib/python2.7/dist-packages/openmdao/core/problem.py", line 423, in final_setup
model._final_setup(comm, vector_class, 'full', force_alloc_complex=force_alloc_complex)
File "/usr/local/lib/python2.7/dist-packages/openmdao/core/system.py", line 787, in _final_setup
force_alloc_complex=force_alloc_complex)
File "/usr/local/lib/python2.7/dist-packages/openmdao/core/system.py", line 586, in _get_root_vectors
ncol=ncol, relevant=rel)
File "/usr/local/lib/python2.7/dist-packages/openmdao/vectors/vector.py", line 160, in __init__
self._initialize_views()
File "/usr/local/lib/python2.7/dist-packages/openmdao/vectors/default_vector.py", line 320, in _initialize_views
v.shape = shape
ValueError: cannot reshape array of size 0 into shape (4,3)
The variable with shape (4,3) is a "global" variable given to the ParallelGroup (by promotion of each of its Components) by an external IndepVarComp.
EDIT: This only happens when the number of nodes allocated is less than the number of Components in the ParallelGroup.
Related
I have installed airflow and trying to start the worker on the mac. But I am getting following error. Unable to identify what must be causing this issue.
[2018-05-02 15:37:11,458: CRITICAL/MainProcess] Unrecoverable error: TypeError("Invalid argument(s) 'visibility_timeout' sent to create_engine(), using configuration MySQLDialect_mysqldb/QueuePool/Engine. Please check that the keyword arguments are appropriate for this combination of components.",)
Traceback (most recent call last):
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/celery/worker/worker.py", line 203, in start
self.blueprint.start(self)
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/celery/bootsteps.py", line 370, in start
return self.obj.start()
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/celery/worker/consumer/consumer.py", line 320, in start
blueprint.start(self)
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/celery/worker/consumer/tasks.py", line 37, in start
c.connection, on_decode_error=c.on_decode_error,
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/celery/app/amqp.py", line 302, in TaskConsumer
**kw
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/kombu/messaging.py", line 386, in __init__
self.revive(self.channel)
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/kombu/messaging.py", line 408, in revive
self.declare()
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/kombu/messaging.py", line 421, in declare
queue.declare()
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/kombu/entity.py", line 605, in declare
self._create_queue(nowait=nowait, channel=channel)
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/kombu/entity.py", line 614, in _create_queue
self.queue_declare(nowait=nowait, passive=False, channel=channel)
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/kombu/entity.py", line 649, in queue_declare
nowait=nowait,
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/kombu/transport/virtual/base.py", line 531, in queue_declare
self._new_queue(queue, **kwargs)
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/kombu/transport/sqlalchemy/__init__.py", line 82, in _new_queue
self._get_or_create(queue)
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/kombu/transport/sqlalchemy/__init__.py", line 70, in _get_or_create
obj = self.session.query(self.queue_cls) \
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/kombu/transport/sqlalchemy/__init__.py", line 65, in session
_, Session = self._open()
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/kombu/transport/sqlalchemy/__init__.py", line 56, in _open
engine = self._engine_from_config()
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/kombu/transport/sqlalchemy/__init__.py", line 51, in _engine_from_config
return create_engine(conninfo.hostname, **transport_options)
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/sqlalchemy/engine/__init__.py", line 424, in create_engine
return strategy.create(*args, **kwargs)
File "/Users/manishz/anaconda2/envs/airflow/lib/python2.7/site-packages/sqlalchemy/engine/strategies.py", line 162, in create
engineclass.__name__))
TypeError: Invalid argument(s) 'visibility_timeout' sent to create_engine(), using configuration MySQLDialect_mysqldb/QueuePool/Engine. Please check that the keyword arguments are appropriate for this combination of components.
Appreciate any help on it.
Thanks in advance
Manish
I've already seen this answer:
Gremlin, How to add edge to existing vertex in gremlin-python
and it wasn't really helpful. As suggested in one of the comments I did try to update gremlinpython 3.3.0 but then I get key error.
Stack:
JanusGraph 0.2.0, gremlinpython 3.2.3
This is my code
from gremlin_python import statics
from gremlin_python.structure.graph import Graph
from gremlin_python.process.graph_traversal import __
from gremlin_python.process.strategies import *
from gremlin_python.driver.driver_remote_connection import DriverRemoteConnection
graph = Graph()
g = graph.traversal().withRemote(DriverRemoteConnection('ws://localhost:8182/gremlin','g'))
martha = g.V().has('name','martha').next()
jack = g.V().has('name','jack').next()
#e_id = g.addE(jack,'likes',martha).next()
e_id = g.V(martha).as_('to').V(jack).addE("Likes").to('to').toList()
print e_id.toList()
StackTrace with gremlinpython 3.3.0
Traceback (most recent call last):
File "gremlin-py.py", line 9, in <module>
martha = g.V().has('name','martha').next()
File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/gremlin_python/process/traversal.py", line 70, in next
return self.__next__()
File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/gremlin_python/process/traversal.py", line 43, in __next__
self.traversal_strategies.apply_strategies(self)
File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/gremlin_python/process/traversal.py", line 352, in apply_strategies
traversal_strategy.apply(traversal)
File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/gremlin_python/driver/remote_connection.py", line 143, in apply
remote_traversal = self.remote_connection.submit(traversal.bytecode)
File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/gremlin_python/driver/driver_remote_connection.py", line 54, in submit
results = result_set.all().result()
File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/concurrent/futures/_base.py", line 429, in result
return self.__get_result()
File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/concurrent/futures/_base.py", line 381, in __get_result
raise exception_type, self._exception, self._traceback
KeyError: None
In my case, 3.3.0 is throwing error for all queries including g.V().next(). Now going back to 3.2.3, addvertex and other queries are working absolutely fine, but I couldn't figure out how to add edges. The same code when run with 3.2.3 produces,
StackTrace with gremlinpython 3.2.3
Traceback (most recent call last): File "gremlin-py.py", line 12, in <module>
e_id = g.V(martha).as_('to').V(jack).addE("Likes").to('to').toList()
File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/gremlin_python/process/traversal.py", line 52, in toList return list(iter(self))
File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/gremlin_python/process/traversal.py", line 70, in next
return self.__next__() File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/gremlin_python/process/traversal.py", line 43, in __next__
self.traversal_strategies.apply_strategies(self) File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/gremlin_python/process/traversal.py", line 284, in apply_strategies
traversal_strategy.apply(traversal)
File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/gremlin_python/driver/remote_connection.py", line 95, in apply remote_traversal = self.remote_connection.submit(traversal.bytecode) File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/gremlin_python/driver/driver_remote_connection.py", line 53, in submit traversers = self._loop.run_sync(lambda: self.submit_traversal_bytecode(request_id, bytecode))
File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/tornado/ioloop.py", line 457, in run_sync
return future_cell[0].result() File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/tornado/concurrent.py", line 237, in result
raise_exc_info(self._exc_info)
File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/tornado/gen.py", line 285, in wrapper
yielded = next(result)
File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/gremlin_python/driver/driver_remote_connection.py", line 69, in submit_traversal_bytecode
"gremlin": self._graphson_writer.writeObject(bytecode),
File "/Users/arvindn/.virtualenvs/gremlinenv/lib/python2.7/site-packages/gremlin_python/structure/io/graphson.py", line 72, in writeObject
return json.dumps(self.toDict(objectData), separators=(',', ':'))
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/json/__init__.py", line 250, in dumps
sort_keys=sort_keys, **kw).encode(obj)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/json/encoder.py", line 207, in encode
chunks = self.iterencode(o, _one_shot=True)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/json/encoder.py", line 270, in iterencode
return _iterencode(o, 0)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/json/encoder.py", line 184, in default
raise TypeError(repr(o) + " is not JSON serializable")
TypeError: v[4184] is not JSON serializable
It says v[x] is not JSON serializable. I'm not sure what causes this error. It'll be awesome if someone can help. If any more info is needed, I shall update the question accordingly.
JanusGraph 0.2.0 uses Apache TinkerPop 3.2.6. You should use the 3.2.6 version of the gremlinpython driver.
pip uninstall gremlinpython
pip install gremlinpython==3.2.6
I have 2-dimension array , When I use following code to compute loss:
_roi_score = roi_score[row_index, col_index]
gt_roi_label_lst = gt_roi_label_lst[row_index, col_index]
loss = F.sigmoid_cross_entropy(roi_score, gt_roi_label_lst) # multi label
during back propagation, code report error:
File "AU_rcnn/train.py", line 249, in main
trainer.run()
File "/usr/local/anaconda3/lib/python3.6/site-packages/chainer-3.0.0b1-py3.6.egg/chainer/training/trainer.py", line 324, in run
six.reraise(*sys.exc_info())
File "/usr/local/anaconda3/lib/python3.6/site-packages/six.py", line 686, in reraise
raise value
File "/usr/local/anaconda3/lib/python3.6/site-packages/chainer-3.0.0b1-py3.6.egg/chainer/training/trainer.py", line 310, in run
update()
File "/usr/local/anaconda3/lib/python3.6/site-packages/chainer-3.0.0b1-py3.6.egg/chainer/training/updater.py", line 223, in update
self.update_core()
File "/usr/local/anaconda3/lib/python3.6/site-packages/chainer-3.0.0b1-py3.6.egg/chainer/training/updater.py", line 367, in update_core
loss.backward()
File "/usr/local/anaconda3/lib/python3.6/site-packages/chainer-3.0.0b1-py3.6.egg/chainer/variable.py", line 916, in backward
target_input_indexes, out_grad, in_grad)
File "/usr/local/anaconda3/lib/python3.6/site-packages/chainer-3.0.0b1-py3.6.egg/chainer/function_node.py", line 486, in backward_accumulate
gxs = self.backward(target_input_indexes, grad_outputs)
File "/usr/local/anaconda3/lib/python3.6/site-packages/chainer-3.0.0b1-py3.6.egg/chainer/function.py", line 124, in backward
gxs = self._function.backward(in_data, grad_out_data)
File "/usr/local/anaconda3/lib/python3.6/site-packages/chainer-3.0.0b1-py3.6.egg/chainer/functions/connection/linear.py", line 56, in backward
gb = gy.sum(0)
File "cupy/core/core.pyx", line 967, in cupy.core.core.ndarray.sum
File "cupy/core/core.pyx", line 975, in cupy.core.core.ndarray.sum
File "cupy/core/reduction.pxi", line 216, in cupy.core.core.simple_reduction_function.__call__
File "cupy/core/elementwise.pxi", line 102, in cupy.core.core._preprocess_args
ValueError: Array device must be same as the current device: array device = 1 while current = 0
Although I only use one GPU, this error still appeared. What could be the reason for this? I have been stuck on it for a long time.
I am new to bokeh. I'm trying to plot an Area figure with the below code -
source_quantity = dict(
q1=[],
q2=[]
)
areaPlot = Area(source_quantity, title="Area Chart", legend="top_left", xlabel='time', ylabel='Quantity')
I don't understand why it's giving me the following stack trace -
ERROR:bokeh.application.application:Error running application handler <bokeh.application.handlers.script.ScriptHandler object at 0x10854fbd0>: Input of table-like dict must be column-oriented.
File "data_source.py", line 681, in from_data:
raise TypeError('Input of table-like dict must be column-oriented.') Traceback (most recent call last):
File "//anaconda/lib/python2.7/site-packages/bokeh/application/handlers/code_runner.py", line 71, in run
exec(self._code, module.__dict__)
File "/Users/e22654/workspace/python/BokehDemo/Plot.py", line 70, in <module>
areaPlot = Area(source_quantity, title="Area Chart", legend="top_left", xlabel='time', ylabel='Quantity')
File "//anaconda/lib/python2.7/site-packages/bokeh/charts/builders/area_builder.py", line 69, in Area
return create_and_build(AreaBuilder, data, **kws)
File "//anaconda/lib/python2.7/site-packages/bokeh/charts/builder.py", line 62, in create_and_build
builder = builder_class(*data, **builder_kws)
File "//anaconda/lib/python2.7/site-packages/bokeh/charts/builder.py", line 275, in __init__
data = ChartDataSource.from_data(*args, **data_args)
File "//anaconda/lib/python2.7/site-packages/bokeh/charts/data_source.py", line 681, in from_data
raise TypeError('Input of table-like dict must be column-oriented.')
TypeError: Input of table-like dict must be column-oriented.
I am trying to understand Annotations from this document:
http://docs.zope.org/zope.annotation/index.html
However the example fails when run.
I get:
Traceback (most recent call last):
File "./zopepy", line 366, in <module>
exec(compile(__file__f.read(), __file__, "exec"))
File "test1.py", line 29, in <module>
bar = IBar(foo)
File "eggs/zope.component-3.9.5-py2.7.egg/zope/component/_api.py", line 156, in adapter_hook
return sitemanager.queryAdapter(object, interface, name, default)
File "eggs/zope.component-3.9.5-py2.7.egg/zope/component/registry.py", line 228, in queryAdapter
return self.adapters.queryAdapter(object, interface, name, default)
File "eggs/zope.annotation-3.5.0-py2.7.egg/zope/annotation/factory.py", line 42, in getAnnotation
annotations = zope.annotation.interfaces.IAnnotations(context)
TypeError: ('Could not adapt', <__main__.Foo object at 0xb6d6956c>, <InterfaceClass zope.annotation.interfaces.IAnnotations>)
Example missing the following statements:
from zope.annotation.attribute import AttributeAnnotations
provideAdapter(AttributeAnnotations)