I am trying to deploy a simple Flask app with a SQLite database. It works fine when running locally, and I deploy it to Vercel via GitHub. Everything works until the app needs to write to the database, at which point it raises:
OperationalError: attempt to write a readonly database
Both the DB file and the 'instance' folder that contains it have full access permissions on my local system; I can verify that with
attrib ~/instance
from the command line (I am working on Windows 10). Is there a way to check and change the access mode of a file or a folder directly in the GitHub repo?
Here are the logs regarding this error:
[ERROR] 2023-01-12T17:19:33.931Z 63bacb5b-ec13-4247-b784-9aaba99376a0
Exception on /register [POST]
Traceback (most recent call last):
  File "/var/task/sqlalchemy/engine/base.py", line 1900, in _execute_context
    self.dialect.do_execute(
  File "/var/task/sqlalchemy/engine/default.py", line 736, in do_execute
    cursor.execute(statement, parameters)
sqlite3.OperationalError: attempt to write a readonly database

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/var/task/flask/app.py", line 2525, in wsgi_app
    response = self.full_dispatch_request()
  File "/var/task/flask/app.py", line 1822, in full_dispatch_request
    rv = self.handle_user_exception(e)
  File "/var/task/flask/app.py", line 1820, in full_dispatch_request
    rv = self.dispatch_request()
  File "/var/task/flask/app.py", line 1796, in dispatch_request
    return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args)
  File "./app.py", line 136, in register
    db.session.commit()
  File "<string>", line 2, in commit
  File "/var/task/sqlalchemy/orm/session.py", line 1451, in commit
    self._transaction.commit(_to_root=self.future)
  File "/var/task/sqlalchemy/orm/session.py", line 829, in commit
    self._prepare_impl()
  File "/var/task/sqlalchemy/orm/session.py", line 808, in _prepare_impl
    self.session.flush()
  File "/var/task/sqlalchemy/orm/session.py", line 3444, in flush
    self._flush(objects)
  File "/var/task/sqlalchemy/orm/session.py", line 3584, in _flush
    transaction.rollback(_capture_exception=True)
  File "/var/task/sqlalchemy/util/langhelpers.py", line 70, in __exit__
    compat.raise_(
  File "/var/task/sqlalchemy/util/compat.py", line 211, in raise_
    raise exception
  File "/var/task/sqlalchemy/orm/session.py", line 3544, in _flush
    flush_context.execute()
  File "/var/task/sqlalchemy/orm/unitofwork.py", line 456, in execute
    rec.execute(self)
  File "/var/task/sqlalchemy/orm/unitofwork.py", line 630, in execute
    util.preloaded.orm_persistence.save_obj(
  File "/var/task/sqlalchemy/orm/persistence.py", line 245, in save_obj
    _emit_insert_statements(
  File "/var/task/sqlalchemy/orm/persistence.py", line 1238, in _emit_insert_statements
    result = connection._execute_20(
  File "/var/task/sqlalchemy/engine/base.py", line 1705, in _execute_20
    return meth(self, args_10style, kwargs_10style, execution_options)
  File "/var/task/sqlalchemy/sql/elements.py", line 334, in _execute_on_connection
    return connection._execute_clauseelement(
  File "/var/task/sqlalchemy/engine/base.py", line 1572, in _execute_clauseelement
    ret = self._execute_context(
  File "/var/task/sqlalchemy/engine/base.py", line 1943, in _execute_context
    self._handle_dbapi_exception(
  File "/var/task/sqlalchemy/engine/base.py", line 2124, in _handle_dbapi_exception
    util.raise_(
  File "/var/task/sqlalchemy/util/compat.py", line 211, in raise_
    raise exception
  File "/var/task/sqlalchemy/engine/base.py", line 1900, in _execute_context
    self.dialect.do_execute(
  File "/var/task/sqlalchemy/engine/default.py", line 736, in do_execute
    cursor.execute(statement, parameters)
sqlalchemy.exc.OperationalError: (sqlite3.OperationalError) attempt to write a readonly database
I double-checked the access mode of the DB on my local system, both as a user and as an admin.
It turns out that Vercel does not support SQLite. It is clearly written in their guides:
SQLite needs a local file system on the server to store the data
permanently when write requests are made. In a serverless environment,
this central single permanent storage is not available because storage
is ephemeral with serverless functions.
So it is not a problem with read-only permissions, but rather a limitation of file-based databases like SQLite on Vercel's serverless platform.
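The usual fix is to move the data to a hosted database and point SQLAlchemy at it through an environment variable. Below is a minimal sketch, assuming a Flask-SQLAlchemy app and a hypothetical DATABASE_URL variable configured in the Vercel project settings:

import os

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)

# Use a hosted database (e.g. managed Postgres) in production and fall back
# to the local SQLite file for development. DATABASE_URL is a hypothetical
# variable you would set in the Vercel dashboard.
app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get(
    "DATABASE_URL", "sqlite:///app.db"
)
db = SQLAlchemy(app)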
We deployed Apache Airflow 2.3.3 to Azure:
Webserver - Web App
Scheduler - ACI
Celery Worker - ACI
We were seeing errors on the Celery ACI console related to Postgres and Redis connection timeouts, as shown below:
[2022-09-22 18:55:50,650: WARNING/ForkPoolWorker-15] Failed operation _store_result. Retrying 2 more times.
Traceback (most recent call last):
File "/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/engine/base.py", line 1803, in _execute_context
cursor, statement, parameters, context
File "/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/engine/default.py", line 719, in do_execute
cursor.execute(statement, parameters)
psycopg2.DatabaseError: could not receive data from server: Connection timed out
SSL SYSCALL error: Connection timed out
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/airflow/.local/lib/python3.7/site-packages/celery/backends/database/__init__.py", line 47, in _inner
return fun(*args, **kwargs)
File "/home/airflow/.local/lib/python3.7/site-packages/celery/backends/database/__init__.py", line 117, in _store_result
task = list(session.query(self.task_cls).filter(self.task_cls.task_id == task_id))
File "/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 2887, in __iter__
return self._iter().__iter__()
File "/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 2897, in _iter
execution_options={"_sa_orm_load_options": self.load_options},
File "/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/orm/session.py", line 1689, in execute
result = conn._execute_20(statement, params or {}, execution_options)
File "/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/engine/base.py", line 1614, in _execute_20
return meth(self, args_10style, kwargs_10style, execution_options)
File "/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 326, in _execute_on_connection
self, multiparams, params, execution_options
File "/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/engine/base.py", line 1491, in _execute_clauseelement
cache_hit=cache_hit,
File "/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/engine/base.py", line 1846, in _execute_context
e, statement, parameters, cursor, context
File "/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/engine/base.py", line 2027, in _handle_dbapi_exception
sqlalchemy_exception, with_traceback=exc_info[2], from_=e
File "/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/util/compat.py", line 207, in raise_
raise exception
File "/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/engine/base.py", line 1803, in _execute_context
cursor, statement, parameters, context
File "/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/engine/default.py", line 719, in do_execute
cursor.execute(statement, parameters)
sqlalchemy.exc.DatabaseError: (psycopg2.DatabaseError) could not receive data from server: Connection timed out
SSL SYSCALL error: Connection timed out
[SQL: SELECT celery_taskmeta.id AS celery_taskmeta_id, celery_taskmeta.task_id AS celery_taskmeta_task_id, celery_taskmeta.status AS celery_taskmeta_status, celery_taskmeta.result AS celery_taskmeta_result, celery_taskmeta.date_done AS celery_taskmeta_date_done, celery_taskmeta.traceback AS celery_taskmeta_traceback
FROM celery_taskmeta
WHERE celery_taskmeta.task_id = %(task_id_1)s]
[parameters: {'task_id_1': 'c5f9f53c-8afe-4d67-8d3b-d7ad84875de1'}]
(Background on this error at: https://sqlalche.me/e/14/4xp6)
[2022-09-22 18:55:50,929: INFO/ForkPoolWorker-15] [c5f9f53c-8afe-4d67-8d3b-d7ad84875de1] Executing command in Celery: ['airflow', 'tasks', 'run', 'CS_ALERTING', 'CheckRunningTasks', 'scheduled__2022-09-22T18:00:00+00:00', '--local', '--subdir', 'DAGS_FOLDER/CS_ALERTING.py']
[2022-09-22 18:55:51,241: INFO/ForkPoolWorker-15] Filling up the DagBag from /opt/airflow/platform_pam/dags/CS_ALERTING.py
[2022-09-22 18:55:53,467: INFO/ForkPoolWorker-15] Running <TaskInstance: CS_ALERTING.CheckRunningTasks scheduled__2022-09-22T18:00:00+00:00 [queued]> on host localhost
[2022-09-22 18:55:58,304: INFO/ForkPoolWorker-15] Task airflow.executors.celery_executor.execute_command[c5f9f53c-8afe-4d67-8d3b-d7ad84875de1] succeeded in 960.1964174450004s: None
[2022-09-22 19:29:25,931: WARNING/MainProcess] consumer: Connection to broker lost. Trying to re-establish the connection...
Traceback (most recent call last):
File "/home/airflow/.local/lib/python3.7/site-packages/redis/connection.py", line 706, in send_packed_command
sendall(self._sock, item)
File "/home/airflow/.local/lib/python3.7/site-packages/redis/_compat.py", line 9, in sendall
return sock.sendall(*args, **kwargs)
File "/usr/local/lib/python3.7/ssl.py", line 1034, in sendall
v = self.send(byte_view[count:])
File "/usr/local/lib/python3.7/ssl.py", line 1003, in send
return self._sslobj.write(data)
TimeoutError: [Errno 110] Connection timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/airflow/.local/lib/python3.7/site-packages/celery/worker/consumer/consumer.py", line 332, in start
blueprint.start(self)
File "/home/airflow/.local/lib/python3.7/site-packages/celery/bootsteps.py", line 116, in start
step.start(parent)
File "/home/airflow/.local/lib/python3.7/site-packages/celery/worker/consumer/consumer.py", line 628, in start
c.loop(*c.loop_args())
File "/home/airflow/.local/lib/python3.7/site-packages/celery/worker/loops.py", line 97, in asynloop
next(loop)
File "/home/airflow/.local/lib/python3.7/site-packages/kombu/asynchronous/hub.py", line 301, in create_loop
poll_timeout = fire_timers(propagate=propagate) if scheduled else 1
File "/home/airflow/.local/lib/python3.7/site-packages/kombu/asynchronous/hub.py", line 143, in fire_timers
entry()
File "/home/airflow/.local/lib/python3.7/site-packages/kombu/asynchronous/timer.py", line 64, in __call__
return self.fun(*self.args, **self.kwargs)
File "/home/airflow/.local/lib/python3.7/site-packages/kombu/asynchronous/timer.py", line 126, in _reschedules
return fun(*args, **kwargs)
File "/home/airflow/.local/lib/python3.7/site-packages/kombu/transport/redis.py", line 557, in maybe_check_subclient_health
client.check_health()
File "/home/airflow/.local/lib/python3.7/site-packages/redis/client.py", line 3522, in check_health
check_health=False)
File "/home/airflow/.local/lib/python3.7/site-packages/redis/connection.py", line 726, in send_command
check_health=kwargs.get('check_health', True))
File "/home/airflow/.local/lib/python3.7/site-packages/redis/connection.py", line 718, in send_packed_command
(errno, errmsg))
redis.exceptions.ConnectionError: Error 110 while writing to socket. Connection timed out.
[2022-09-22 19:29:26,023: WARNING/MainProcess] /home/airflow/.local/lib/python3.7/site-packages/celery/worker/consumer/consumer.py:367: CPendingDeprecationWarning:
I referred to Airflow's documentation on setting up a database backend and found the sql_alchemy_connect_args option. We are modifying Airflow's Docker image, adding a Python module,
airflow.www.db_utils.db_config (this file is installed into site-packages), which defines the dictionary:
keepalive_kwargs = {
    "keepalives": 1,           # enable TCP keepalives on the connection
    "keepalives_idle": 30,     # seconds of inactivity before the first probe
    "keepalives_interval": 5,  # seconds between keepalive probes
    "keepalives_count": 5,     # unanswered probes before the connection is dropped
}
Finally, we are setting
ENV AIRFLOW__DATABASE__SQL_ALCHEMY_CONNECT_ARGS="airflow.www.db_utils.db_config.keepalive_kwargs"
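For reference, the value of this variable is a dotted import path that Airflow resolves to the dictionary and then passes to SQLAlchemy; roughly like this (a sketch of the mechanism under that assumption, not Airflow's actual code):

from importlib import import_module

from sqlalchemy import create_engine

# Split the dotted path into module and attribute, import the module, and
# hand the resulting dict to SQLAlchemy, which forwards it to psycopg2/libpq.
path = "airflow.www.db_utils.db_config.keepalive_kwargs"
module_path, attr = path.rsplit(".", 1)
connect_args = getattr(import_module(module_path), attr)

engine = create_engine(
    "postgresql+psycopg2://user:pass@host/airflow",  # placeholder DSN
    connect_args=connect_args,
)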
Unfortunately, the error still persists. It would be great if someone could help me resolve this issue.
I've been trying to install an OpenStack cluster. After completing Keystone and Nova, I ran into an issue with the Neutron server at the database sync-up step, as shown below.
root@controller:/etc/neutron# su -s /bin/sh -c "neutron-db-manage --config-file /etc/neutron/neutron.conf \
> --config-file /etc/neutron/plugins/ml2/ml2_conf.ini upgrade head" neutron
INFO [alembic.runtime.migration] Context impl MySQLImpl.
INFO [alembic.runtime.migration] Will assume non-transactional DDL.
Running upgrade for neutron ...
INFO [alembic.runtime.migration] Context impl MySQLImpl.
INFO [alembic.runtime.migration] Will assume non-transactional DDL.
INFO [alembic.runtime.migration] Running upgrade 63fd95af7dcd -> c613d0b82681
Traceback (most recent call last):
File "/usr/lib/python3/dist-packages/sqlalchemy/engine/base.py", line 1246, in _execute_context
cursor, statement, parameters, context
File "/usr/lib/python3/dist-packages/sqlalchemy/engine/default.py", line 581, in do_execute
cursor.execute(statement, parameters)
File "/usr/lib/python3/dist-packages/pymysql/cursors.py", line 165, in execute
result = self._query(query)
File "/usr/lib/python3/dist-packages/pymysql/cursors.py", line 321, in _query
conn.query(q)
File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 860, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 1061, in _read_query_result
result.read()
File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 1349, in read
first_packet = self.connection._read_packet()
File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 1018, in _read_packet
packet.check_error()
File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 384, in check_error
err.raise_mysql_exception(self._data)
File "/usr/lib/python3/dist-packages/pymysql/err.py", line 107, in raise_mysql_exception
raise errorclass(errno, errval)
pymysql.err.InternalError: (1832, "Cannot change column 'network_id': used in a foreign key constraint 'subnets_ibfk_1'")
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/bin/neutron-db-manage", line 10, in <module>
sys.exit(main())
File "/usr/lib/python3/dist-packages/neutron/db/migration/cli.py", line 658, in main
return_val |= bool(CONF.command.func(config, CONF.command.name))
File "/usr/lib/python3/dist-packages/neutron/db/migration/cli.py", line 182, in do_upgrade
desc=branch, sql=CONF.command.sql)
File "/usr/lib/python3/dist-packages/neutron/db/migration/cli.py", line 83, in do_alembic_command
getattr(alembic_command, cmd)(config, *args, **kwargs)
File "/usr/lib/python3/dist-packages/alembic/command.py", line 279, in upgrade
script.run_env()
File "/usr/lib/python3/dist-packages/alembic/script/base.py", line 475, in run_env
util.load_python_file(self.dir, "env.py")
File "/usr/lib/python3/dist-packages/alembic/util/pyfiles.py", line 98, in load_python_file
module = load_module_py(module_id, path)
File "/usr/lib/python3/dist-packages/alembic/util/compat.py", line 174, in load_module_py
spec.loader.exec_module(module)
File "<frozen importlib._bootstrap_external>", line 678, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/usr/lib/python3/dist-packages/neutron/db/migration/alembic_migrations/env.py", line 120, in <module>
run_migrations_online()
File "/usr/lib/python3/dist-packages/neutron/db/migration/alembic_migrations/env.py", line 114, in run_migrations_online
context.run_migrations()
File "<string>", line 8, in run_migrations
File "/usr/lib/python3/dist-packages/alembic/runtime/environment.py", line 846, in run_migrations
self.get_context().run_migrations(**kw)
File "/usr/lib/python3/dist-packages/alembic/runtime/migration.py", line 365, in run_migrations
step.migration_fn(**kw)
File "/usr/lib/python3/dist-packages/neutron/db/migration/alembic_migrations/versions/train/expand/c613d0b82681_subnet_force_network_id.py", line 40, in upgrade
existing_type=sa.String(36))
File "<string>", line 8, in alter_column
File "<string>", line 3, in alter_column
File "/usr/lib/python3/dist-packages/alembic/operations/ops.py", line 1775, in alter_column
return operations.invoke(alt)
File "/usr/lib/python3/dist-packages/alembic/operations/base.py", line 345, in invoke
return fn(self, operation)
File "/usr/lib/python3/dist-packages/alembic/operations/toimpl.py", line 56, in alter_column
**operation.kw
File "/usr/lib/python3/dist-packages/alembic/ddl/mysql.py", line 98, in alter_column
else existing_comment,
File "/usr/lib/python3/dist-packages/alembic/ddl/impl.py", line 134, in _exec
return conn.execute(construct, *multiparams, **params)
File "/usr/lib/python3/dist-packages/sqlalchemy/engine/base.py", line 982, in execute
return meth(self, multiparams, params)
File "/usr/lib/python3/dist-packages/sqlalchemy/sql/ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "/usr/lib/python3/dist-packages/sqlalchemy/engine/base.py", line 1044, in _execute_ddl
compiled,
File "/usr/lib/python3/dist-packages/sqlalchemy/engine/base.py", line 1250, in _execute_context
e, statement, parameters, cursor, context
File "/usr/lib/python3/dist-packages/sqlalchemy/engine/base.py", line 1474, in _handle_dbapi_exception
util.raise_from_cause(newraise, exc_info)
File "/usr/lib/python3/dist-packages/sqlalchemy/util/compat.py", line 398, in raise_from_cause
reraise(type(exception), exception, tb=exc_tb, cause=cause)
File "/usr/lib/python3/dist-packages/sqlalchemy/util/compat.py", line 152, in reraise
raise value.with_traceback(tb)
File "/usr/lib/python3/dist-packages/sqlalchemy/engine/base.py", line 1246, in _execute_context
cursor, statement, parameters, context
File "/usr/lib/python3/dist-packages/sqlalchemy/engine/default.py", line 581, in do_execute
cursor.execute(statement, parameters)
File "/usr/lib/python3/dist-packages/pymysql/cursors.py", line 165, in execute
result = self._query(query)
File "/usr/lib/python3/dist-packages/pymysql/cursors.py", line 321, in _query
conn.query(q)
File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 860, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 1061, in _read_query_result
result.read()
File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 1349, in read
first_packet = self.connection._read_packet()
File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 1018, in _read_packet
packet.check_error()
File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 384, in check_error
err.raise_mysql_exception(self._data)
File "/usr/lib/python3/dist-packages/pymysql/err.py", line 107, in raise_mysql_exception
raise errorclass(errno, errval)
oslo_db.exception.DBError: (pymysql.err.InternalError) (1832, "Cannot change column 'network_id': used in a foreign key constraint 'subnets_ibfk_1'")
[SQL: ALTER TABLE subnets MODIFY network_id VARCHAR(36) NOT NULL]
(Background on this error at: http://sqlalche.me/e/2j85)
root@controller:/etc/neutron#
Then, checking the Neutron logs, I saw that the 'neutron.subnet_dns_publish_fixed_ips' table was not created successfully:
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.146 5355 ERROR neutron.agent.dhcp.agent
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent [req-fc8e970a-8bf9-485d-9716-85d76a0d60bc - - - - -] Unable to sync network state.: oslo_messaging.rpc.client.RemoteError: Remote error: ProgrammingError (pymysql.err.ProgrammingError) (1146, "Table 'neutron.subnet_dns_publish_fixed_ips' doesn't exist")
/var/log/neutron/neutron-dhcp-agent.log:(Background on this error at: http://sqlalche.me/e/f405)
/var/log/neutron/neutron-dhcp-agent.log:['Traceback (most recent call last):\n', ' File "/usr/lib/python3/dist-packages/sqlalchemy/engine/base.py", line 1246, in _execute_context\n cursor, statement, parameters, context\n', ' File "/usr/lib/python3/dist-packages/sqlalchemy/engine/default.py", line 581, in do_execute\n cursor.execute(statement, parameters)\n', ' File "/usr/lib/python3/dist-packages/pymysql/cursors.py", line 165, in execute\n result = self._query(query)\n', ' File "/usr/lib/python3/dist-packages/pymysql/cursors.py", line 321, in _query\n conn.query(q)\n', ' File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 860, in query\n self._affected_rows = self._read_query_result(unbuffered=unbuffered)\n', ' File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 1061, in _read_query_result\n result.read()\n', ' File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 1349, in read\n first_packet = self.connection._read_packet()\n', ' File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 1018, in _read_packet\n packet.check_error()\n', ' File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 384, in check_error\n err.raise_mysql_exception(self._data)\n', ' File "/usr/lib/python3/dist-packages/pymysql/err.py", line 107, in raise_mysql_exception\n raise errorclass(errno, errval)\n', 'pymysql.err.ProgrammingError: (1146, "Table \'neutron.subnet_dns_publish_fixed_ips\' doesn\'t exist")\n', '\nThe above exception was the direct cause of the following exception:\n\n', 'Traceback (most recent call last):\n', ' File "/usr/lib/python3/dist-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming\n res = self.dispatcher.dispatch(message)\n', ' File "/usr/lib/python3/dist-packages/oslo_messaging/rpc/dispatcher.py", line 276, in dispatch\n return self._do_dispatch(endpoint, method, ctxt, args)\n', ' File "/usr/lib/python3/dist-packages/oslo_messaging/rpc/dispatcher.py", line 196, in _do_dispatch\n result = func(ctxt, **new_args)\n', ' File "/usr/lib/python3/dist-packages/neutron/api/rpc/handlers/dhcp_rpc.py", line 143, in get_active_networks_info\n networks = self._get_active_networks(context, **kwargs)\n', ' File "/usr/lib/python3/dist-packages/neutron/api/rpc/handlers/dhcp_rpc.py", line 87, in _get_active_networks\n plugin.auto_schedule_networks(context, host)\n', ' File "/usr/lib/python3/dist-packages/neutron/db/agentschedulers_db.py", line 492, in auto_schedule_networks\n self.network_scheduler.auto_schedule_networks(self, context, host)\n', ' File "/usr/lib/python3/dist-packages/neutron/scheduler/dhcp_agent_scheduler.py", line 50, in auto_schedule_networks\n subnets = plugin.get_subnets(context, fields=fields)\n', ' File "/usr/lib/python3/dist-packages/neutron_lib/db/api.py", line 233, in wrapped\n return method(*args, **kwargs)\n', ' File "/usr/lib/python3/dist-packages/neutron/db/db_base_plugin_v2.py", line 1078, in get_subnets\n marker, page_reverse)\n', ' File "/usr/lib/python3/dist-packages/neutron/db/db_base_plugin_common.py", line 320, in _get_subnets\n **filters)\n', ' File "/usr/lib/python3/dist-packages/neutron/objects/base.py", line 640, in get_objects\n cls, context, _pager=_pager, **cls.modify_fields_to_db(kwargs))\n', ' File "/usr/lib/python3/dist-packages/neutron/objects/db/api.py", line 52, in get_objects\n **(_pager.to_kwargs(context, obj_cls) if _pager else {}))\n', ' File "/usr/lib/python3/dist-packages/neutron_lib/db/model_query.py", line 317, in get_collection\n for c in query\n', ' File 
"/usr/lib/python3/dist-packages/sqlalchemy/orm/query.py", line 3367, in __iter__\n return self._execute_and_instances(context)\n', ' File "/usr/lib/python3/dist-packages/sqlalchemy/orm/query.py", line 3392, in _execute_and_instances\n result = conn.execute(querycontext.statement, self._params)\n', ' File "/usr/lib/python3/dist-packages/sqlalchemy/engine/base.py", line 982, in execute\n return meth(self, multiparams, params)\n', ' File "/usr/lib/python3/dist-packages/sqlalchemy/sql/elements.py", line 287, in _execute_on_connection\n return connection._execute_clauseelement(self, multiparams, params)\n', ' File "/usr/lib/python3/dist-packages/sqlalchemy/engine/base.py", line 1101, in _execute_clauseelement\n distilled_params,\n', ' File "/usr/lib/python3/dist-packages/sqlalchemy/engine/base.py", line 1250, in _execute_context\n e, statement, parameters, cursor, context\n', ' File "/usr/lib/python3/dist-packages/sqlalchemy/engine/base.py", line 1474, in _handle_dbapi_exception\n util.raise_from_cause(newraise, exc_info)\n', ' File "/usr/lib/python3/dist-packages/sqlalchemy/util/compat.py", line 398, in raise_from_cause\n reraise(type(exception), exception, tb=exc_tb, cause=cause)\n', ' File "/usr/lib/python3/dist-packages/sqlalchemy/util/compat.py", line 152, in reraise\n raise value.with_traceback(tb)\n', ' File "/usr/lib/python3/dist-packages/sqlalchemy/engine/base.py", line 1246, in _execute_context\n cursor, statement, parameters, context\n', ' File "/usr/lib/python3/dist-packages/sqlalchemy/engine/default.py", line 581, in do_execute\n cursor.execute(statement, parameters)\n', ' File "/usr/lib/python3/dist-packages/pymysql/cursors.py", line 165, in execute\n result = self._query(query)\n', ' File "/usr/lib/python3/dist-packages/pymysql/cursors.py", line 321, in _query\n conn.query(q)\n', ' File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 860, in query\n self._affected_rows = self._read_query_result(unbuffered=unbuffered)\n', ' File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 1061, in _read_query_result\n result.read()\n', ' File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 1349, in read\n first_packet = self.connection._read_packet()\n', ' File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 1018, in _read_packet\n packet.check_error()\n', ' File "/usr/lib/python3/dist-packages/pymysql/connections.py", line 384, in check_error\n err.raise_mysql_exception(self._data)\n', ' File "/usr/lib/python3/dist-packages/pymysql/err.py", line 107, in raise_mysql_exception\n raise errorclass(errno, errval)\n', 'sqlalchemy.exc.ProgrammingError: (pymysql.err.ProgrammingError) (1146, "Table \'neutron.subnet_dns_publish_fixed_ips\' doesn\'t exist")\n[SQL: SELECT subnets.project_id AS subnets_project_id, subnets.id AS subnets_id, subnets.in_use AS subnets_in_use, subnets.name AS subnets_name, subnets.network_id AS subnets_network_id, subnets.segment_id AS subnets_segment_id, subnets.subnetpool_id AS subnets_subnetpool_id, subnets.ip_version AS subnets_ip_version, subnets.cidr AS subnets_cidr, subnets.gateway_ip AS subnets_gateway_ip, subnets.enable_dhcp AS subnets_enable_dhcp, subnets.ipv6_ra_mode AS subnets_ipv6_ra_mode, subnets.ipv6_address_mode AS subnets_ipv6_address_mode, subnets.standard_attr_id AS subnets_standard_attr_id, subnetpools_1.shared AS subnetpools_1_shared, standardattributes_1.id AS standardattributes_1_id, standardattributes_1.resource_type AS standardattributes_1_resource_type, standardattributes_1.description AS 
standardattributes_1_description, standardattributes_1.revision_number AS standardattributes_1_revision_number, standardattributes_1.created_at AS standardattributes_1_created_at, standardattributes_1.updated_at AS standardattributes_1_updated_at, subnetpools_1.project_id AS subnetpools_1_project_id, subnetpools_1.id AS subnetpools_1_id, subnetpools_1.name AS subnetpools_1_name, subnetpools_1.ip_version AS subnetpools_1_ip_version, subnetpools_1.default_prefixlen AS subnetpools_1_default_prefixlen, subnetpools_1.min_prefixlen AS subnetpools_1_min_prefixlen, subnetpools_1.max_prefixlen AS subnetpools_1_max_prefixlen, subnetpools_1.is_default AS subnetpools_1_is_default, subnetpools_1.default_quota AS subnetpools_1_default_quota, subnetpools_1.hash AS subnetpools_1_hash, subnetpools_1.address_scope_id AS subnetpools_1_address_scope_id, subnetpools_1.standard_attr_id AS subnetpools_1_standard_attr_id, standardattributes_2.id AS standardattributes_2_id, standardattributes_2.resource_type AS standardattributes_2_resource_type, standardattributes_2.description AS standardattributes_2_description, standardattributes_2.revision_number AS standardattributes_2_revision_number, standardattributes_2.created_at AS standardattributes_2_created_at, standardattributes_2.updated_at AS standardattributes_2_updated_at, subnet_dns_publish_fixed_ips_1.subnet_id AS subnet_dns_publish_fixed_ips_1_subnet_id, subnet_dns_publish_fixed_ips_1.dns_publish_fixed_ip AS subnet_dns_publish_fixed_ips_1_dns_publish_fixed_ip \nFROM subnets LEFT OUTER JOIN subnetpools AS subnetpools_1 ON subnets.subnetpool_id = subnetpools_1.id LEFT OUTER JOIN standardattributes AS standardattributes_1 ON standardattributes_1.id = subnetpools_1.standard_attr_id LEFT OUTER JOIN standardattributes AS standardattributes_2 ON standardattributes_2.id = subnets.standard_attr_id LEFT OUTER JOIN subnet_dns_publish_fixed_ips AS subnet_dns_publish_fixed_ips_1 ON subnets.id = subnet_dns_publish_fixed_ips_1.subnet_id]\n(Background on this error at: http://sqlalche.me/e/f405)\n'].
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent Traceback (most recent call last):
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent File "/usr/lib/python3/dist-packages/neutron/agent/dhcp/agent.py", line 266, in sync_state
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent enable_dhcp_filter=False)
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent File "/usr/lib/python3/dist-packages/neutron/agent/dhcp/agent.py", line 826, in get_active_networks_info
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent host=self.host, **kwargs)
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent File "/usr/lib/python3/dist-packages/neutron_lib/rpc.py", line 157, in call
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent return self._original_context.call(ctxt, method, **kwargs)
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent File "/usr/lib/python3/dist-packages/oslo_messaging/rpc/client.py", line 181, in call
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent transport_options=self.transport_options)
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent File "/usr/lib/python3/dist-packages/oslo_messaging/transport.py", line 129, in _send
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent transport_options=transport_options)
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent File "/usr/lib/python3/dist-packages/oslo_messaging/_drivers/amqpdriver.py", line 682, in send
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent transport_options=transport_options)
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent File "/usr/lib/python3/dist-packages/oslo_messaging/_drivers/amqpdriver.py", line 672, in _send
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent raise result
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent oslo_messaging.rpc.client.RemoteError: Remote error: ProgrammingError (pymysql.err.ProgrammingError) (1146, "Table 'neutron.subnet_dns_publish_fixed_ips' doesn't exist")
/var/log/neutron/neutron-dhcp-agent.log:2022-06-06 12:41:30.169 5355 ERROR neutron.agent.dhcp.agent [SQL: SELECT subnets.project_id AS subnets_project_id, subnets.id AS subnets_id, subnets.in_use AS subnets_in_use, subnets.name AS subnets_name, subnets.network_id AS subnets_network_id, subnets.segment_id AS subnets_segment_id, subnets.subnetpool_id AS subnets_subnetpool_id, subnets.ip_version AS subnets_ip_version, subnets.cidr AS subnets_cidr, subnets.gateway_ip AS subnets_gateway_ip, subnets.enable_dhcp AS subnets_enable_dhcp, subnets.ipv6_ra_mode AS subnets_ipv6_ra_mode, subnets.ipv6_address_mode AS subnets_ipv6_address_mode, subnets.standard_attr_id AS subnets_standard_attr_id, subnetpools_1.shared AS subnetpools_1_shared, standardattributes_1.id AS standardattributes_1_id, standardattributes_1.resource_type AS standardattributes_1_resource_type, standardattributes_1.description AS standardattributes_1_description, standardattributes_1.revision_number AS standardattributes_1_revision_number, standardattributes_1.created_at AS standardattributes_1_cre^C
root@controller:/etc/neutron#
Please advise on this case. Thanks.
I've found the issue. It was caused by a bug in MariaDB 10.1, so the problem was resolved after upgrading to 10.6, following the guide below:
https://computingforgeeks.com/install-mariadb-on-ubuntu-and-centos/
Note: you may need to drop the Neutron DB and then run the sync again, as sketched below.
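If you do need to recreate it, here is a hedged sketch of dropping and recreating the Neutron database with pymysql before re-running the neutron-db-manage upgrade from the question (the host, users, and passwords are placeholders following the usual install-guide conventions):

import pymysql

# Placeholders: adjust the host and credentials to your deployment.
conn = pymysql.connect(host="controller", user="root", password="ROOT_DB_PASS")
try:
    with conn.cursor() as cur:
        cur.execute("DROP DATABASE IF EXISTS neutron")
        cur.execute("CREATE DATABASE neutron")
        # Re-grant access for the neutron service user (MariaDB syntax).
        cur.execute(
            "GRANT ALL PRIVILEGES ON neutron.* TO 'neutron'@'localhost' "
            "IDENTIFIED BY 'NEUTRON_DBPASS'"
        )
        cur.execute(
            "GRANT ALL PRIVILEGES ON neutron.* TO 'neutron'@'%' "
            "IDENTIFIED BY 'NEUTRON_DBPASS'"
        )
    conn.commit()
finally:
    conn.close()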
Good morning,
I am using TensorFlow Lite and I also wanted to use telepot.
I have also installed the Coral USB Accelerator, but I don't think the problem depends on it, since the behavior is the same whether or not I add --edgetpu to the end of the command that starts the program.
Sending messages or images works only if I place the call before this instruction:
from tensorflow.lite.python.interpreter import Interpreter
It's as if telepot were incompatible with tflite.
Obviously everything works without the telepot calls.
What can I do?
I'm using a Raspberry Pi 4 running Debian Buster, Python 3.7, and OpenCV 4.1.
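For context, here is a minimal sketch of the order dependence described above (the chat ID matches the traceback below; the token string is a placeholder):

import telepot

# The token is a placeholder; 256868258 is the chat ID from the traceback.
bot = telepot.Bot("BOT_TOKEN")
bot.sendMessage(256868258, "test")  # succeeds if run before the import below

# After this import, the same call fails with the SSL handshake error shown.
from tensorflow.lite.python.interpreter import Interpreter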
This is the error it gives me:
Traceback (most recent call last):
File "/home/pi/tflite1/tflite1-env/lib/python3.7/site-packages/urllib3/contrib/pyopenssl.py", line 485, in wrap_socket
cnx.do_handshake()
File "/usr/lib/python3/dist-packages/OpenSSL/SSL.py", line 1915, in do_handshake
self._raise_ssl_error(self._ssl, result)
File "/usr/lib/python3/dist-packages/OpenSSL/SSL.py", line 1647, in _raise_ssl_error
_raise_current_error()
File "/usr/lib/python3/dist-packages/OpenSSL/_util.py", line 54, in exception_from_error_queue
raise exception_type(errors)
OpenSSL.SSL.Error: [('SSL routines', 'tls_process_server_certificate', 'certificate verify failed')]
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/pi/tflite1/tflite1-env/lib/python3.7/site-packages/urllib3/connectionpool.py", line 672, in urlopen
chunked=chunked,
File "/home/pi/tflite1/tflite1-env/lib/python3.7/site-packages/urllib3/connectionpool.py", line 376, in _make_request
self._validate_conn(conn)
File "/home/pi/tflite1/tflite1-env/lib/python3.7/site-packages/urllib3/connectionpool.py", line 994, in _validate_conn
conn.connect()
File "/home/pi/tflite1/tflite1-env/lib/python3.7/site-packages/urllib3/connection.py", line 394, in connect
ssl_context=context,
File "/home/pi/tflite1/tflite1-env/lib/python3.7/site-packages/urllib3/util/ssl_.py", line 370, in ssl_wrap_socket
return context.wrap_socket(sock, server_hostname=server_hostname)
File "/home/pi/tflite1/tflite1-env/lib/python3.7/site-packages/urllib3/contrib/pyopenssl.py", line 491, in wrap_socket
raise ssl.SSLError("bad handshake: %r" % e)
ssl.SSLError: ("bad handshake: Error([('SSL routines', 'tls_process_server_certificate', 'certificate verify failed')])",)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "TFLite_detection_webcam_Prova1.py", line 137, in <module>
spedisci()
File "TFLite_detection_webcam_Prova1.py", line 33, in spedisci
bot.sendPhoto(256868258, foto)
File "/usr/local/lib/python3.7/dist-packages/telepot/__init__.py", line 539, in sendPhoto
return self._api_request_with_file('sendPhoto', _rectify(p), 'photo', photo)
File "/usr/local/lib/python3.7/dist-packages/telepot/__init__.py", line 499, in _api_request_with_file
return self._api_request(method, _rectify(params), files, **kwargs)
File "/usr/local/lib/python3.7/dist-packages/telepot/__init__.py", line 491, in _api_request
return api.request((self._token, method, params, files), **kwargs)
File "/usr/local/lib/python3.7/dist-packages/telepot/api.py", line 154, in request
r = fn(*args, **kwargs) # `fn` must be thread-safe
File "/home/pi/tflite1/tflite1-env/lib/python3.7/site-packages/urllib3/request.py", line 171, in request_encode_body
return self.urlopen(method, url, **extra_kw)
File "/home/pi/tflite1/tflite1-env/lib/python3.7/site-packages/urllib3/poolmanager.py", line 330, in urlopen
response = conn.urlopen(method, u.request_uri, **kw)
File "/home/pi/tflite1/tflite1-env/lib/python3.7/site-packages/urllib3/connectionpool.py", line 760, in urlopen
**response_kw
File "/home/pi/tflite1/tflite1-env/lib/python3.7/site-packages/urllib3/connectionpool.py", line 760, in urlopen
**response_kw
File "/home/pi/tflite1/tflite1-env/lib/python3.7/site-packages/urllib3/connectionpool.py", line 760, in urlopen
**response_kw
File "/home/pi/tflite1/tflite1-env/lib/python3.7/site-packages/urllib3/connectionpool.py", line 720, in urlopen
method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
File "/home/pi/tflite1/tflite1-env/lib/python3.7/site-packages/urllib3/util/retry.py", line 436, in increment
raise MaxRetryError(_pool, url, error or ResponseError(cause))
urllib3.exceptions.MaxRetryError: HTTPSConnectionPool
(host='api.telegram.org', port=443): Max retries exceeded with url: /bot926377239:AAEl0gqMWzG0dMidkGNqcGr2wkeTLbgZn3g/sendPhoto (Caused by SSLError(SSLError("bad handshake: Error([('SSL routines', 'tls_process_server_certificate', 'certificate verify failed')])")))