Merge branch 'develop' of https://github.com/coreemu/core into develop
commit 684cb26c5c
14 changed files with 86 additions and 88 deletions
@@ -2,7 +2,7 @@
 # Process this file with autoconf to produce a configure script.
 
 # this defines the CORE version number, must be static for AC_INIT
-AC_INIT(core, 5.5.2)
+AC_INIT(core, 6.0.0)
 
 # autoconf and automake initialization
 AC_CONFIG_SRCDIR([netns/version.h.in])
@@ -145,7 +145,7 @@ def handle_exception_event(event):
     """
     return core_pb2.ExceptionEvent(
         node_id=event.node,
-        level=event.level,
+        level=event.level.value,
         source=event.source,
         date=event.date,
         text=event.text,
@@ -299,7 +299,7 @@ class CoreHandler(socketserver.BaseRequestHandler):
            [
                (ExceptionTlvs.NODE, exception_data.node),
                (ExceptionTlvs.SESSION, exception_data.session),
-               (ExceptionTlvs.LEVEL, exception_data.level),
+               (ExceptionTlvs.LEVEL, exception_data.level.value),
                (ExceptionTlvs.SOURCE, exception_data.source),
                (ExceptionTlvs.DATE, exception_data.date),
                (ExceptionTlvs.TEXT, exception_data.text),
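
Both handlers above now receive an enum member for the exception level and unwrap it with .value before it goes on the wire. A minimal illustration, assuming ExceptionLevels is a plain Python Enum as the imports later in this diff suggest:

    from core.emulator.enumerations import ExceptionLevels

    level = ExceptionLevels.ERROR
    # gRPC and TLV fields carry the underlying level code, not the Enum member itself
    print(level.name, level.value)
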
@@ -1411,13 +1411,12 @@ class Session:
         """
         Generate and broadcast an exception event.
 
-        :param str level: exception level
+        :param core.emulator.enumerations.ExceptionLevel level: exception level
         :param str source: source name
         :param int node_id: node related to exception
         :param str text: exception message
         :return: nothing
         """
-
         exception_data = ExceptionData(
             node=node_id,
             session=str(self.id),
@@ -1426,7 +1425,6 @@ class Session:
             date=time.ctime(),
             text=text,
         )
-
         self.broadcast_exception(exception_data)
 
     def instantiate(self):
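
For orientation, a hedged sketch of how the updated Session.exception signature might be called; it assumes CoreEmu/create_session as the usual entry point and uses an illustrative source string and message:

    from core.emulator.coreemu import CoreEmu
    from core.emulator.enumerations import ExceptionLevels

    coreemu = CoreEmu()
    session = coreemu.create_session()
    # the level is now passed as an enum member; the API layers above
    # convert it to the wire value with .value where needed
    session.exception(ExceptionLevels.ERROR, "example-source", None, "something went wrong")
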
@@ -167,7 +167,7 @@ class CoreClient:
         elif event.HasField("config_event"):
             logging.info("config event: %s", event)
         elif event.HasField("exception_event"):
-            self.handle_exception_event(event.exception_event)
+            self.handle_exception_event(event)
         else:
             logging.info("unhandled event: %s", event)
 
@@ -204,7 +204,7 @@ class CoreClient:
 
     def handle_throughputs(self, event):
         if event.session_id != self.session_id:
-            logging.warn(
+            logging.warning(
                 "ignoring throughput event session(%s) current(%s)",
                 event.session_id,
                 self.session_id,
@@ -4,9 +4,7 @@ check engine light
 import tkinter as tk
 from tkinter import ttk
 
-from grpc import RpcError
-
-from core.api.grpc import core_pb2
+from core.api.grpc.core_pb2 import ExceptionLevel
 from core.gui.dialogs.dialog import Dialog
 from core.gui.themes import PADX, PADY
 from core.gui.widgets import CodeText
@@ -18,6 +16,7 @@ class AlertsDialog(Dialog):
         self.app = app
         self.tree = None
         self.codetext = None
+        self.alarm_map = {}
         self.draw()
 
     def draw(self):
@@ -48,25 +47,31 @@ class AlertsDialog(Dialog):
         self.tree.bind("<<TreeviewSelect>>", self.click_select)
 
         for alarm in self.app.statusbar.core_alarms:
-            level = self.get_level(alarm.level)
-            self.tree.insert(
+            exception = alarm.exception_event
+            level_name = ExceptionLevel.Enum.Name(exception.level)
+            insert_id = self.tree.insert(
                 "",
                 tk.END,
-                text=str(alarm.date),
+                text=exception.date,
                 values=(
-                    alarm.date,
-                    level + " (%s)" % alarm.level,
+                    exception.date,
+                    level_name,
                     alarm.session_id,
-                    alarm.node_id,
-                    alarm.source,
+                    exception.node_id,
+                    exception.source,
                 ),
-                tags=(level,),
+                tags=(level_name,),
             )
+            self.alarm_map[insert_id] = alarm
 
-        self.tree.tag_configure("ERROR", background="#ff6666")
-        self.tree.tag_configure("FATAL", background="#d9d9d9")
-        self.tree.tag_configure("WARNING", background="#ffff99")
-        self.tree.tag_configure("NOTICE", background="#85e085")
+        error_name = ExceptionLevel.Enum.Name(ExceptionLevel.ERROR)
+        self.tree.tag_configure(error_name, background="#ff6666")
+        fatal_name = ExceptionLevel.Enum.Name(ExceptionLevel.FATAL)
+        self.tree.tag_configure(fatal_name, background="#d9d9d9")
+        warning_name = ExceptionLevel.Enum.Name(ExceptionLevel.WARNING)
+        self.tree.tag_configure(warning_name, background="#ffff99")
+        notice_name = ExceptionLevel.Enum.Name(ExceptionLevel.NOTICE)
+        self.tree.tag_configure(notice_name, background="#85e085")
 
         yscrollbar = ttk.Scrollbar(frame, orient="vertical", command=self.tree.yview)
         yscrollbar.grid(row=0, column=1, sticky="ns")
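
A small sketch of the enum-name lookup the dialog now relies on for display text and row tags; it assumes the generated core_pb2 module nests the level values under ExceptionLevel.Enum, which is what the calls above imply:

    from core.api.grpc.core_pb2 import ExceptionLevel

    # translate a numeric protobuf level into its display name, e.g. "ERROR"
    name = ExceptionLevel.Enum.Name(ExceptionLevel.ERROR)
    print(name)
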
@@ -105,40 +110,13 @@ class AlertsDialog(Dialog):
         dialog = DaemonLog(self, self.app)
         dialog.show()
 
-    def get_level(self, level):
-        if level == core_pb2.ExceptionLevel.ERROR:
-            return "ERROR"
-        if level == core_pb2.ExceptionLevel.FATAL:
-            return "FATAL"
-        if level == core_pb2.ExceptionLevel.WARNING:
-            return "WARNING"
-        if level == core_pb2.ExceptionLevel.NOTICE:
-            return "NOTICE"
-
     def click_select(self, event):
-        current = self.tree.selection()
-        values = self.tree.item(current)["values"]
-        time = values[0]
-        level = values[1]
-        session_id = values[2]
-        node_id = values[3]
-        source = values[4]
-        text = "DATE: %s\nLEVEL: %s\nNODE: %s (%s)\nSESSION: %s\nSOURCE: %s\n\n" % (
-            time,
-            level,
-            node_id,
-            self.app.core.canvas_nodes[node_id].core_node.name,
-            session_id,
-            source,
-        )
-        try:
-            sid = self.app.core.session_id
-            self.app.core.client.get_node(sid, node_id)
-            text = text + "node created"
-        except RpcError:
-            text = text + "node not created"
+        current = self.tree.selection()[0]
+        alarm = self.alarm_map[current]
+        self.codetext.text.config(state=tk.NORMAL)
         self.codetext.text.delete("1.0", "end")
-        self.codetext.text.insert("1.0", text)
+        self.codetext.text.insert("1.0", alarm.exception_event.text)
+        self.codetext.text.config(state=tk.DISABLED)
 
 
 class DaemonLog(Dialog):
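
The alarm_map bookkeeping above is standard ttk.Treeview usage: insert() returns an item id, and selection() later hands that id back, so the full alarm object can be looked up instead of re-parsing column values. A self-contained sketch with hypothetical data:

    import tkinter as tk
    from tkinter import ttk

    root = tk.Tk()
    tree = ttk.Treeview(root, columns=("source",))
    alarm_map = {}

    # insert() returns the item id; remember the full object under that id
    item_id = tree.insert("", tk.END, text="date", values=("example-source",))
    alarm_map[item_id] = {"text": "full exception text"}

    def on_select(_event):
        # selection() returns the selected item ids; the first one keys the map
        selected = tree.selection()[0]
        print(alarm_map[selected]["text"])

    tree.bind("<<TreeviewSelect>>", on_select)
    root.mainloop()
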
@@ -380,12 +380,13 @@ class CoreNodeBase(NodeBase):
 
         return common
 
-    def cmd(self, args, wait=True):
+    def cmd(self, args, wait=True, shell=False):
         """
         Runs a command within a node container.
 
         :param str args: command to run
         :param bool wait: True to wait for status, False otherwise
+        :param bool shell: True to use shell, False otherwise
         :return: combined stdout and stderr
         :rtype: str
         :raises CoreCommandError: when a non-zero exit status occurs
@@ -561,19 +562,20 @@ class CoreNode(CoreNodeBase):
         finally:
             self.rmnodedir()
 
-    def cmd(self, args, wait=True):
+    def cmd(self, args, wait=True, shell=False):
         """
         Runs a command that is used to configure and setup the network within a
         node.
 
         :param str args: command to run
         :param bool wait: True to wait for status, False otherwise
+        :param bool shell: True to use shell, False otherwise
         :return: combined stdout and stderr
         :rtype: str
         :raises CoreCommandError: when a non-zero exit status occurs
         """
         if self.server is None:
-            return self.client.check_cmd(args, wait=wait)
+            return self.client.check_cmd(args, wait=wait, shell=shell)
         else:
             args = self.client.create_cmd(args)
             return self.server.remote_cmd(args, wait=wait)
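
A hedged usage sketch of the new shell flag on node commands; node stands for an already-started CoreNode from a running session, and the commands are illustrative:

    def run_examples(node):
        # default behaviour is unchanged: the command is executed directly
        output = node.cmd("ip addr show")
        # shell-only syntax in the command string (pipes, &&, ||) can now request a shell
        filtered = node.cmd("ip addr show | grep inet", shell=True)
        return output, filtered
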
@@ -53,16 +53,17 @@ class VnodeClient:
     def create_cmd(self, args):
         return f"{VCMD_BIN} -c {self.ctrlchnlname} -- {args}"
 
-    def check_cmd(self, args, wait=True):
+    def check_cmd(self, args, wait=True, shell=False):
         """
         Run command and return exit status and combined stdout and stderr.
 
         :param str args: command to run
         :param bool wait: True to wait for command status, False otherwise
+        :param bool shell: True to use shell, False otherwise
         :return: combined stdout and stderr
         :rtype: str
         :raises core.CoreCommandError: when there is a non-zero exit status
         """
         self._verify_connection()
         args = self.create_cmd(args)
-        return utils.cmd(args, wait=wait)
+        return utils.cmd(args, wait=wait, shell=shell)
@@ -43,9 +43,9 @@ class DockerClient:
     def stop_container(self):
         self.run(f"docker rm -f {self.name}")
 
-    def check_cmd(self, cmd):
+    def check_cmd(self, cmd, wait=True, shell=False):
         logging.info("docker cmd output: %s", cmd)
-        return utils.cmd(f"docker exec {self.name} {cmd}")
+        return utils.cmd(f"docker exec {self.name} {cmd}", wait=wait, shell=shell)
 
     def create_ns_cmd(self, cmd):
         return f"nsenter -t {self.pid} -u -i -p -n {cmd}"
@@ -148,10 +148,10 @@ class DockerNode(CoreNode):
         self.client.stop_container()
         self.up = False
 
-    def nsenter_cmd(self, args, wait=True):
+    def nsenter_cmd(self, args, wait=True, shell=False):
         if self.server is None:
             args = self.client.create_ns_cmd(args)
-            return utils.cmd(args, wait=wait)
+            return utils.cmd(args, wait=wait, shell=shell)
         else:
             args = self.client.create_ns_cmd(args)
             return self.server.remote_cmd(args, wait=wait)
@@ -47,9 +47,9 @@ class LxdClient:
     def create_ns_cmd(self, cmd):
         return f"nsenter -t {self.pid} -m -u -i -p -n {cmd}"
 
-    def check_cmd(self, cmd, wait=True):
+    def check_cmd(self, cmd, wait=True, shell=False):
         args = self.create_cmd(cmd)
-        return utils.cmd(args, wait=wait)
+        return utils.cmd(args, wait=wait, shell=shell)
 
     def copy_file(self, source, destination):
         if destination[0] != "/":
@@ -126,7 +126,10 @@ class LinuxNetClient:
         :param str device: device to flush
         :return: nothing
         """
-        self.run(f"{IP_BIN} -6 address flush dev {device}")
+        self.run(
+            f"[ -e /sys/class/net/{device} ] && {IP_BIN} -6 address flush dev {device} || true",
+            shell=True,
+        )
 
     def device_mac(self, device, mac):
         """
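
The flush is now wrapped in a bracket test with an || true fallback, which is shell syntax, so the call goes through a shell and a missing device no longer fails the command. A small sketch of the same idea through utils.cmd, with an illustrative device name:

    from core import utils

    device = "eth0"  # illustrative
    # [ -e ... ] and || are shell constructs, hence shell=True; the command
    # exits 0 whether or not the device exists
    utils.cmd(f"[ -e /sys/class/net/{device} ] && echo present || true", shell=True)
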
@@ -181,7 +181,7 @@ class EbtablesQueue:
 
     def ebchange(self, wlan):
         """
-        Flag a change to the given WLAN"s _linked dict, so the ebtables
+        Flag a change to the given WLAN's _linked dict, so the ebtables
         chain will be rebuilt at the next interval.
 
         :return: nothing
@@ -197,8 +197,17 @@ class EbtablesQueue:
         :return: nothing
         """
         with wlan._linked_lock:
-            # flush the chain
-            self.cmds.append(f"-F {wlan.brname}")
+            if wlan.has_ebtables_chain:
+                # flush the chain
+                self.cmds.append(f"-F {wlan.brname}")
+            else:
+                wlan.has_ebtables_chain = True
+                self.cmds.extend(
+                    [
+                        f"-N {wlan.brname} -P {wlan.policy}",
+                        f"-A FORWARD --logical-in {wlan.brname} -j {wlan.brname}",
+                    ]
+                )
             # rebuild the chain
             for netif1, v in wlan._linked.items():
                 for netif2, linked in v.items():
@@ -297,14 +306,7 @@ class CoreNetwork(CoreNetworkBase):
         :raises CoreCommandError: when there is a command exception
         """
         self.net_client.create_bridge(self.brname)
-
-        # create a new ebtables chain for this bridge
-        cmds = [
-            f"{EBTABLES_BIN} -N {self.brname} -P {self.policy}",
-            f"{EBTABLES_BIN} -A FORWARD --logical-in {self.brname} -j {self.brname}",
-        ]
-        ebtablescmds(self.host_cmd, cmds)
-
+        self.has_ebtables_chain = False
         self.up = True
 
     def shutdown(self):
@@ -320,11 +322,12 @@ class CoreNetwork(CoreNetworkBase):
 
         try:
             self.net_client.delete_bridge(self.brname)
-            cmds = [
-                f"{EBTABLES_BIN} -D FORWARD --logical-in {self.brname} -j {self.brname}",
-                f"{EBTABLES_BIN} -X {self.brname}",
-            ]
-            ebtablescmds(self.host_cmd, cmds)
+            if self.has_ebtables_chain:
+                cmds = [
+                    f"{EBTABLES_BIN} -D FORWARD --logical-in {self.brname} -j {self.brname}",
+                    f"{EBTABLES_BIN} -X {self.brname}",
+                ]
+                ebtablescmds(self.host_cmd, cmds)
         except CoreCommandError:
             logging.exception("error during shutdown")
 
@@ -14,7 +14,7 @@ import time
 from core import utils
 from core.constants import which
 from core.emulator.data import FileData
-from core.emulator.enumerations import MessageFlags, RegisterTlvs
+from core.emulator.enumerations import ExceptionLevels, MessageFlags, RegisterTlvs
 from core.errors import CoreCommandError
 
 
@@ -628,7 +628,13 @@ class CoreServices:
         for args in service.shutdown:
             try:
                 node.cmd(args)
-            except CoreCommandError:
+            except CoreCommandError as e:
+                self.session.exception(
+                    ExceptionLevels.ERROR,
+                    "services",
+                    node.id,
+                    f"error stopping service {service.name}: {e.stderr}",
+                )
                 logging.exception("error running stop command %s", args)
                 status = -1
         return status
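
A sketch of the failure path this adds, from the caller's point of view: CoreCommandError carries the failed command's stderr, which now feeds a session exception instead of only a log entry. Names here are illustrative:

    from core.emulator.enumerations import ExceptionLevels
    from core.errors import CoreCommandError

    def stop_and_report(session, node, args, service_name):
        try:
            node.cmd(args)
        except CoreCommandError as e:
            # e.stderr holds the error output of the failed shutdown command
            session.exception(
                ExceptionLevels.ERROR,
                "services",
                node.id,
                f"error stopping service {service_name}: {e.stderr}",
            )
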
@@ -1101,19 +1101,26 @@ class TestGrpc:
         client = CoreGrpcClient()
         session = grpc_server.coreemu.create_session()
         queue = Queue()
+        exception_level = ExceptionLevels.FATAL
+        source = "test"
+        node_id = None
+        text = "exception message"
 
         def handle_event(event_data):
             assert event_data.session_id == session.id
             assert event_data.HasField("exception_event")
+            exception_event = event_data.exception_event
+            assert exception_event.level == exception_level.value
+            assert exception_event.node_id == 0
+            assert exception_event.source == source
+            assert exception_event.text == text
             queue.put(event_data)
 
         # then
         with client.context_connect():
             client.events(session.id, handle_event)
             time.sleep(0.1)
-            session.exception(
-                ExceptionLevels.FATAL.value, "test", None, "exception message"
-            )
+            session.exception(exception_level, source, node_id, text)
 
             # then
             queue.get(timeout=5)