"""
session.py: defines the Session class used by the core-daemon daemon program
that manages a CORE session.
"""

import logging
import os
import pwd
import shutil
import subprocess
import tempfile
import threading
import time
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type, TypeVar

from core import constants, utils
from core.configservice.manager import ConfigServiceManager
from core.emane.emanemanager import EmaneManager
from core.emane.nodes import EmaneNet
from core.emulator.data import (
    ConfigData,
    EventData,
    ExceptionData,
    FileData,
    InterfaceData,
    LinkData,
    LinkOptions,
    NodeData,
    NodeOptions,
)
from core.emulator.distributed import DistributedController
from core.emulator.enumerations import (
    EventTypes,
    ExceptionLevels,
    LinkTypes,
    MessageFlags,
    NodeTypes,
)
from core.emulator.sessionconfig import SessionConfig
from core.errors import CoreError
from core.location.event import EventLoop
from core.location.geo import GeoLocation
from core.location.mobility import BasicRangeModel, MobilityManager
from core.nodes.base import CoreNetworkBase, CoreNode, CoreNodeBase, NodeBase
from core.nodes.docker import DockerNode
from core.nodes.interface import CoreInterface
from core.nodes.lxd import LxcNode
from core.nodes.network import (
    CtrlNet,
    GreTapBridge,
    HubNode,
    PtpNet,
    SwitchNode,
    TunnelNode,
    WlanNode,
)
from core.nodes.physical import PhysicalNode, Rj45Node
from core.plugins.sdt import Sdt
from core.services.coreservices import CoreServices
from core.xml import corexml, corexmldeployment
from core.xml.corexml import CoreXmlReader, CoreXmlWriter

# maps for converting from API call node type values to classes and vice versa
NODES: Dict[NodeTypes, Type[NodeBase]] = {
    NodeTypes.DEFAULT: CoreNode,
    NodeTypes.PHYSICAL: PhysicalNode,
    NodeTypes.SWITCH: SwitchNode,
    NodeTypes.HUB: HubNode,
    NodeTypes.WIRELESS_LAN: WlanNode,
    NodeTypes.RJ45: Rj45Node,
    NodeTypes.TUNNEL: TunnelNode,
    NodeTypes.EMANE: EmaneNet,
    NodeTypes.TAP_BRIDGE: GreTapBridge,
    NodeTypes.PEER_TO_PEER: PtpNet,
    NodeTypes.CONTROL_NET: CtrlNet,
    NodeTypes.DOCKER: DockerNode,
    NodeTypes.LXC: LxcNode,
}
NODES_TYPE: Dict[Type[NodeBase], NodeTypes] = {NODES[x]: x for x in NODES}
CONTAINER_NODES: Set[Type[NodeBase]] = {DockerNode, LxcNode}
CTRL_NET_ID: int = 9001
LINK_COLORS: List[str] = ["green", "blue", "orange", "purple", "turquoise"]
NT: TypeVar = TypeVar("NT", bound=NodeBase)
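
# Illustrative sketch (not part of the original module): the NODES/NODES_TYPE maps
# above let API-level node type values be resolved to concrete classes and back,
# e.g. through the Session classmethods defined below:
#
#   node_class = Session.get_node_class(NodeTypes.SWITCH)   # -> SwitchNode
#   node_type = Session.get_node_type(SwitchNode)           # -> NodeTypes.SWITCH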


class Session:
    """
    CORE session manager.
    """

    def __init__(
        self, _id: int, config: Dict[str, str] = None, mkdir: bool = True
    ) -> None:
        """
        Create a Session instance.

        :param _id: session id
        :param config: session configuration
        :param mkdir: flag to determine if a directory should be made
        """
        self.id: int = _id

        # define and create session directory when desired
        self.session_dir: str = os.path.join(tempfile.gettempdir(), f"pycore.{self.id}")
        if mkdir:
            os.mkdir(self.session_dir)

        self.name: Optional[str] = None
        self.file_name: Optional[str] = None
        self.thumbnail: Optional[str] = None
        self.user: Optional[str] = None
        self.event_loop: EventLoop = EventLoop()
        self.link_colors: Dict[int, str] = {}

        # dict of nodes: all nodes and nets
        self.nodes: Dict[int, NodeBase] = {}
        self.nodes_lock = threading.Lock()

        # states and hooks handlers
        self.state: EventTypes = EventTypes.DEFINITION_STATE
        self.state_time: float = time.monotonic()
        self.hooks: Dict[EventTypes, List[Tuple[str, str]]] = {}
        self.state_hooks: Dict[EventTypes, List[Callable[[EventTypes], None]]] = {}
        self.add_state_hook(
            state=EventTypes.RUNTIME_STATE, hook=self.runtime_state_hook
        )

        # handlers for broadcasting information
        self.event_handlers: List[Callable[[EventData], None]] = []
        self.exception_handlers: List[Callable[[ExceptionData], None]] = []
        self.node_handlers: List[Callable[[NodeData], None]] = []
        self.link_handlers: List[Callable[[LinkData], None]] = []
        self.file_handlers: List[Callable[[FileData], None]] = []
        self.config_handlers: List[Callable[[ConfigData], None]] = []
        self.shutdown_handlers: List[Callable[[Session], None]] = []

        # session options/metadata
        self.options: SessionConfig = SessionConfig()
        if not config:
            config = {}
        for key in config:
            value = config[key]
            self.options.set_config(key, value)
        self.metadata: Dict[str, str] = {}

        # distributed support and logic
        self.distributed: DistributedController = DistributedController(self)

        # initialize session feature helpers
        self.location: GeoLocation = GeoLocation()
        self.mobility: MobilityManager = MobilityManager(self)
        self.services: CoreServices = CoreServices(self)
        self.emane: EmaneManager = EmaneManager(self)
        self.sdt: Sdt = Sdt(self)

        # config services
        self.service_manager: Optional[ConfigServiceManager] = None

    @classmethod
    def get_node_class(cls, _type: NodeTypes) -> Type[NodeBase]:
        """
        Retrieve the class for a given node type.

        :param _type: node type to get class for
        :return: node class
        """
        node_class = NODES.get(_type)
        if node_class is None:
            raise CoreError(f"invalid node type: {_type}")
        return node_class

    @classmethod
    def get_node_type(cls, _class: Type[NodeBase]) -> NodeTypes:
        """
        Retrieve node type for a given node class.

        :param _class: node class to get a node type for
        :return: node type
        :raises CoreError: when node type does not exist
        """
        node_type = NODES_TYPE.get(_class)
        if node_type is None:
            raise CoreError(f"invalid node class: {_class}")
        return node_type

    def _link_wireless(
        self, node1: CoreNodeBase, node2: CoreNodeBase, connect: bool
    ) -> None:
        """
        Objects to deal with when connecting/disconnecting wireless links.

        :param node1: node one for wireless link
        :param node2: node two for wireless link
        :param connect: link interfaces if True, unlink otherwise
        :return: nothing
        :raises core.CoreError: when objects to link is less than 2, or no common
            networks are found
        """
        logging.info(
            "handling wireless linking node1(%s) node2(%s): %s",
            node1.name,
            node2.name,
            connect,
        )
        common_networks = node1.commonnets(node2)
        if not common_networks:
            raise CoreError("no common network found for wireless link/unlink")
        for common_network, iface1, iface2 in common_networks:
            if not isinstance(common_network, (WlanNode, EmaneNet)):
                logging.info(
                    "skipping common network that is not wireless/emane: %s",
                    common_network,
                )
                continue
            if connect:
                common_network.link(iface1, iface2)
            else:
                common_network.unlink(iface1, iface2)

    def add_link(
        self,
        node1_id: int,
        node2_id: int,
        iface1_data: InterfaceData = None,
        iface2_data: InterfaceData = None,
        options: LinkOptions = None,
        link_type: LinkTypes = LinkTypes.WIRED,
    ) -> Tuple[CoreInterface, CoreInterface]:
        """
        Add a link between nodes.

        :param node1_id: node one id
        :param node2_id: node two id
        :param iface1_data: node one interface data, defaults to none
        :param iface2_data: node two interface data, defaults to none
        :param options: data for creating link, defaults to no options
        :param link_type: type of link to add
        :return: tuple of created core interfaces, depending on link
        """
        if not options:
            options = LinkOptions()
        node1 = self.get_node(node1_id, NodeBase)
        node2 = self.get_node(node2_id, NodeBase)
        iface1 = None
        iface2 = None

        # wireless link
        if link_type == LinkTypes.WIRELESS:
            if isinstance(node1, CoreNodeBase) and isinstance(node2, CoreNodeBase):
                self._link_wireless(node1, node2, connect=True)
            else:
                raise CoreError(
                    f"cannot wireless link node1({type(node1)}) node2({type(node2)})"
                )
        # wired link
        else:
            # peer to peer link
            if isinstance(node1, CoreNodeBase) and isinstance(node2, CoreNodeBase):
                logging.info("linking ptp: %s - %s", node1.name, node2.name)
                start = self.state.should_start()
                ptp = self.create_node(PtpNet, start)
                iface1 = node1.new_iface(ptp, iface1_data)
                iface2 = node2.new_iface(ptp, iface2_data)
                ptp.linkconfig(iface1, options)
                if not options.unidirectional:
                    ptp.linkconfig(iface2, options)
            # link node to net
            elif isinstance(node1, CoreNodeBase) and isinstance(node2, CoreNetworkBase):
                iface1 = node1.new_iface(node2, iface1_data)
                if not isinstance(node2, (EmaneNet, WlanNode)):
                    node2.linkconfig(iface1, options)
            # link net to node
            elif isinstance(node2, CoreNodeBase) and isinstance(node1, CoreNetworkBase):
                iface2 = node2.new_iface(node1, iface2_data)
                wireless_net = isinstance(node1, (EmaneNet, WlanNode))
                if not options.unidirectional and not wireless_net:
                    node1.linkconfig(iface2, options)
            # network to network
            elif isinstance(node1, CoreNetworkBase) and isinstance(
                node2, CoreNetworkBase
            ):
                logging.info(
                    "linking network to network: %s - %s", node1.name, node2.name
                )
                iface1 = node1.linknet(node2)
                node1.linkconfig(iface1, options)
                if not options.unidirectional:
                    iface1.swapparams("_params_up")
                    node2.linkconfig(iface1, options)
                    iface1.swapparams("_params_up")
            else:
                raise CoreError(
                    f"cannot link node1({type(node1)}) node2({type(node2)})"
                )

        # configure tunnel nodes
        key = options.key
        if isinstance(node1, TunnelNode):
            logging.info("setting tunnel key for: %s", node1.name)
            node1.setkey(key, iface1_data)
        if isinstance(node2, TunnelNode):
            logging.info("setting tunnel key for: %s", node2.name)
            node2.setkey(key, iface2_data)
        self.sdt.add_link(node1_id, node2_id)
        return iface1, iface2
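
    # Illustrative sketch (not part of the original module): a typical wired link
    # between two existing nodes, assuming a session with node ids 1 and 2 already
    # added and InterfaceData/LinkOptions fields as named here.
    #
    #   iface1_data = InterfaceData(id=0, ip4="10.0.0.1", ip4_mask=24)
    #   iface2_data = InterfaceData(id=0, ip4="10.0.0.2", ip4_mask=24)
    #   options = LinkOptions(bandwidth=54_000_000, delay=5000)
    #   session.add_link(1, 2, iface1_data, iface2_data, options)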

    def delete_link(
        self,
        node1_id: int,
        node2_id: int,
        iface1_id: int = None,
        iface2_id: int = None,
        link_type: LinkTypes = LinkTypes.WIRED,
    ) -> None:
        """
        Delete a link between nodes.

        :param node1_id: node one id
        :param node2_id: node two id
        :param iface1_id: interface id for node one
        :param iface2_id: interface id for node two
        :param link_type: link type to delete
        :return: nothing
        :raises core.CoreError: when no common network is found for link being deleted
        """
        node1 = self.get_node(node1_id, NodeBase)
        node2 = self.get_node(node2_id, NodeBase)
        logging.info(
            "deleting link(%s) node(%s):interface(%s) node(%s):interface(%s)",
            link_type.name,
            node1.name,
            iface1_id,
            node2.name,
            iface2_id,
        )

        # wireless link
        if link_type == LinkTypes.WIRELESS:
            if isinstance(node1, CoreNodeBase) and isinstance(node2, CoreNodeBase):
                self._link_wireless(node1, node2, connect=False)
            else:
                raise CoreError(
                    "cannot delete wireless link "
                    f"node1({type(node1)}) node2({type(node2)})"
                )
        # wired link
        else:
            if isinstance(node1, CoreNodeBase) and isinstance(node2, CoreNodeBase):
                iface1 = node1.get_iface(iface1_id)
                iface2 = node2.get_iface(iface2_id)
                if iface1.net != iface2.net:
                    raise CoreError(
                        f"node1({node1.name}) node2({node2.name}) "
                        "not connected to same net"
                    )
                ptp = iface1.net
                node1.delete_iface(iface1_id)
                node2.delete_iface(iface2_id)
                self.delete_node(ptp.id)
            elif isinstance(node1, CoreNodeBase) and isinstance(node2, CoreNetworkBase):
                node1.delete_iface(iface1_id)
            elif isinstance(node2, CoreNodeBase) and isinstance(node1, CoreNetworkBase):
                node2.delete_iface(iface2_id)
        self.sdt.delete_link(node1_id, node2_id)

    def update_link(
        self,
        node1_id: int,
        node2_id: int,
        iface1_id: int = None,
        iface2_id: int = None,
        options: LinkOptions = None,
        link_type: LinkTypes = LinkTypes.WIRED,
    ) -> None:
        """
        Update link information between nodes.

        :param node1_id: node one id
        :param node2_id: node two id
        :param iface1_id: interface id for node one
        :param iface2_id: interface id for node two
        :param options: data to update link with
        :param link_type: type of link to update
        :return: nothing
        :raises core.CoreError: when updating a wireless type link, when there is an
            unknown link between networks
        """
        if not options:
            options = LinkOptions()
        node1 = self.get_node(node1_id, NodeBase)
        node2 = self.get_node(node2_id, NodeBase)
        logging.info(
            "update link(%s) node(%s):interface(%s) node(%s):interface(%s)",
            link_type.name,
            node1.name,
            iface1_id,
            node2.name,
            iface2_id,
        )

        # wireless link
        if link_type == LinkTypes.WIRELESS:
            raise CoreError("cannot update wireless link")
        else:
            if isinstance(node1, CoreNodeBase) and isinstance(node2, CoreNodeBase):
                iface1 = node1.ifaces.get(iface1_id)
                iface2 = node2.ifaces.get(iface2_id)
                if not iface1:
                    raise CoreError(
                        f"node({node1.name}) missing interface({iface1_id})"
                    )
                if not iface2:
                    raise CoreError(
                        f"node({node2.name}) missing interface({iface2_id})"
                    )
                if iface1.net != iface2.net:
                    raise CoreError(
                        f"node1({node1.name}) node2({node2.name}) "
                        "not connected to same net"
                    )
                ptp = iface1.net
                ptp.linkconfig(iface1, options, iface2)
                if not options.unidirectional:
                    ptp.linkconfig(iface2, options, iface1)
            elif isinstance(node1, CoreNodeBase) and isinstance(node2, CoreNetworkBase):
                iface = node1.get_iface(iface1_id)
                node2.linkconfig(iface, options)
            elif isinstance(node2, CoreNodeBase) and isinstance(node1, CoreNetworkBase):
                iface = node2.get_iface(iface2_id)
                node1.linkconfig(iface, options)
            elif isinstance(node1, CoreNetworkBase) and isinstance(
                node2, CoreNetworkBase
            ):
                iface = node1.get_linked_iface(node2)
                upstream = False
                if not iface:
                    upstream = True
                    iface = node2.get_linked_iface(node1)
                if not iface:
                    raise CoreError("modify unknown link between nets")
                if upstream:
                    iface.swapparams("_params_up")
                    node1.linkconfig(iface, options)
                    iface.swapparams("_params_up")
                else:
                    node1.linkconfig(iface, options)
                if not options.unidirectional:
                    if upstream:
                        node2.linkconfig(iface, options)
                    else:
                        iface.swapparams("_params_up")
                        node2.linkconfig(iface, options)
                        iface.swapparams("_params_up")
            else:
                raise CoreError(
                    f"cannot update link node1({type(node1)}) node2({type(node2)})"
                )

    def next_node_id(self) -> int:
        """
        Find the next valid node id, starting from 1.

        :return: next node id
        """
        _id = 1
        while True:
            if _id not in self.nodes:
                break
            _id += 1
        return _id

    def add_node(
        self, _class: Type[NT], _id: int = None, options: NodeOptions = None
    ) -> NT:
        """
        Add a node to the session, based on the provided node data.

        :param _class: node class to create
        :param _id: id for node, defaults to None for generated id
        :param options: data to create node with
        :return: created node
        :raises core.CoreError: when an invalid node type is given
        """
        # set node start based on current session state, override and check when rj45
        start = self.state.should_start()
        enable_rj45 = self.options.get_config("enablerj45") == "1"
        if _class == Rj45Node and not enable_rj45:
            start = False

        # determine node id
        if not _id:
            _id = self.next_node_id()

        # generate name if not provided
        if not options:
            options = NodeOptions()
            options.set_position(0, 0)
        name = options.name
        if not name:
            name = f"{_class.__name__}{_id}"

        # verify distributed server
        server = self.distributed.servers.get(options.server)
        if options.server is not None and server is None:
            raise CoreError(f"invalid distributed server: {options.server}")

        # create node
        logging.info(
            "creating node(%s) id(%s) name(%s) start(%s)",
            _class.__name__,
            _id,
            name,
            start,
        )
        kwargs = dict(_id=_id, name=name, server=server)
        if _class in CONTAINER_NODES:
            kwargs["image"] = options.image
        node = self.create_node(_class, start, **kwargs)

        # set node attributes
        node.icon = options.icon
        node.canvas = options.canvas

        # set node position and broadcast it
        self.set_node_position(node, options)

        # add services to needed nodes
        if isinstance(node, (CoreNode, PhysicalNode, DockerNode, LxcNode)):
            node.type = options.model
            logging.debug("set node type: %s", node.type)
            self.services.add_services(node, node.type, options.services)

        # add config services
        logging.info("setting node config services: %s", options.config_services)
        for name in options.config_services:
            service_class = self.service_manager.get_service(name)
            node.add_config_service(service_class)

        # ensure default emane configuration
        if isinstance(node, EmaneNet) and options.emane:
            self.emane.set_model_config(_id, options.emane)
        # set default wlan config if needed
        if isinstance(node, WlanNode):
            self.mobility.set_model_config(_id, BasicRangeModel.name)

        # boot nodes after runtime, CoreNodes, Physical, and RJ45 are all nodes
        is_boot_node = isinstance(node, CoreNodeBase) and not isinstance(node, Rj45Node)
        if self.state == EventTypes.RUNTIME_STATE and is_boot_node:
            self.write_nodes()
            self.add_remove_control_iface(node=node, remove=False)
            self.services.boot_services(node)

        self.sdt.add_node(node)
        return node
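
    # Illustrative sketch (not part of the original module): creating a default
    # container node through the public API, assuming an existing session object
    # and NodeOptions fields as named here.
    #
    #   options = NodeOptions(name="n1", model="router", x=100, y=100)
    #   node = session.add_node(CoreNode, options=options)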

    def edit_node(self, node_id: int, options: NodeOptions) -> None:
        """
        Edit node information.

        :param node_id: id of node to update
        :param options: data to update node with
        :return: nothing
        :raises core.CoreError: when node to update does not exist
        """
        # get node to update
        node = self.get_node(node_id, NodeBase)

        # set node position and broadcast it
        self.set_node_position(node, options)

        # update attributes
        node.canvas = options.canvas
        node.icon = options.icon

        # provide edits to sdt
        self.sdt.edit_node(node, options.lon, options.lat, options.alt)

    def set_node_position(self, node: NodeBase, options: NodeOptions) -> None:
        """
        Set position for a node, use lat/lon/alt if needed.

        :param node: node to set position for
        :param options: data for node
        :return: nothing
        """
        # extract location values
        x = options.x
        y = options.y
        lat = options.lat
        lon = options.lon
        alt = options.alt

        # check if we need to generate position from lat/lon/alt
        has_empty_position = all(i is None for i in [x, y])
        has_lat_lon_alt = all(i is not None for i in [lat, lon, alt])
        using_lat_lon_alt = has_empty_position and has_lat_lon_alt
        if using_lat_lon_alt:
            x, y, _ = self.location.getxyz(lat, lon, alt)
            node.setposition(x, y, None)
            node.position.set_geo(lon, lat, alt)
            self.broadcast_node(node)
        elif not has_empty_position:
            node.setposition(x, y, None)

    def start_mobility(self, node_ids: List[int] = None) -> None:
        """
        Start mobility for the provided node ids.

        :param node_ids: nodes to start mobility for
        :return: nothing
        """
        self.mobility.startup(node_ids)

    def is_active(self) -> bool:
        """
        Determine if this session is considered to be active. (Runtime or Data collect states)

        :return: True if active, False otherwise
        """
        result = self.state in {EventTypes.RUNTIME_STATE, EventTypes.DATACOLLECT_STATE}
        logging.info("session(%s) checking if active: %s", self.id, result)
        return result

    def open_xml(self, file_name: str, start: bool = False) -> None:
        """
        Import a session from the EmulationScript XML format.

        :param file_name: xml file to load session from
        :param start: instantiate session if true, false otherwise
        :return: nothing
        """
        logging.info("opening xml: %s", file_name)

        # clear out existing session
        self.clear()

        if start:
            state = EventTypes.CONFIGURATION_STATE
        else:
            state = EventTypes.DEFINITION_STATE
        self.set_state(state)
        self.name = os.path.basename(file_name)
        self.file_name = file_name

        # read in xml file
        CoreXmlReader(self).read(file_name)

        # start session if needed
        if start:
            self.instantiate()

    def save_xml(self, file_name: str) -> None:
        """
        Export a session to the EmulationScript XML format.

        :param file_name: file name to write session xml to
        :return: nothing
        """
        CoreXmlWriter(self).write(file_name)
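
    # Illustrative sketch (not part of the original module): round-tripping a
    # scenario, assuming an existing session object and a writable path.
    #
    #   session.save_xml("/tmp/scenario.xml")
    #   session.open_xml("/tmp/scenario.xml", start=True)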

    def add_hook(
        self, state: EventTypes, file_name: str, data: str, source_name: str = None
    ) -> None:
        """
        Store a hook from a received file message.

        :param state: when to run hook
        :param file_name: file name for hook
        :param data: hook data
        :param source_name: source name
        :return: nothing
        """
        logging.info(
            "setting state hook: %s - %s source(%s)", state, file_name, source_name
        )
        hook = file_name, data
        state_hooks = self.hooks.setdefault(state, [])
        state_hooks.append(hook)

        # immediately run a hook if it is in the current state
        if self.state == state:
            logging.info("immediately running new state hook")
            self.run_hook(hook)
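
    # Illustrative sketch (not part of the original module): registering a shell
    # script to run when the session enters the runtime state; the script is written
    # into the session directory and executed with the environment produced by
    # get_environment().
    #
    #   data = "#!/bin/sh\necho started > $SESSION_DIR/hook.out\n"
    #   session.add_hook(EventTypes.RUNTIME_STATE, "runtime_hook.sh", data)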

    def add_node_file(
        self, node_id: int, source_name: str, file_name: str, data: str
    ) -> None:
        """
        Add a file to a node.

        :param node_id: node to add file to
        :param source_name: source file name
        :param file_name: file name to add
        :param data: file data
        :return: nothing
        """
        node = self.get_node(node_id, CoreNodeBase)
        if source_name is not None:
            node.addfile(source_name, file_name)
        elif data is not None:
            node.nodefile(file_name, data)

    def clear(self) -> None:
        """
        Clear all CORE session data. (nodes, hooks, etc)

        :return: nothing
        """
        self.emane.shutdown()
        self.delete_nodes()
        self.distributed.shutdown()
        self.hooks.clear()
        self.emane.reset()
        self.emane.config_reset()
        self.location.reset()
        self.services.reset()
        self.mobility.config_reset()
        self.link_colors.clear()

    def start_events(self) -> None:
        """
        Start event loop.

        :return: nothing
        """
        self.event_loop.run()

    def mobility_event(self, event_data: EventData) -> None:
        """
        Handle a mobility event.

        :param event_data: event data to handle
        :return: nothing
        """
        self.mobility.handleevent(event_data)

    def set_location(self, lat: float, lon: float, alt: float, scale: float) -> None:
        """
        Set session geospatial location.

        :param lat: latitude
        :param lon: longitude
        :param alt: altitude
        :param scale: reference scale
        :return: nothing
        """
        self.location.setrefgeo(lat, lon, alt)
        self.location.refscale = scale

    def shutdown(self) -> None:
        """
        Shutdown all session nodes and remove the session directory.
        """
        logging.info("session(%s) shutting down", self.id)
        self.set_state(EventTypes.DATACOLLECT_STATE, send_event=True)
        self.set_state(EventTypes.SHUTDOWN_STATE, send_event=True)

        # clear out current core session
        self.clear()

        # shutdown sdt
        self.sdt.shutdown()

        # remove this session's working directory
        preserve = self.options.get_config("preservedir") == "1"
        if not preserve:
            shutil.rmtree(self.session_dir, ignore_errors=True)

        # call session shutdown handlers
        for handler in self.shutdown_handlers:
            handler(self)

    def broadcast_event(self, event_data: EventData) -> None:
        """
        Handle event data that should be provided to event handlers.

        :param event_data: event data to send out
        :return: nothing
        """
        for handler in self.event_handlers:
            handler(event_data)

    def broadcast_exception(self, exception_data: ExceptionData) -> None:
        """
        Handle exception data that should be provided to exception handlers.

        :param exception_data: exception data to send out
        :return: nothing
        """
        for handler in self.exception_handlers:
            handler(exception_data)

    def broadcast_node(
        self,
        node: NodeBase,
        message_type: MessageFlags = MessageFlags.NONE,
        source: str = None,
    ) -> None:
        """
        Handle node data that should be provided to node handlers.

        :param node: node to broadcast
        :param message_type: type of message to broadcast, None by default
        :param source: source of broadcast, None by default
        :return: nothing
        """
        node_data = node.data(message_type, source)
        if not node_data:
            return
        for handler in self.node_handlers:
            handler(node_data)

    def broadcast_file(self, file_data: FileData) -> None:
        """
        Handle file data that should be provided to file handlers.

        :param file_data: file data to send out
        :return: nothing
        """
        for handler in self.file_handlers:
            handler(file_data)

    def broadcast_config(self, config_data: ConfigData) -> None:
        """
        Handle config data that should be provided to config handlers.

        :param config_data: config data to send out
        :return: nothing
        """
        for handler in self.config_handlers:
            handler(config_data)

    def broadcast_link(self, link_data: LinkData) -> None:
        """
        Handle link data that should be provided to link handlers.

        :param link_data: link data to send out
        :return: nothing
        """
        for handler in self.link_handlers:
            handler(link_data)

    def set_state(self, state: EventTypes, send_event: bool = False) -> None:
        """
        Set the session's current state.

        :param state: state to set to
        :param send_event: if true, generate core API event messages
        :return: nothing
        """
        if self.state == state:
            return
        self.state = state
        self.state_time = time.monotonic()
        logging.info("changing session(%s) to state %s", self.id, state.name)
        self.write_state(state)
        self.run_hooks(state)
        self.run_state_hooks(state)
        if send_event:
            event_data = EventData(event_type=state, time=str(time.monotonic()))
            self.broadcast_event(event_data)

    def write_state(self, state: EventTypes) -> None:
        """
        Write the state to a state file in the session dir.

        :param state: state to write to file
        :return: nothing
        """
        state_file = os.path.join(self.session_dir, "state")
        try:
            with open(state_file, "w") as f:
                f.write(f"{state.value} {state.name}\n")
        except IOError:
            logging.exception("error writing state file: %s", state.name)

    def run_hooks(self, state: EventTypes) -> None:
        """
        Run all hook scripts registered for the given state.

        :param state: state to run hooks for
        :return: nothing
        """
        hooks = self.hooks.get(state, [])
        for hook in hooks:
            self.run_hook(hook)

    def run_hook(self, hook: Tuple[str, str]) -> None:
        """
        Run a hook.

        :param hook: hook to run
        :return: nothing
        """
        file_name, data = hook
        logging.info("running hook %s", file_name)
        file_path = os.path.join(self.session_dir, file_name)
        log_path = os.path.join(self.session_dir, f"{file_name}.log")
        try:
            with open(file_path, "w") as f:
                f.write(data)
            with open(log_path, "w") as f:
                args = ["/bin/sh", file_name]
                subprocess.check_call(
                    args,
                    stdout=f,
                    stderr=subprocess.STDOUT,
                    close_fds=True,
                    cwd=self.session_dir,
                    env=self.get_environment(),
                )
        except (IOError, subprocess.CalledProcessError):
            logging.exception("error running hook: %s", file_path)

    def run_state_hooks(self, state: EventTypes) -> None:
        """
        Run state hooks.

        :param state: state to run hooks for
        :return: nothing
        """
        for hook in self.state_hooks.get(state, []):
            self.run_state_hook(state, hook)

    def run_state_hook(self, state: EventTypes, hook: Callable[[EventTypes], None]) -> None:
        try:
            hook(state)
        except Exception:
            message = f"exception occurred when running {state.name} state hook: {hook}"
            logging.exception(message)
            self.exception(ExceptionLevels.ERROR, "Session.run_state_hooks", message)

    def add_state_hook(
        self, state: EventTypes, hook: Callable[[EventTypes], None]
    ) -> None:
        """
        Add a state hook.

        :param state: state to add hook for
        :param hook: hook callback for the state
        :return: nothing
        """
        hooks = self.state_hooks.setdefault(state, [])
        if hook in hooks:
            raise CoreError("attempting to add duplicate state hook")
        hooks.append(hook)
        if self.state == state:
            self.run_state_hook(state, hook)
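
    # Illustrative sketch (not part of the original module): reacting to state
    # changes with a callback, assuming an existing session object.
    #
    #   def on_state(state: EventTypes) -> None:
    #       logging.info("session moved to %s", state.name)
    #
    #   session.add_state_hook(EventTypes.RUNTIME_STATE, on_state)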

    def del_state_hook(
        self, state: EventTypes, hook: Callable[[EventTypes], None]
    ) -> None:
        """
        Delete a state hook.

        :param state: state to delete hook for
        :param hook: hook to delete
        :return: nothing
        """
        hooks = self.state_hooks.get(state, [])
        if hook in hooks:
            hooks.remove(hook)

    def runtime_state_hook(self, _state: EventTypes) -> None:
        """
        Runtime state hook check.

        :param _state: state to check
        :return: nothing
        """
        self.emane.poststartup()
        # create session deployed xml
        xml_file_name = os.path.join(self.session_dir, "session-deployed.xml")
        xml_writer = corexml.CoreXmlWriter(self)
        corexmldeployment.CoreXmlDeployment(self, xml_writer.scenario)
        xml_writer.write(xml_file_name)

    def get_environment(self, state: bool = True) -> Dict[str, str]:
        """
        Get an environment suitable for a subprocess.Popen call.
        This is the current process environment with some session-specific
        variables.

        :param state: flag to determine if session state should be included
        :return: environment variables
        """
        env = os.environ.copy()
        env["SESSION"] = str(self.id)
        env["SESSION_SHORT"] = self.short_session_id()
        env["SESSION_DIR"] = self.session_dir
        env["SESSION_NAME"] = str(self.name)
        env["SESSION_FILENAME"] = str(self.file_name)
        env["SESSION_USER"] = str(self.user)
        if state:
            env["SESSION_STATE"] = str(self.state)
        # attempt to read and add environment config file
        environment_config_file = os.path.join(constants.CORE_CONF_DIR, "environment")
        try:
            if os.path.isfile(environment_config_file):
                utils.load_config(environment_config_file, env)
        except IOError:
            logging.warning(
                "environment configuration file does not exist: %s",
                environment_config_file,
            )
        # attempt to read and add user environment file
        if self.user:
            environment_user_file = os.path.join(
                "/home", self.user, ".core", "environment"
            )
            try:
                utils.load_config(environment_user_file, env)
            except IOError:
                logging.debug(
                    "user core environment settings file not present: %s",
                    environment_user_file,
                )
        return env

    def set_thumbnail(self, thumb_file: str) -> None:
        """
        Set the thumbnail filename. Move files from /tmp to session dir.

        :param thumb_file: thumbnail file to set for session
        :return: nothing
        """
        if not os.path.exists(thumb_file):
            logging.error("thumbnail file to set does not exist: %s", thumb_file)
            self.thumbnail = None
            return
        destination_file = os.path.join(self.session_dir, os.path.basename(thumb_file))
        shutil.copy(thumb_file, destination_file)
        self.thumbnail = destination_file

    def set_user(self, user: str) -> None:
        """
        Set the username for this session. Update the permissions of the
        session dir to allow the user write access.

        :param user: user to give write permissions to for the session directory
        :return: nothing
        """
        if user:
            try:
                uid = pwd.getpwnam(user).pw_uid
                gid = os.stat(self.session_dir).st_gid
                os.chown(self.session_dir, uid, gid)
            except IOError:
                logging.exception("failed to set permission on %s", self.session_dir)
        self.user = user

    def create_node(
        self, _class: Type[NT], start: bool, *args: Any, **kwargs: Any
    ) -> NT:
        """
        Create an emulation node.

        :param _class: node class to create
        :param start: True to start node, False otherwise
        :param args: list of arguments for the class to create
        :param kwargs: dictionary of arguments for the class to create
        :return: the created node instance
        :raises core.CoreError: when id of the node to create already exists
        """
        with self.nodes_lock:
            node = _class(self, *args, **kwargs)
            if node.id in self.nodes:
                node.shutdown()
                raise CoreError(f"duplicate node id {node.id} for {node.name}")
            self.nodes[node.id] = node
        if start:
            node.startup()
        return node

    def get_node(self, _id: int, _class: Type[NT]) -> NT:
        """
        Get a session node.

        :param _id: node id to retrieve
        :param _class: expected node class
        :return: node for the given id
        :raises core.CoreError: when node does not exist
        """
        node = self.nodes.get(_id)
        if node is None:
            raise CoreError(f"unknown node id {_id}")
        if not isinstance(node, _class):
            actual = node.__class__.__name__
            expected = _class.__name__
            raise CoreError(f"node class({actual}) is not expected({expected})")
        return node

    def delete_node(self, _id: int) -> bool:
        """
        Delete a node from the session and check if the session should shutdown
        when no nodes are left.

        :param _id: id of node to delete
        :return: True if node deleted, False otherwise
        """
        # delete node and check for session shutdown if a node was removed
        node = None
        with self.nodes_lock:
            if _id in self.nodes:
                node = self.nodes.pop(_id)
                logging.info("deleted node(%s)", node.name)
        if node:
            node.shutdown()
            self.sdt.delete_node(_id)
            self.check_shutdown()
        return node is not None

    def delete_nodes(self) -> None:
        """
        Clear the nodes dictionary, and call shutdown for each node.
        """
        with self.nodes_lock:
            funcs = []
            while self.nodes:
                _, node = self.nodes.popitem()
                self.sdt.delete_node(node.id)
                funcs.append((node.shutdown, [], {}))
            utils.threadpool(funcs)

    def write_nodes(self) -> None:
        """
        Write nodes to a 'nodes' file in the session dir.
        The 'nodes' file lists: number, name, api-type, class-type
        """
        file_path = os.path.join(self.session_dir, "nodes")
        try:
            with self.nodes_lock:
                with open(file_path, "w") as f:
                    for _id, node in self.nodes.items():
                        f.write(f"{_id} {node.name} {node.apitype} {type(node)}\n")
        except IOError:
            logging.exception("error writing nodes file")
2020-01-11 06:37:19 +00:00
|
|
|
    def exception(
        self, level: ExceptionLevels, source: str, text: str, node_id: int = None
    ) -> None:
        """
        Generate and broadcast an exception event.

        :param level: exception level
        :param source: source name
        :param text: exception message
        :param node_id: node related to exception
        :return: nothing
        """
        exception_data = ExceptionData(
            node=node_id,
            session=self.id,
            level=level,
            source=source,
            date=time.ctime(),
            text=text,
        )
        self.broadcast_exception(exception_data)

    def instantiate(self) -> List[Exception]:
        """
        We have entered the instantiation state; invoke startup methods
        of the various managers and boot the nodes. Validate nodes and check
        for transition to the runtime state.

        :return: list of service boot errors during startup
        """
        # write current nodes out to session directory file
        self.write_nodes()

        # create control net interfaces and network tunnels
        # which need to exist for emane to sync on location events
        # in distributed scenarios
        self.add_remove_control_net(0, remove=False)

        # initialize distributed tunnels
        self.distributed.start()

        # instantiate will be invoked again upon emane configure
        if self.emane.startup() == self.emane.NOT_READY:
            return []

        # boot node services and then start mobility
        exceptions = self.boot_nodes()
        if not exceptions:
            self.mobility.startup()

        # notify listeners that instantiation is complete
        event = EventData(event_type=EventTypes.INSTANTIATION_COMPLETE)
        self.broadcast_event(event)

        # assume either all nodes have booted already, or there are some
        # nodes on slave servers that will be booted and those servers will
        # send a node status response message
        self.check_runtime()
        return exceptions

    def get_node_count(self) -> int:
        """
        Returns the number of CoreNodes and CoreNets, except for those
        that are not considered in the GUI's node count.

        :return: created node count
        """
        with self.nodes_lock:
            count = 0
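            # skip nodes the GUI does not count: PTP helper networks, control
            # networks, and GRE tap bridges that are not tunnel nodes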
            for node in self.nodes.values():
                is_p2p_ctrlnet = isinstance(node, (PtpNet, CtrlNet))
                is_tap = isinstance(node, GreTapBridge) and not isinstance(
                    node, TunnelNode
                )
                if is_p2p_ctrlnet or is_tap:
                    continue
                count += 1
        return count

    def check_runtime(self) -> None:
        """
        Check if we have entered the runtime state, that all nodes have been
        started and the emulation is running. Start the event loop once we
        have entered runtime (time=0).

        :return: nothing
        """
        # this is called from instantiate() after receiving an event message
        # for the instantiation state
        logging.debug(
            "session(%s) checking if not in runtime state, current state: %s",
            self.id,
            self.state.name,
        )
        if self.state == EventTypes.RUNTIME_STATE:
            logging.info("valid runtime state found, returning")
            return
        # start event loop and set to runtime
        self.event_loop.run()
        self.set_state(EventTypes.RUNTIME_STATE, send_event=True)

    def data_collect(self) -> None:
        """
        Tear down a running session. Stop the event loop and any running
        nodes, and perform clean-up.

        :return: nothing
        """
        # stop event loop
        self.event_loop.stop()

        # stop node services
        with self.nodes_lock:
            funcs = []
            for node_id in self.nodes:
                node = self.nodes[node_id]
                if not isinstance(node, CoreNodeBase) or not node.up:
                    continue
                args = (node,)
                funcs.append((self.services.stop_services, args, {}))
            utils.threadpool(funcs)

        # shutdown emane
        self.emane.shutdown()

        # update control interface hosts
        self.update_control_iface_hosts(remove=True)

        # remove all four possible control networks
        self.add_remove_control_net(0, remove=True)
        self.add_remove_control_net(1, remove=True)
        self.add_remove_control_net(2, remove=True)
        self.add_remove_control_net(3, remove=True)

    def check_shutdown(self) -> bool:
        """
        Check if we have entered the shutdown state, when no running nodes
        and links remain.

        :return: True if should shutdown, False otherwise
        """
        node_count = self.get_node_count()
        logging.debug(
            "session(%s) checking shutdown: %s nodes remaining", self.id, node_count
        )
        shutdown = False
        if node_count == 0:
            shutdown = True
            self.set_state(EventTypes.SHUTDOWN_STATE)
            # clearing sdt saved data here for legacy gui
            self.sdt.shutdown()
        return shutdown

    def short_session_id(self) -> str:
        """
        Return a shorter version of the session ID, appropriate for
        interface names, where length may be limited.

        :return: short session id
        """
        ssid = (self.id >> 8) ^ (self.id & ((1 << 8) - 1))
        return f"{ssid:x}"

    def boot_node(self, node: CoreNode) -> None:
        """
        Boot node by adding a control interface when necessary and starting
        node services.

        :param node: node to boot
        :return: nothing
        """
        logging.info("booting node(%s): %s", node.name, [x.name for x in node.services])
        self.add_remove_control_iface(node=node, remove=False)
        self.services.boot_services(node)
        node.start_config_services()

    def boot_nodes(self) -> List[Exception]:
        """
        Invoke the boot() procedure for all nodes and send back node
        messages to the GUI for nodes that had the status request flag.

        :return: service boot exceptions
        """
        with self.nodes_lock:
            funcs = []
            start = time.monotonic()
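            # queue a boot call per container-based node; Rj45 nodes are
            # skipped since they represent physical host interfaces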
            for _id in self.nodes:
                node = self.nodes[_id]
                if isinstance(node, CoreNodeBase) and not isinstance(node, Rj45Node):
                    args = (node,)
                    funcs.append((self.boot_node, args, {}))
            results, exceptions = utils.threadpool(funcs)
            total = time.monotonic() - start
            logging.debug("boot run time: %s", total)
        if not exceptions:
            self.update_control_iface_hosts()
        return exceptions

    def get_control_net_prefixes(self) -> List[str]:
        """
        Retrieve control net prefixes.

        :return: control net prefix list
        """
        p = self.options.get_config("controlnet")
        p0 = self.options.get_config("controlnet0")
        p1 = self.options.get_config("controlnet1")
        p2 = self.options.get_config("controlnet2")
        p3 = self.options.get_config("controlnet3")
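        # the legacy "controlnet" option acts as a fallback for controlnet0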
        if not p0 and p:
            p0 = p
        return [p0, p1, p2, p3]

    def get_control_net_server_ifaces(self) -> List[str]:
        """
        Retrieve control net server interfaces.

        :return: list of control net server interfaces
        """
        d0 = self.options.get_config("controlnetif0")
        if d0:
            logging.error("controlnet0 cannot be assigned with a host interface")
        d1 = self.options.get_config("controlnetif1")
        d2 = self.options.get_config("controlnetif2")
        d3 = self.options.get_config("controlnetif3")
        return [None, d1, d2, d3]

    def get_control_net_index(self, dev: str) -> int:
        """
        Retrieve control net index.

        :param dev: device to get control net index for
        :return: control net index, -1 otherwise
        """
        if dev[0:4] == "ctrl" and int(dev[4]) in [0, 1, 2, 3]:
            index = int(dev[4])
            if index == 0:
                return index
            if index < 4 and self.get_control_net_prefixes()[index] is not None:
                return index
        return -1

    def get_control_net(self, net_index: int) -> CtrlNet:
        """
        Retrieve a control net based on index.

        :param net_index: control net index
        :return: control net
        :raises CoreError: when control net is not found
        """
        return self.get_node(CTRL_NET_ID + net_index, CtrlNet)

    def add_remove_control_net(
        self, net_index: int, remove: bool = False, conf_required: bool = True
    ) -> Optional[CtrlNet]:
        """
        Create a control network bridge as necessary.
        When the remove flag is True, remove the bridge that connects control
        interfaces. When conf_required is False, a control network bridge is
        added even if one has not been configured.

        :param net_index: network index
        :param remove: flag to check if it should be removed
        :param conf_required: flag to check if conf is required
        :return: control net node
        """
        logging.debug(
            "add/remove control net: index(%s) remove(%s) conf_required(%s)",
            net_index,
            remove,
            conf_required,
        )
        prefix_spec_list = self.get_control_net_prefixes()
        prefix_spec = prefix_spec_list[net_index]
        if not prefix_spec:
            if conf_required:
                # no controlnet needed
                return None
            else:
                prefix_spec = CtrlNet.DEFAULT_PREFIX_LIST[net_index]
        logging.debug("prefix spec: %s", prefix_spec)
        server_iface = self.get_control_net_server_ifaces()[net_index]

        # return any existing controlnet bridge
        try:
            control_net = self.get_control_net(net_index)
            if remove:
                self.delete_node(control_net.id)
                return None
            return control_net
        except CoreError:
            if remove:
                return None

        # build a new controlnet bridge
        _id = CTRL_NET_ID + net_index

        # use the updown script for control net 0 only.
        updown_script = None

        if net_index == 0:
            updown_script = self.options.get_config("controlnet_updown_script")
            if not updown_script:
                logging.debug("controlnet updown script not configured")

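        # a distributed prefix spec may list per-server entries, e.g.
        # "server1:172.16.1.0/24 server2:172.16.2.0/24" (illustrative values)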
        prefixes = prefix_spec.split()
        if len(prefixes) > 1:
            # a list of per-host prefixes is provided
            try:
                # split first (master) entry into server and prefix
                prefix = prefixes[0].split(":", 1)[1]
            except IndexError:
                # no server name, possibly only one server
                prefix = prefixes[0]
        else:
            prefix = prefixes[0]

        logging.info(
            "controlnet(%s) prefix(%s) updown(%s) serverintf(%s)",
            _id,
            prefix,
            updown_script,
            server_iface,
        )
        control_net = self.create_node(
            CtrlNet,
            True,
            prefix,
            _id=_id,
            updown_script=updown_script,
            serverintf=server_iface,
        )
        return control_net

    def add_remove_control_iface(
        self,
        node: CoreNode,
        net_index: int = 0,
        remove: bool = False,
        conf_required: bool = True,
    ) -> None:
        """
        Add a control interface to a node when a 'controlnet' prefix is
        listed in the config file or session options. Uses
        add_remove_control_net() to build or remove the control bridge.
        If conf_required is False, the control network may be built even
        when the user has not configured one (e.g. for EMANE).

        :param node: node to add or remove control interface
        :param net_index: network index
        :param remove: flag to check if it should be removed
        :param conf_required: flag to check if conf is required
        :return: nothing
        """
        control_net = self.add_remove_control_net(net_index, remove, conf_required)
        if not control_net:
            return
        if not node:
            return
        # ctrl# already exists
        if node.ifaces.get(control_net.CTRLIF_IDX_BASE + net_index):
            return
        try:
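            # each node is assumed to take the host address matching its id
            # within the control net prefix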
            ip4 = control_net.prefix[node.id]
            ip4_mask = control_net.prefix.prefixlen
            iface_data = InterfaceData(
                id=control_net.CTRLIF_IDX_BASE + net_index,
                name=f"ctrl{net_index}",
                mac=utils.random_mac(),
                ip4=ip4,
                ip4_mask=ip4_mask,
            )
            iface = node.new_iface(control_net, iface_data)
            iface.control = True
        except ValueError:
            msg = f"Control interface not added to node {node.id}. "
            msg += f"Invalid control network prefix ({control_net.prefix}). "
            msg += "A longer prefix length may be required for this many nodes."
            logging.exception(msg)

    def update_control_iface_hosts(
        self, net_index: int = 0, remove: bool = False
    ) -> None:
        """
        Add the IP addresses of control interfaces to the /etc/hosts file.

        :param net_index: network index to update
        :param remove: flag to check if it should be removed
        :return: nothing
        """
        if not self.options.get_config_bool("update_etc_hosts", default=False):
            return

        try:
            control_net = self.get_control_net(net_index)
        except CoreError:
            logging.exception("error retrieving control net node")
            return

        header = f"CORE session {self.id} host entries"
        if remove:
            logging.info("Removing /etc/hosts file entries.")
            utils.file_demunge("/etc/hosts", header)
            return

        entries = []
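        # build one "<address> <node name>" line per control interface, e.g.
        # "172.16.0.2 n2" (illustrative values)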
        for iface in control_net.get_ifaces():
            name = iface.node.name
            for address in iface.addrlist:
                address = address.split("/")[0]
                entries.append(f"{address} {name}")

        logging.info("Adding %d /etc/hosts file entries.", len(entries))
        utils.file_munge("/etc/hosts", header, "\n".join(entries) + "\n")

    def runtime(self) -> float:
        """
        Return the current time we have been in the runtime state, or zero
        if not in runtime.
        """
        if self.state == EventTypes.RUNTIME_STATE:
            return time.monotonic() - self.state_time
        else:
            return 0.0

    def add_event(
        self, event_time: float, node_id: int = None, name: str = None, data: str = None
    ) -> None:
        """
        Add an event to the event queue, with a start time relative to the
        start of the runtime state.

        :param event_time: event time
        :param node_id: node to add event for
        :param name: name of event
        :param data: data for event
        :return: nothing
        """
        current_time = self.runtime()
        if current_time > 0:
            if event_time <= current_time:
                logging.warning(
                    "could not schedule past event for time %s (run time is now %s)",
                    event_time,
                    current_time,
                )
                return
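            # event_time is given relative to the start of runtime; convert it
            # to a delay from now before queueing it on the event loop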
            event_time = event_time - current_time
        self.event_loop.add_event(
            event_time, self.run_event, node_id=node_id, name=name, data=data
        )
        if not name:
            name = ""
        logging.info(
            "scheduled event %s at time %s data=%s",
            name,
            event_time + current_time,
            data,
        )

    def run_event(
        self, node_id: int = None, name: str = None, data: str = None
    ) -> None:
        """
        Run a scheduled event, executing commands in the data string.

        :param node_id: node id to run event
        :param name: event name
        :param data: event data
        :return: nothing
        """
        if data is None:
            logging.warning("no data for event node(%s) name(%s)", node_id, name)
            return
        now = self.runtime()
        if not name:
            name = ""
        logging.info("running event %s at time %s cmd=%s", name, now, data)
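        # without a node id the command runs detached on the host; otherwise
        # it is executed inside the given node without waiting for completion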
        if not node_id:
            utils.mute_detach(data)
        else:
            node = self.get_node(node_id, CoreNodeBase)
            node.cmd(data, wait=False)

    def get_link_color(self, network_id: int) -> str:
        """
        Assign a color for links associated with a network.

        :param network_id: network to get a link color for
        :return: link color
        """
        color = self.link_colors.get(network_id)
        if not color:
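            # cycle through the palette so each network keeps a stable color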
            index = len(self.link_colors) % len(LINK_COLORS)
            color = LINK_COLORS[index]
            self.link_colors[network_id] = color
        return color