2015-01-14 03:26:32 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
#
|
|
|
|
# Copyright (C) 2015 GNS3 Technologies Inc.
|
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
2015-01-23 03:04:24 +02:00
|
|
|
import sys
|
|
|
|
import os
|
|
|
|
import struct
|
|
|
|
import stat
|
2015-01-14 03:26:32 +02:00
|
|
|
import asyncio
|
2019-03-06 18:00:01 +02:00
|
|
|
import aiofiles
|
2018-01-29 11:18:13 +02:00
|
|
|
|
2015-01-14 03:26:32 +02:00
|
|
|
import aiohttp
|
2015-01-23 03:04:24 +02:00
|
|
|
import socket
|
2015-02-09 03:10:04 +02:00
|
|
|
import shutil
|
2015-11-12 16:37:34 +02:00
|
|
|
import re
|
2015-01-23 03:04:24 +02:00
|
|
|
|
|
|
|
import logging
|
2018-01-29 11:18:13 +02:00
|
|
|
|
|
|
|
from gns3server.utils.asyncio import cancellable_wait_run_in_executor
|
|
|
|
|
2015-01-23 03:04:24 +02:00
|
|
|
log = logging.getLogger(__name__)
|
2015-01-14 03:26:32 +02:00
|
|
|
|
2015-01-20 03:30:57 +02:00
|
|
|
from uuid import UUID, uuid4
|
2015-06-07 22:51:33 +03:00
|
|
|
from gns3server.utils.interfaces import is_interface_up
|
2015-01-21 04:02:22 +02:00
|
|
|
from ..config import Config
|
2015-02-09 03:10:04 +02:00
|
|
|
from ..utils.asyncio import wait_run_in_executor
|
2015-10-05 21:12:20 +03:00
|
|
|
from ..utils import force_unix_path
|
2015-01-20 13:46:15 +02:00
|
|
|
from .project_manager import ProjectManager
|
2016-10-24 22:39:35 +03:00
|
|
|
from .port_manager import PortManager
|
2015-01-14 03:26:32 +02:00
|
|
|
|
2015-02-24 04:00:34 +02:00
|
|
|
from .nios.nio_udp import NIOUDP
|
|
|
|
from .nios.nio_tap import NIOTAP
|
2016-06-02 02:50:31 +03:00
|
|
|
from .nios.nio_ethernet import NIOEthernet
|
2016-11-28 20:49:50 +02:00
|
|
|
from ..utils.images import md5sum, remove_checksum, images_directories, default_images_directory, list_images
|
2016-06-07 16:34:04 +03:00
|
|
|
from .error import NodeError, ImageMissingError
|
2015-01-23 03:04:24 +02:00
|
|
|
|
2019-03-06 18:00:01 +02:00
|
|
|
CHUNK_SIZE = 1024 * 8  # 8KB: read/write buffer size used when streaming PCAP data and writing uploaded images
|
|
|
|
|
2015-01-14 03:26:32 +02:00
|
|
|
|
2015-01-14 19:52:02 +02:00
|
|
|
class BaseManager:
|
2015-01-20 14:24:00 +02:00
|
|
|
|
2015-01-14 03:26:32 +02:00
|
|
|
"""
|
2015-04-08 20:17:34 +03:00
|
|
|
Base class for all Manager classes.
|
2016-05-11 20:35:36 +03:00
|
|
|
Responsible of management of a node pool of the same type.
|
2015-01-14 03:26:32 +02:00
|
|
|
"""
|
|
|
|
|
2015-02-28 07:12:43 +02:00
|
|
|
    # Class-level lock serializing conversions of pre-1.3 projects;
    # (re)created on each manager instantiation (managers are singletons).
    _convert_lock = None

    def __init__(self):

        BaseManager._convert_lock = asyncio.Lock()
        # Node instances managed by this module, keyed by node UUID string
        self._nodes = {}
        # Lazily resolved by the port_manager property
        self._port_manager = None
        # Server configuration singleton
        self._config = Config.instance()
|
2015-01-14 03:26:32 +02:00
|
|
|
|
2016-08-29 16:53:10 +03:00
|
|
|
@classmethod
|
|
|
|
def node_types(cls):
|
|
|
|
"""
|
|
|
|
:returns: Array of supported node type on this computer
|
|
|
|
"""
|
2018-03-15 09:17:39 +02:00
|
|
|
|
2016-08-29 16:53:10 +03:00
|
|
|
# By default we transform DockerVM => docker but you can override this (see builtins)
|
|
|
|
return [cls._NODE_CLASS.__name__.rstrip('VM').lower()]
|
|
|
|
|
2016-08-16 20:41:59 +03:00
|
|
|
@property
|
|
|
|
def nodes(self):
|
|
|
|
"""
|
|
|
|
List of nodes manage by the module
|
|
|
|
"""
|
2018-03-15 09:17:39 +02:00
|
|
|
|
2016-08-16 20:41:59 +03:00
|
|
|
return self._nodes.values()
|
|
|
|
|
2015-01-14 03:26:32 +02:00
|
|
|
@classmethod
|
|
|
|
def instance(cls):
|
|
|
|
"""
|
2015-01-19 00:41:53 +02:00
|
|
|
Singleton to return only one instance of BaseManager.
|
2015-01-14 03:26:32 +02:00
|
|
|
|
2015-01-19 23:43:35 +02:00
|
|
|
:returns: instance of BaseManager
|
2015-01-14 03:26:32 +02:00
|
|
|
"""
|
|
|
|
|
2015-01-16 18:09:45 +02:00
|
|
|
if not hasattr(cls, "_instance") or cls._instance is None:
|
2015-01-14 03:26:32 +02:00
|
|
|
cls._instance = cls()
|
|
|
|
return cls._instance
|
|
|
|
|
2015-01-22 00:21:15 +02:00
|
|
|
@property
|
|
|
|
def module_name(self):
|
|
|
|
"""
|
|
|
|
Returns the module name.
|
|
|
|
|
|
|
|
:returns: module name
|
|
|
|
"""
|
|
|
|
|
|
|
|
return self.__class__.__name__
|
|
|
|
|
2015-01-19 12:22:24 +02:00
|
|
|
@property
|
|
|
|
def port_manager(self):
|
|
|
|
"""
|
2015-01-21 04:02:22 +02:00
|
|
|
Returns the port manager.
|
2015-01-19 12:22:24 +02:00
|
|
|
|
|
|
|
:returns: Port manager
|
|
|
|
"""
|
2018-03-15 09:17:39 +02:00
|
|
|
|
2016-10-24 22:39:35 +03:00
|
|
|
if self._port_manager is None:
|
|
|
|
self._port_manager = PortManager.instance()
|
2015-01-19 12:22:24 +02:00
|
|
|
return self._port_manager
|
|
|
|
|
|
|
|
    @port_manager.setter
    def port_manager(self, new_port_manager):
        """
        Replaces the port manager (used by server setup and tests).

        :param new_port_manager: PortManager instance to use from now on
        """

        self._port_manager = new_port_manager
|
2015-01-19 12:22:24 +02:00
|
|
|
|
2015-01-21 04:02:22 +02:00
|
|
|
    @property
    def config(self):
        """
        Returns the server config.

        :returns: Config instance (set from Config.instance() at construction)
        """

        return self._config
|
|
|
|
|
2018-10-15 13:05:49 +03:00
|
|
|
    async def unload(self):
        """
        Closes every node managed by this module and resets the singleton.

        Failures while closing individual nodes are logged and ignored so
        that one faulty node cannot prevent the module from unloading.
        """

        # schedule all closes concurrently before waiting on any of them
        tasks = []
        for node_id in self._nodes.keys():
            tasks.append(asyncio.ensure_future(self.close_node(node_id)))

        if tasks:
            done, _ = await asyncio.wait(tasks)
            for future in done:
                try:
                    # surface any exception stored on the finished task
                    future.result()
                except (Exception, GeneratorExit) as e:
                    log.error("Could not close node {}".format(e), exc_info=1)
                    continue

        # drop the singleton so a later instance() call builds a fresh manager
        if hasattr(BaseManager, "_instance"):
            BaseManager._instance = None
        log.debug("Module {} unloaded".format(self.module_name))
|
2015-01-14 03:26:32 +02:00
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
    def get_node(self, node_id, project_id=None):
        """
        Returns a Node instance.

        :param node_id: Node identifier (UUID v4 string)
        :param project_id: Project identifier; when given, the node must belong to that project

        :returns: Node instance
        :raises aiohttp.web.HTTPBadRequest: if node_id is not a valid UUID
        :raises aiohttp.web.HTTPNotFound: if the node or project cannot be found
        """

        if project_id:
            # check the project_id exists
            project = ProjectManager.instance().get_project(project_id)

        try:
            UUID(node_id, version=4)
        except ValueError:
            raise aiohttp.web.HTTPBadRequest(text="Node ID {} is not a valid UUID".format(node_id))

        if node_id not in self._nodes:
            raise aiohttp.web.HTTPNotFound(text="Node ID {} doesn't exist".format(node_id))

        node = self._nodes[node_id]
        if project_id:
            # enforce that the node belongs to the requested project
            if node.project.id != project.id:
                raise aiohttp.web.HTTPNotFound(text="Project ID {} doesn't belong to node {}".format(project_id, node.name))

        return node
|
2015-01-14 03:26:32 +02:00
|
|
|
|
2018-10-15 13:05:49 +03:00
|
|
|
    async def convert_old_project(self, project, legacy_id, name):
        """
        Convert projects made before version 1.3.

        Migrates the legacy on-disk layout ("<name>-files", legacy remote
        project directories, and the per-node working directory) into the
        modern "project-files" layout, then returns the node's new UUID.

        :param project: Project instance
        :param legacy_id: old identifier
        :param name: node name

        :returns: new identifier
        :raises aiohttp.web.HTTPInternalServerError: when a directory cannot be moved
        """

        new_id = str(uuid4())
        legacy_project_files_path = os.path.join(project.path, "{}-files".format(project.name))
        new_project_files_path = os.path.join(project.path, "project-files")
        if os.path.exists(legacy_project_files_path) and not os.path.exists(new_project_files_path):
            # move the project files
            log.info("Converting old project...")
            try:
                log.info('Moving "{}" to "{}"'.format(legacy_project_files_path, new_project_files_path))
                # run in an executor: shutil.move can block on large directories
                await wait_run_in_executor(shutil.move, legacy_project_files_path, new_project_files_path)
            except OSError as e:
                raise aiohttp.web.HTTPInternalServerError(text="Could not move project files directory: {} to {} {}".format(legacy_project_files_path,
                                                                                                                           new_project_files_path, e))

        if project.is_local() is False:
            legacy_remote_project_path = os.path.join(project.location, project.name, self.module_name.lower())
            new_remote_project_path = os.path.join(project.path, "project-files", self.module_name.lower())
            if os.path.exists(legacy_remote_project_path) and not os.path.exists(new_remote_project_path):
                # move the legacy remote project (remote servers only)
                log.info("Converting old remote project...")
                try:
                    log.info('Moving "{}" to "{}"'.format(legacy_remote_project_path, new_remote_project_path))
                    await wait_run_in_executor(shutil.move, legacy_remote_project_path, new_remote_project_path)
                except OSError as e:
                    raise aiohttp.web.HTTPInternalServerError(text="Could not move directory: {} to {} {}".format(legacy_remote_project_path,
                                                                                                                  new_remote_project_path, e))

        # only managers that define get_legacy_vm_workdir have per-node working dirs
        if hasattr(self, "get_legacy_vm_workdir"):
            # rename old project node working dir
            log.info("Converting old node working directory...")
            legacy_vm_dir = self.get_legacy_vm_workdir(legacy_id, name)
            legacy_vm_working_path = os.path.join(new_project_files_path, legacy_vm_dir)
            new_vm_working_path = os.path.join(new_project_files_path, self.module_name.lower(), new_id)
            if os.path.exists(legacy_vm_working_path) and not os.path.exists(new_vm_working_path):
                try:
                    log.info('Moving "{}" to "{}"'.format(legacy_vm_working_path, new_vm_working_path))
                    await wait_run_in_executor(shutil.move, legacy_vm_working_path, new_vm_working_path)
                except OSError as e:
                    raise aiohttp.web.HTTPInternalServerError(text="Could not move vm working directory: {} to {} {}".format(legacy_vm_working_path,
                                                                                                                             new_vm_working_path, e))

        return new_id
|
2015-02-26 03:55:35 +02:00
|
|
|
|
2018-10-15 13:05:49 +03:00
|
|
|
    async def create_node(self, name, project_id, node_id, *args, **kwargs):
        """
        Create a new node (or return the existing instance when node_id is
        already managed by this module).

        :param name: Node name
        :param project_id: Project identifier
        :param node_id: restore a node identifier; an integer means a pre-1.3
            project that is converted first, a falsy value means a fresh UUID

        :returns: Node instance
        """

        if node_id in self._nodes:
            return self._nodes[node_id]

        project = ProjectManager.instance().get_project(project_id)
        if node_id and isinstance(node_id, int):
            # old project: serialize conversions so two nodes of the same
            # legacy project are not converted concurrently
            async with BaseManager._convert_lock:
                node_id = await self.convert_old_project(project, node_id, name)

        if not node_id:
            node_id = str(uuid4())

        node = self._NODE_CLASS(name, node_id, project, self, *args, **kwargs)
        # node.create() may be a coroutine or a plain method depending on the node class
        if asyncio.iscoroutinefunction(node.create):
            await node.create()
        else:
            node.create()
        self._nodes[node.id] = node
        project.add_node(node)
        return node
|
2015-01-22 12:34:10 +02:00
|
|
|
|
2018-10-15 13:05:49 +03:00
|
|
|
    async def duplicate_node(self, source_node_id, destination_node_id):
        """
        Duplicate a node by copying the source node's working directory
        over the destination node's working directory.

        :param source_node_id: Source node identifier
        :param destination_node_id: Destination node identifier

        :returns: New node instance
        :raises aiohttp.web.HTTPConflict: when node data cannot be copied
        """

        source_node = self.get_node(source_node_id)
        destination_node = self.get_node(destination_node_id)

        # Some node don't have working dir like switch
        if not hasattr(destination_node, "working_dir"):
            return destination_node

        destination_dir = destination_node.working_dir
        try:
            shutil.rmtree(destination_dir)
            # dangling symlinks are ignored so a broken link in the source does not abort the copy
            shutil.copytree(source_node.working_dir, destination_dir, symlinks=True, ignore_dangling_symlinks=True)
        except OSError as e:
            raise aiohttp.web.HTTPConflict(text="Cannot duplicate node data: {}".format(e))

        # We force a refresh of the name. This forces the rewrite
        # of some configuration files
        node_name = destination_node.name
        # set a throwaway unique name first so the name setter sees a change
        destination_node.name = node_name + str(uuid4())
        destination_node.name = node_name

        return destination_node
|
|
|
|
|
2018-10-15 13:05:49 +03:00
|
|
|
async def close_node(self, node_id):
|
2015-01-22 12:34:10 +02:00
|
|
|
"""
|
2016-05-11 20:35:36 +03:00
|
|
|
Close a node
|
2015-01-22 12:34:10 +02:00
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
:param node_id: Node identifier
|
2015-02-04 22:48:29 +02:00
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
:returns: Node instance
|
2015-01-22 12:34:10 +02:00
|
|
|
"""
|
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
node = self.get_node(node_id)
|
|
|
|
if asyncio.iscoroutinefunction(node.close):
|
2018-10-15 13:05:49 +03:00
|
|
|
await node.close()
|
2015-01-22 12:34:10 +02:00
|
|
|
else:
|
2016-05-11 20:35:36 +03:00
|
|
|
node.close()
|
|
|
|
return node
|
2015-01-23 12:28:58 +02:00
|
|
|
|
2018-10-15 13:05:49 +03:00
|
|
|
    async def project_closing(self, project):
        """
        Called when a project is about to be closed.

        Hook for subclasses; the base implementation does nothing.

        :param project: Project instance
        """

        pass
|
|
|
|
|
2018-10-15 13:05:49 +03:00
|
|
|
async def project_closed(self, project):
|
2015-02-16 07:13:24 +02:00
|
|
|
"""
|
|
|
|
Called when a project is closed.
|
|
|
|
|
2015-02-28 01:51:17 +02:00
|
|
|
:param project: Project instance
|
|
|
|
"""
|
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
for node in project.nodes:
|
|
|
|
if node.id in self._nodes:
|
|
|
|
del self._nodes[node.id]
|
2015-02-28 01:51:17 +02:00
|
|
|
|
2018-10-15 13:05:49 +03:00
|
|
|
    async def delete_node(self, node_id):
        """
        Delete a node. The node working directory will be destroyed when a commit is received.

        :param node_id: Node identifier

        :returns: Node instance
        """

        node = None
        try:
            node = self.get_node(node_id)
            await self.close_node(node_id)
        finally:
            # even if closing failed, detach the node from its project and
            # forget it so the manager does not keep a dangling reference
            if node:
                node.project.emit("node.deleted", node)
                await node.project.remove_node(node)
                if node.id in self._nodes:
                    del self._nodes[node.id]
        return node
|
2015-01-23 03:04:24 +02:00
|
|
|
|
|
|
|
@staticmethod
|
2015-09-15 00:05:25 +03:00
|
|
|
def has_privileged_access(executable):
|
2015-01-23 03:04:24 +02:00
|
|
|
"""
|
2016-06-02 02:50:31 +03:00
|
|
|
Check if an executable have the right to attach to Ethernet and TAP adapters.
|
2015-01-23 03:04:24 +02:00
|
|
|
|
|
|
|
:param executable: executable path
|
|
|
|
|
|
|
|
:returns: True or False
|
|
|
|
"""
|
|
|
|
|
|
|
|
if sys.platform.startswith("win"):
|
|
|
|
# do not check anything on Windows
|
|
|
|
return True
|
|
|
|
|
2016-12-19 10:28:27 +02:00
|
|
|
if sys.platform.startswith("darwin"):
|
|
|
|
if os.stat(executable).st_uid == 0:
|
|
|
|
return True
|
|
|
|
|
2015-01-23 03:04:24 +02:00
|
|
|
if os.geteuid() == 0:
|
|
|
|
# we are root, so we should have privileged access.
|
|
|
|
return True
|
2015-09-15 00:05:25 +03:00
|
|
|
|
|
|
|
if os.stat(executable).st_uid == 0 and (os.stat(executable).st_mode & stat.S_ISUID or os.stat(executable).st_mode & stat.S_ISGID):
|
2015-01-23 03:04:24 +02:00
|
|
|
# the executable has set UID bit.
|
|
|
|
return True
|
|
|
|
|
|
|
|
# test if the executable has the CAP_NET_RAW capability (Linux only)
|
2015-09-15 00:05:25 +03:00
|
|
|
try:
|
|
|
|
if sys.platform.startswith("linux") and "security.capability" in os.listxattr(executable):
|
2015-01-23 03:04:24 +02:00
|
|
|
caps = os.getxattr(executable, "security.capability")
|
|
|
|
# test the 2nd byte and check if the 13th bit (CAP_NET_RAW) is set
|
|
|
|
if struct.unpack("<IIIII", caps)[1] & 1 << 13:
|
|
|
|
return True
|
2017-05-08 17:02:23 +03:00
|
|
|
except (AttributeError, OSError) as e:
|
2015-09-15 00:05:25 +03:00
|
|
|
log.error("could not determine if CAP_NET_RAW capability is set for {}: {}".format(executable, e))
|
2015-01-23 03:04:24 +02:00
|
|
|
|
|
|
|
return False
|
|
|
|
|
2016-06-25 03:35:39 +03:00
|
|
|
    def create_nio(self, nio_settings):
        """
        Creates a new NIO.

        :param nio_settings: information to create the NIO; dict with a "type"
            key of "nio_udp", "nio_tap", "nio_ethernet" or "nio_generic_ethernet"

        :returns: a NIO object
        :raises aiohttp.web.HTTPInternalServerError: when the remote UDP endpoint cannot be reached
        :raises aiohttp.web.HTTPConflict: when the Ethernet interface is missing or down
        """

        nio = None
        if nio_settings["type"] == "nio_udp":
            lport = nio_settings["lport"]
            rhost = nio_settings["rhost"]
            rport = nio_settings["rport"]
            try:
                # probe the remote endpoint: resolve it and attempt a UDP connect
                info = socket.getaddrinfo(rhost, rport, socket.AF_UNSPEC, socket.SOCK_DGRAM, 0, socket.AI_PASSIVE)
                if not info:
                    raise aiohttp.web.HTTPInternalServerError(text="getaddrinfo returns an empty list on {}:{}".format(rhost, rport))
                for res in info:
                    af, socktype, proto, _, sa = res
                    with socket.socket(af, socktype, proto) as sock:
                        sock.connect(sa)
            except OSError as e:
                raise aiohttp.web.HTTPInternalServerError(text="Could not create an UDP connection to {}:{}: {}".format(rhost, rport, e))
            nio = NIOUDP(lport, rhost, rport)
            nio.filters = nio_settings.get("filters", {})
            nio.suspend = nio_settings.get("suspend", False)
        elif nio_settings["type"] == "nio_tap":
            tap_device = nio_settings["tap_device"]
            # if not is_interface_up(tap_device):
            #     raise aiohttp.web.HTTPConflict(text="TAP interface {} does not exist or is down".format(tap_device))
            # FIXME: check for permissions on tap device
            # if not self.has_privileged_access(executable):
            #     raise aiohttp.web.HTTPForbidden(text="{} has no privileged access to {}.".format(executable, tap_device))
            nio = NIOTAP(tap_device)
        elif nio_settings["type"] in ("nio_generic_ethernet", "nio_ethernet"):
            ethernet_device = nio_settings["ethernet_device"]
            if not is_interface_up(ethernet_device):
                raise aiohttp.web.HTTPConflict(text="Ethernet interface {} does not exist or is down".format(ethernet_device))
            nio = NIOEthernet(ethernet_device)
        # an unknown type leaves nio None and trips this assertion
        assert nio is not None
        return nio
|
2015-04-14 19:46:55 +03:00
|
|
|
|
2018-10-27 10:47:17 +03:00
|
|
|
    async def stream_pcap_file(self, nio, project_id, request, response):
        """
        Streams a PCAP file.

        :param nio: NIO object
        :param project_id: Project identifier
        :param request: request object
        :param response: response object
        :raises aiohttp.web.HTTPConflict: when no capture is active
        :raises aiohttp.web.HTTPNotFound: when the capture file does not exist
        :raises aiohttp.web.HTTPForbidden: when the capture file cannot be read
        """

        if not nio.capturing:
            raise aiohttp.web.HTTPConflict(text="Nothing to stream because there is no packet capture active")

        project = ProjectManager.instance().get_project(project_id)
        path = os.path.normpath(os.path.join(project.capture_working_directory(), nio.pcap_output_file))

        # Raise an error if user try to escape
        #if path[0] == ".":
        #    raise aiohttp.web.HTTPForbidden()
        #path = os.path.join(project.path, path)

        response.content_type = "application/vnd.tcpdump.pcap"
        response.set_status(200)
        response.enable_chunked_encoding()

        try:
            with open(path, "rb") as f:
                await response.prepare(request)
                # stream while the capture is running; when no new data is
                # available yet, back off briefly instead of busy-looping
                while nio.capturing:
                    data = f.read(CHUNK_SIZE)
                    if not data:
                        await asyncio.sleep(0.1)
                        continue
                    await response.write(data)
        except FileNotFoundError:
            raise aiohttp.web.HTTPNotFound()
        except PermissionError:
            raise aiohttp.web.HTTPForbidden()
|
|
|
|
|
2018-11-19 10:53:43 +02:00
|
|
|
    def get_abs_image_path(self, path, extra_dir=None):
        """
        Get the absolute path of an image

        Search order: for a relative path, every valid image directory is
        scanned recursively, then the default image directory; for an
        absolute path, any location is allowed on a local server, while a
        remote server only accepts paths inside the valid image directories.

        :param path: file path
        :param extra_dir: an additional directory to be added to the search path

        :returns: file path
        :raises NodeError: when the path is not allowed on this server
        :raises ImageMissingError: when no matching image file exists
        """

        if not path:
            return ""
        orig_path = os.path.normpath(path)

        server_config = self.config.get_section_config("Server")
        img_directory = self.get_images_directory()
        valid_directory_prefices = images_directories(self._NODE_TYPE)
        if extra_dir:
            valid_directory_prefices.append(extra_dir)

        # Windows path should not be send to a unix server
        if not sys.platform.startswith("win"):
            # a drive-letter prefix (e.g. "C:") marks a Windows-style path
            if re.match(r"^[A-Z]:", path) is not None:
                raise NodeError("{} is not allowed on this remote server. Please only use a file from '{}'".format(path, img_directory))

        if not os.path.isabs(orig_path):

            for directory in valid_directory_prefices:
                log.debug("Searching for image '{}' in '{}'".format(orig_path, directory))
                path = self._recursive_search_file_in_directory(directory, orig_path)
                if path:
                    return force_unix_path(path)

            # Not found we try the default directory
            log.debug("Searching for image '{}' in default directory".format(orig_path))

            # check that the image path is in the default image directory
            #common_prefix = os.path.commonprefix([orig_path, img_directory])
            #if common_prefix != img_directory:
            #    raise NodeError("{} is not allowed. Please only use a file from '{}'".format(orig_path, img_directory))

            s = os.path.split(orig_path)
            path = force_unix_path(os.path.join(img_directory, *s))
            if os.path.exists(path):
                return path
            raise ImageMissingError(orig_path)

        # For local server we allow using absolute path outside image directory
        if server_config.getboolean("local", False) is True:
            log.debug("Searching for '{}'".format(orig_path))
            path = force_unix_path(path)
            if os.path.exists(path):
                return path
            raise ImageMissingError(orig_path)

        # remote server with an absolute path: only allow files located
        # under one of the valid image directories
        path = force_unix_path(path)
        for directory in valid_directory_prefices:
            log.debug("Searching for image '{}' in '{}'".format(orig_path, directory))
            if os.path.commonprefix([directory, path]) == directory:
                if os.path.exists(path):
                    return path
                raise ImageMissingError(orig_path)
        raise NodeError("{} is not allowed on this remote server. Please only use a file from '{}'"
                        .format(path, img_directory))
|
|
|
|
|
2016-06-02 17:44:38 +03:00
|
|
|
def _recursive_search_file_in_directory(self, directory, searched_file):
|
|
|
|
"""
|
|
|
|
Search for a file in directory and is subdirectories
|
|
|
|
|
|
|
|
:returns: Path or None if not found
|
|
|
|
"""
|
|
|
|
|
2021-05-15 11:05:32 +03:00
|
|
|
s = os.path.split(searched_file)
|
2016-06-02 17:44:38 +03:00
|
|
|
for root, dirs, files in os.walk(directory):
|
|
|
|
for file in files:
|
2021-08-25 10:53:21 +03:00
|
|
|
if s[1] == file and (s[0] == '' or root == os.path.join(directory, s[0])):
|
2016-06-02 17:44:38 +03:00
|
|
|
path = os.path.normpath(os.path.join(root, s[1]))
|
|
|
|
if os.path.exists(path):
|
|
|
|
return path
|
|
|
|
return None
|
|
|
|
|
2018-11-19 10:53:43 +02:00
|
|
|
    def get_relative_image_path(self, path, extra_dir=None):
        """
        Get a path relative to images directory path
        or an abspath if the path is not located inside
        image directory

        :param path: file path
        :param extra_dir: an additional directory to be added to the search path

        :returns: file path
        """

        if not path:
            return ""

        # resolve first so the relative computation starts from a real image path
        path = force_unix_path(self.get_abs_image_path(path, extra_dir))
        img_directory = self.get_images_directory()

        valid_directory_prefices = images_directories(self._NODE_TYPE)
        if extra_dir:
            valid_directory_prefices.append(extra_dir)

        for directory in valid_directory_prefices:
            if os.path.commonprefix([directory, path]) == directory:
                relpath = os.path.relpath(path, directory)
                # We don't allow to recurse search from the top image directory just for image type directory (compatibility with old releases)
                if os.sep not in relpath or directory == img_directory:
                    return relpath
        return path
|
|
|
|
|
2018-10-15 13:05:49 +03:00
|
|
|
    async def list_images(self):
        """
        Return the list of available images for this node type

        :returns: Array of hash
        :raises aiohttp.web.HTTPConflict: when the image directories cannot be read
        """

        try:
            # list_images() here is the helper from gns3server.utils.images,
            # not this method (same name, module scope)
            return list_images(self._NODE_TYPE)
        except OSError as e:
            raise aiohttp.web.HTTPConflict(text="Can not list images {}".format(e))
|
2015-04-13 15:33:13 +03:00
|
|
|
|
2015-04-14 19:46:55 +03:00
|
|
|
def get_images_directory(self):
|
|
|
|
"""
|
|
|
|
Get the image directory on disk
|
|
|
|
"""
|
2018-03-15 09:17:39 +02:00
|
|
|
|
2016-11-28 20:49:50 +02:00
|
|
|
if hasattr(self, "_NODE_TYPE"):
|
|
|
|
return default_images_directory(self._NODE_TYPE)
|
2015-04-14 19:46:55 +03:00
|
|
|
raise NotImplementedError
|
2015-04-24 11:15:23 +03:00
|
|
|
|
2018-10-15 13:05:49 +03:00
|
|
|
    async def write_image(self, filename, stream):
        """
        Writes an uploaded image into the images directory.

        Data is first written to a ".tmp" file and only moved to its final
        name once the upload completed, so a partial upload never shadows a
        valid image; the MD5 checksum is (re)computed afterwards.

        :param filename: image file name (may contain a relative sub-directory)
        :param stream: stream object to read the image data from
        :raises aiohttp.web.HTTPForbidden: when filename escapes the images directory
        :raises aiohttp.web.HTTPConflict: when the image cannot be written
        """

        directory = self.get_images_directory()
        path = os.path.abspath(os.path.join(directory, *os.path.split(filename)))
        # reject any path that resolves outside the images directory
        if os.path.commonprefix([directory, path]) != directory:
            raise aiohttp.web.HTTPForbidden(text="Could not write image: {}, {} is forbidden".format(filename, path))
        log.info("Writing image file to '{}'".format(path))
        try:
            remove_checksum(path)
            # We store the file under his final name only when the upload is finished
            tmp_path = path + ".tmp"
            os.makedirs(os.path.dirname(path), exist_ok=True)
            async with aiofiles.open(tmp_path, 'wb') as f:
                while True:
                    chunk = await stream.read(CHUNK_SIZE)
                    if not chunk:
                        break
                    await f.write(chunk)
            # images must be executable (e.g. IOU binaries are executed directly)
            os.chmod(tmp_path, stat.S_IWRITE | stat.S_IREAD | stat.S_IEXEC)
            shutil.move(tmp_path, path)
            await cancellable_wait_run_in_executor(md5sum, path)
        except OSError as e:
            raise aiohttp.web.HTTPConflict(text="Could not write image: {} because {}".format(filename, e))
|
2016-10-24 22:39:35 +03:00
|
|
|
|
|
|
|
def reset(self):
|
|
|
|
"""
|
|
|
|
Reset module for tests
|
|
|
|
"""
|
|
|
|
self._nodes = {}
|