2015-01-19 17:23:41 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
#
|
|
|
|
# Copyright (C) 2015 GNS3 Technologies Inc.
|
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
|
|
|
import os
import shutil
import asyncio
import hashlib
import json
import zipfile
from uuid import UUID, uuid4

import aiohttp
import zipstream

from .port_manager import PortManager
from .notification_manager import NotificationManager
from ..config import Config
from ..utils.asyncio import wait_run_in_executor
from ..utils.path import check_path_allowed, get_default_project_directory
|
2015-01-19 17:23:41 +02:00
|
|
|
|
2016-04-12 11:10:33 +03:00
|
|
|
|
2015-01-23 19:37:29 +02:00
|
|
|
import logging
|
|
|
|
log = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2015-01-19 17:23:41 +02:00
|
|
|
class Project:
|
2015-01-31 23:34:49 +02:00
|
|
|
|
2015-01-19 17:23:41 +02:00
|
|
|
"""
|
2016-05-11 20:35:36 +03:00
|
|
|
A project contains a list of nodes.
|
|
|
|
    In theory nodes are isolated from one project to another.
|
2015-01-19 17:23:41 +02:00
|
|
|
|
2015-04-08 20:17:34 +03:00
|
|
|
:param project_id: force project identifier (None by default auto generate an UUID)
|
|
|
|
:param path: path of the project. (None use the standard directory)
|
2015-01-19 17:23:41 +02:00
|
|
|
"""
|
2015-01-19 23:43:35 +02:00
|
|
|
|
2016-05-24 18:54:08 +03:00
|
|
|
def __init__(self, name=None, project_id=None, path=None):
|
2015-01-19 23:43:35 +02:00
|
|
|
|
2015-03-09 03:13:01 +02:00
|
|
|
self._name = name
|
2016-03-10 11:32:07 +02:00
|
|
|
try:
|
|
|
|
UUID(project_id, version=4)
|
|
|
|
except ValueError:
|
|
|
|
raise aiohttp.web.HTTPBadRequest(text="{} is not a valid UUID".format(project_id))
|
|
|
|
self._id = project_id
|
2015-01-19 17:23:41 +02:00
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
self._nodes = set()
|
2015-03-22 01:19:12 +02:00
|
|
|
self._used_tcp_ports = set()
|
|
|
|
self._used_udp_ports = set()
|
2015-02-05 18:52:37 +02:00
|
|
|
|
|
|
|
if path is None:
|
2016-05-11 19:42:55 +03:00
|
|
|
location = get_default_project_directory()
|
2016-03-10 11:32:07 +02:00
|
|
|
path = os.path.join(location, self._id)
|
2015-01-21 04:02:22 +02:00
|
|
|
try:
|
2015-02-05 18:52:37 +02:00
|
|
|
os.makedirs(path, exist_ok=True)
|
2015-01-21 04:02:22 +02:00
|
|
|
except OSError as e:
|
|
|
|
raise aiohttp.web.HTTPInternalServerError(text="Could not create project directory: {}".format(e))
|
2015-02-05 18:52:37 +02:00
|
|
|
self.path = path
|
|
|
|
|
2016-04-21 18:27:49 +03:00
|
|
|
try:
|
|
|
|
if os.path.exists(self.tmp_working_directory()):
|
|
|
|
shutil.rmtree(self.tmp_working_directory())
|
|
|
|
except OSError:
|
|
|
|
raise aiohttp.web.HTTPInternalServerError(text="Could not clean project directory: {}".format(e))
|
|
|
|
|
2015-02-26 01:05:57 +02:00
|
|
|
log.info("Project {id} with path '{path}' created".format(path=self._path, id=self._id))
|
2015-01-20 15:31:47 +02:00
|
|
|
|
2015-02-16 07:13:24 +02:00
|
|
|
def __json__(self):
|
|
|
|
|
|
|
|
return {
|
2015-03-09 03:13:01 +02:00
|
|
|
"name": self._name,
|
2016-05-24 18:54:08 +03:00
|
|
|
"project_id": self._id
|
2015-02-16 07:13:24 +02:00
|
|
|
}
|
|
|
|
|
2015-02-04 22:17:00 +02:00
|
|
|
def _config(self):
|
|
|
|
|
|
|
|
return Config.instance().get_section_config("Server")
|
|
|
|
|
2015-03-09 03:13:01 +02:00
|
|
|
def is_local(self):
|
|
|
|
|
2015-03-09 20:45:02 +02:00
|
|
|
return self._config().getboolean("local", False)
|
2015-03-09 03:13:01 +02:00
|
|
|
|
2015-01-19 23:43:35 +02:00
|
|
|
@property
|
2015-02-04 22:48:29 +02:00
|
|
|
def id(self):
|
2015-01-19 23:43:35 +02:00
|
|
|
|
2015-02-04 22:48:29 +02:00
|
|
|
return self._id
|
2015-01-19 17:23:41 +02:00
|
|
|
|
2015-01-20 03:30:57 +02:00
|
|
|
@property
|
|
|
|
def path(self):
|
|
|
|
|
|
|
|
return self._path
|
|
|
|
|
2015-02-04 22:17:00 +02:00
|
|
|
@path.setter
|
|
|
|
def path(self, path):
|
2016-05-11 16:59:32 +03:00
|
|
|
check_path_allowed(path)
|
2015-02-04 22:17:00 +02:00
|
|
|
|
2015-02-05 18:52:37 +02:00
|
|
|
if hasattr(self, "_path"):
|
2015-03-09 03:13:01 +02:00
|
|
|
if path != self._path and self.is_local() is False:
|
2016-02-04 12:46:05 +02:00
|
|
|
raise aiohttp.web.HTTPForbidden(text="You are not allowed to modify the project directory path")
|
|
|
|
|
2015-02-04 22:17:00 +02:00
|
|
|
self._path = path
|
2015-05-04 15:04:57 +03:00
|
|
|
|
2015-03-09 03:13:01 +02:00
|
|
|
@property
|
|
|
|
def name(self):
|
|
|
|
|
|
|
|
return self._name
|
|
|
|
|
|
|
|
@name.setter
|
|
|
|
def name(self, name):
|
|
|
|
|
2016-04-05 19:32:48 +03:00
|
|
|
if "/" in name or "\\" in name:
|
|
|
|
raise aiohttp.web.HTTPForbidden(text="Name can not contain path separator")
|
2015-03-09 03:13:01 +02:00
|
|
|
self._name = name
|
|
|
|
|
2015-01-23 15:07:10 +02:00
|
|
|
@property
|
2016-05-11 20:35:36 +03:00
|
|
|
def nodes(self):
|
2015-01-23 15:07:10 +02:00
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
return self._nodes
|
2015-01-23 15:07:10 +02:00
|
|
|
|
2015-03-22 01:19:12 +02:00
|
|
|
def record_tcp_port(self, port):
|
|
|
|
"""
|
|
|
|
Associate a reserved TCP port number with this project.
|
|
|
|
|
|
|
|
:param port: TCP port number
|
|
|
|
"""
|
|
|
|
|
|
|
|
if port not in self._used_tcp_ports:
|
|
|
|
self._used_tcp_ports.add(port)
|
|
|
|
|
|
|
|
def record_udp_port(self, port):
|
|
|
|
"""
|
|
|
|
Associate a reserved UDP port number with this project.
|
|
|
|
|
|
|
|
:param port: UDP port number
|
|
|
|
"""
|
|
|
|
|
|
|
|
if port not in self._used_udp_ports:
|
|
|
|
self._used_udp_ports.add(port)
|
|
|
|
|
|
|
|
def remove_tcp_port(self, port):
|
|
|
|
"""
|
|
|
|
Removes an associated TCP port number from this project.
|
|
|
|
|
|
|
|
:param port: TCP port number
|
|
|
|
"""
|
|
|
|
|
|
|
|
if port in self._used_tcp_ports:
|
|
|
|
self._used_tcp_ports.remove(port)
|
|
|
|
|
|
|
|
def remove_udp_port(self, port):
|
|
|
|
"""
|
|
|
|
Removes an associated UDP port number from this project.
|
|
|
|
|
|
|
|
:param port: UDP port number
|
|
|
|
"""
|
|
|
|
|
|
|
|
if port in self._used_udp_ports:
|
|
|
|
self._used_udp_ports.remove(port)
|
|
|
|
|
2015-02-16 07:13:24 +02:00
|
|
|
def module_working_directory(self, module_name):
|
|
|
|
"""
|
2015-04-08 20:17:34 +03:00
|
|
|
Returns a working directory for the module
|
2016-05-14 03:48:10 +03:00
|
|
|
The directory is created if the directory doesn't exist.
|
2015-02-16 07:13:24 +02:00
|
|
|
|
|
|
|
:param module_name: name for the module
|
|
|
|
:returns: working directory
|
|
|
|
"""
|
|
|
|
|
2015-03-02 18:17:28 +02:00
|
|
|
workdir = self.module_working_path(module_name)
|
2015-02-16 07:13:24 +02:00
|
|
|
try:
|
|
|
|
os.makedirs(workdir, exist_ok=True)
|
|
|
|
except OSError as e:
|
|
|
|
raise aiohttp.web.HTTPInternalServerError(text="Could not create module working directory: {}".format(e))
|
|
|
|
return workdir
|
|
|
|
|
2015-03-02 18:17:28 +02:00
|
|
|
def module_working_path(self, module_name):
|
|
|
|
"""
|
2015-04-08 20:17:34 +03:00
|
|
|
Returns the working directory for the module. If you want
|
2015-03-02 18:17:28 +02:00
|
|
|
to be sure to have the directory on disk take a look on:
|
|
|
|
module_working_directory
|
|
|
|
"""
|
2015-04-08 20:17:34 +03:00
|
|
|
|
2015-03-02 18:17:28 +02:00
|
|
|
return os.path.join(self._path, "project-files", module_name)
|
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
def node_working_directory(self, node):
|
2015-01-20 15:31:47 +02:00
|
|
|
"""
|
2016-05-11 20:35:36 +03:00
|
|
|
Returns a working directory for a specific node.
|
2015-01-20 15:31:47 +02:00
|
|
|
If the directory doesn't exist, the directory is created.
|
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
:param node: Node instance
|
2015-04-08 20:17:34 +03:00
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
:returns: Node working directory
|
2015-01-20 15:31:47 +02:00
|
|
|
"""
|
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
workdir = os.path.join(self._path, "project-files", node.manager.module_name.lower(), node.id)
|
2015-01-21 04:02:22 +02:00
|
|
|
try:
|
2015-01-22 04:28:52 +02:00
|
|
|
os.makedirs(workdir, exist_ok=True)
|
2015-01-21 04:02:22 +02:00
|
|
|
except OSError as e:
|
2016-05-11 20:35:36 +03:00
|
|
|
raise aiohttp.web.HTTPInternalServerError(text="Could not create the node working directory: {}".format(e))
|
2015-01-24 03:33:49 +02:00
|
|
|
return workdir
|
|
|
|
|
2016-04-21 18:27:49 +03:00
|
|
|
def tmp_working_directory(self):
|
|
|
|
"""
|
|
|
|
A temporary directory. Will be clean at project open and close
|
|
|
|
"""
|
2016-04-22 17:22:03 +03:00
|
|
|
return os.path.join(self._path, "tmp")
|
2016-04-21 18:27:49 +03:00
|
|
|
|
2015-01-24 03:33:49 +02:00
|
|
|
def capture_working_directory(self):
|
|
|
|
"""
|
2016-04-21 18:27:49 +03:00
|
|
|
Returns a working directory where to temporary store packet capture files.
|
2015-01-24 03:33:49 +02:00
|
|
|
|
|
|
|
:returns: path to the directory
|
|
|
|
"""
|
|
|
|
|
2016-04-22 17:22:03 +03:00
|
|
|
workdir = os.path.join(self._path, "tmp", "captures")
|
2015-01-24 03:33:49 +02:00
|
|
|
try:
|
|
|
|
os.makedirs(workdir, exist_ok=True)
|
|
|
|
except OSError as e:
|
|
|
|
raise aiohttp.web.HTTPInternalServerError(text="Could not create the capture working directory: {}".format(e))
|
2015-01-22 04:28:52 +02:00
|
|
|
return workdir
|
2015-01-20 15:31:47 +02:00
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
def add_node(self, node):
|
2015-01-23 15:07:10 +02:00
|
|
|
"""
|
2016-05-11 20:35:36 +03:00
|
|
|
Adds a node to the project.
|
|
|
|
In theory this should be called by the node manager.
|
2015-01-23 15:07:10 +02:00
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
:param node: Node instance
|
2015-01-23 15:07:10 +02:00
|
|
|
"""
|
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
self._nodes.add(node)
|
2015-01-23 15:07:10 +02:00
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
def remove_node(self, node):
|
2015-01-23 15:34:50 +02:00
|
|
|
"""
|
2016-05-11 20:35:36 +03:00
|
|
|
Removes a node from the project.
|
|
|
|
In theory this should be called by the node manager.
|
2015-01-23 15:34:50 +02:00
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
:param node: Node instance
|
2015-01-23 15:34:50 +02:00
|
|
|
"""
|
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
if node in self._nodes:
|
2016-06-14 11:16:33 +03:00
|
|
|
yield from node.delete()
|
2016-05-11 20:35:36 +03:00
|
|
|
self._nodes.remove(node)
|
2015-01-23 15:34:50 +02:00
|
|
|
|
2015-01-26 13:10:30 +02:00
|
|
|
@asyncio.coroutine
|
2015-01-23 12:48:20 +02:00
|
|
|
def close(self):
|
2015-04-08 20:17:34 +03:00
|
|
|
"""
|
|
|
|
Closes the project, but keep information on disk
|
|
|
|
"""
|
2015-01-23 12:48:20 +02:00
|
|
|
|
2016-04-15 18:57:06 +03:00
|
|
|
for module in self.compute():
|
2015-03-02 18:17:28 +02:00
|
|
|
yield from module.instance().project_closing(self)
|
2016-05-24 18:54:08 +03:00
|
|
|
yield from self._close_and_clean(False)
|
2016-04-15 18:57:06 +03:00
|
|
|
for module in self.compute():
|
2015-03-02 18:17:28 +02:00
|
|
|
yield from module.instance().project_closed(self)
|
2015-01-23 17:02:26 +02:00
|
|
|
|
2016-04-21 18:27:49 +03:00
|
|
|
try:
|
|
|
|
if os.path.exists(self.tmp_working_directory()):
|
|
|
|
shutil.rmtree(self.tmp_working_directory())
|
|
|
|
except OSError:
|
|
|
|
pass
|
|
|
|
|
2015-01-26 13:10:30 +02:00
|
|
|
@asyncio.coroutine
|
2015-01-23 17:02:26 +02:00
|
|
|
def _close_and_clean(self, cleanup):
|
|
|
|
"""
|
2015-04-08 20:17:34 +03:00
|
|
|
Closes the project, and cleanup the disk if cleanup is True
|
2015-01-23 17:02:26 +02:00
|
|
|
|
2016-05-14 03:48:10 +03:00
|
|
|
:param cleanup: Whether to delete the project directory
|
2015-01-23 17:02:26 +02:00
|
|
|
"""
|
|
|
|
|
2015-02-05 23:24:06 +02:00
|
|
|
tasks = []
|
2016-05-11 20:35:36 +03:00
|
|
|
for node in self._nodes:
|
|
|
|
tasks.append(asyncio.async(node.manager.close_node(node.id)))
|
2015-02-05 23:24:06 +02:00
|
|
|
|
|
|
|
if tasks:
|
|
|
|
done, _ = yield from asyncio.wait(tasks)
|
|
|
|
for future in done:
|
|
|
|
try:
|
|
|
|
future.result()
|
2015-07-26 01:46:23 +03:00
|
|
|
except (Exception, GeneratorExit) as e:
|
2016-05-11 20:35:36 +03:00
|
|
|
log.error("Could not close node {}".format(e), exc_info=1)
|
2015-02-05 23:24:06 +02:00
|
|
|
|
2015-01-23 17:02:26 +02:00
|
|
|
if cleanup and os.path.exists(self.path):
|
2015-01-26 14:54:44 +02:00
|
|
|
try:
|
|
|
|
yield from wait_run_in_executor(shutil.rmtree, self.path)
|
2015-02-26 01:05:57 +02:00
|
|
|
log.info("Project {id} with path '{path}' deleted".format(path=self._path, id=self._id))
|
2015-01-26 14:54:44 +02:00
|
|
|
except OSError as e:
|
|
|
|
raise aiohttp.web.HTTPInternalServerError(text="Could not delete the project directory: {}".format(e))
|
2015-02-26 01:05:57 +02:00
|
|
|
else:
|
|
|
|
log.info("Project {id} with path '{path}' closed".format(path=self._path, id=self._id))
|
2015-01-23 12:48:20 +02:00
|
|
|
|
2015-03-22 01:19:12 +02:00
|
|
|
if self._used_tcp_ports:
|
|
|
|
log.warning("Project {} has TCP ports still in use: {}".format(self.id, self._used_tcp_ports))
|
|
|
|
if self._used_udp_ports:
|
|
|
|
log.warning("Project {} has UDP ports still in use: {}".format(self.id, self._used_udp_ports))
|
2015-02-24 04:59:19 +02:00
|
|
|
|
2016-05-11 20:35:36 +03:00
|
|
|
# clean the remaining ports that have not been cleaned by their respective node.
|
2015-03-22 01:19:12 +02:00
|
|
|
port_manager = PortManager.instance()
|
|
|
|
for port in self._used_tcp_ports.copy():
|
|
|
|
port_manager.release_tcp_port(port, self)
|
|
|
|
for port in self._used_udp_ports.copy():
|
2015-04-15 16:58:31 +03:00
|
|
|
port_manager.release_udp_port(port, self)
|
2015-02-24 04:59:19 +02:00
|
|
|
|
2015-01-26 13:10:30 +02:00
|
|
|
@asyncio.coroutine
|
2015-01-23 12:48:20 +02:00
|
|
|
def delete(self):
|
2015-04-08 20:17:34 +03:00
|
|
|
"""
|
|
|
|
Removes project from disk
|
|
|
|
"""
|
2015-01-23 12:48:20 +02:00
|
|
|
|
2016-04-15 18:57:06 +03:00
|
|
|
for module in self.compute():
|
2015-03-02 18:17:28 +02:00
|
|
|
yield from module.instance().project_closing(self)
|
2015-01-26 13:10:30 +02:00
|
|
|
yield from self._close_and_clean(True)
|
2016-04-15 18:57:06 +03:00
|
|
|
for module in self.compute():
|
2015-03-02 18:17:28 +02:00
|
|
|
yield from module.instance().project_closed(self)
|
2015-02-04 18:18:53 +02:00
|
|
|
|
2016-04-15 18:57:06 +03:00
|
|
|
def compute(self):
|
2015-04-08 20:17:34 +03:00
|
|
|
"""
|
2016-05-11 20:35:36 +03:00
|
|
|
Returns all loaded modules from compute.
|
2015-04-08 20:17:34 +03:00
|
|
|
"""
|
2015-03-02 18:17:28 +02:00
|
|
|
|
|
|
|
# We import it at the last time to avoid circular dependencies
|
2016-04-15 18:57:06 +03:00
|
|
|
from ..compute import MODULES
|
2015-03-02 18:17:28 +02:00
|
|
|
return MODULES
|
2015-03-04 17:01:56 +02:00
|
|
|
|
|
|
|
def emit(self, action, event):
|
|
|
|
"""
|
2015-05-13 22:53:42 +03:00
|
|
|
Send an event to all the client listening for notifications
|
2015-03-04 17:01:56 +02:00
|
|
|
|
2015-05-13 22:53:42 +03:00
|
|
|
:param action: Action name
|
|
|
|
:param event: Event to send
|
2015-03-04 17:01:56 +02:00
|
|
|
"""
|
2016-03-17 16:15:30 +02:00
|
|
|
NotificationManager.instance().emit(action, event, project_id=self.id)
|
2015-12-22 16:19:38 +02:00
|
|
|
|
2015-05-14 13:03:17 +03:00
|
|
|
@asyncio.coroutine
|
|
|
|
def list_files(self):
|
|
|
|
"""
|
2016-05-14 03:48:10 +03:00
|
|
|
:returns: Array of files in project without temporary files. The files are dictionary {"path": "test.bin", "md5sum": "aaaaa"}
|
2015-05-14 13:03:17 +03:00
|
|
|
"""
|
|
|
|
|
|
|
|
files = []
|
2016-06-02 02:50:31 +03:00
|
|
|
for dirpath, dirnames, filenames in os.walk(self.path):
|
2015-05-14 13:03:17 +03:00
|
|
|
for filename in filenames:
|
|
|
|
if not filename.endswith(".ghost"):
|
|
|
|
path = os.path.relpath(dirpath, self.path)
|
|
|
|
path = os.path.join(path, filename)
|
|
|
|
path = os.path.normpath(path)
|
|
|
|
file_info = {"path": path}
|
|
|
|
|
|
|
|
try:
|
|
|
|
file_info["md5sum"] = yield from wait_run_in_executor(self._hash_file, os.path.join(dirpath, filename))
|
|
|
|
except OSError:
|
|
|
|
continue
|
|
|
|
files.append(file_info)
|
|
|
|
|
|
|
|
return files
|
|
|
|
|
|
|
|
def _hash_file(self, path):
|
|
|
|
"""
|
|
|
|
Compute and md5 hash for file
|
|
|
|
|
|
|
|
:returns: hexadecimal md5
|
|
|
|
"""
|
|
|
|
|
|
|
|
m = hashlib.md5()
|
|
|
|
with open(path, "rb") as f:
|
|
|
|
while True:
|
|
|
|
buf = f.read(128)
|
|
|
|
if not buf:
|
|
|
|
break
|
|
|
|
m.update(buf)
|
|
|
|
return m.hexdigest()
|
2016-03-30 12:43:31 +03:00
|
|
|
|
2016-04-12 11:10:33 +03:00
|
|
|
def export(self, include_images=False):
|
2016-03-30 12:43:31 +03:00
|
|
|
"""
|
|
|
|
Export the project as zip. It's a ZipStream object.
|
|
|
|
The file will be read chunk by chunk when you iterate on
|
|
|
|
the zip.
|
|
|
|
|
|
|
|
It will ignore some files like snapshots and
|
|
|
|
|
|
|
|
:returns: ZipStream object
|
|
|
|
"""
|
|
|
|
|
|
|
|
z = zipstream.ZipFile()
|
2016-05-14 03:48:10 +03:00
|
|
|
# topdown allows to modify the list of directory in order to ignore the directory
|
2016-03-30 12:43:31 +03:00
|
|
|
for root, dirs, files in os.walk(self._path, topdown=True):
|
2016-04-21 18:02:05 +03:00
|
|
|
# Remove snapshots and capture
|
2016-04-06 00:53:18 +03:00
|
|
|
if os.path.split(root)[-1:][0] == "project-files":
|
2016-04-21 18:27:49 +03:00
|
|
|
dirs[:] = [d for d in dirs if d not in ("snapshots", "tmp")]
|
2016-03-30 12:43:31 +03:00
|
|
|
|
2016-05-18 12:23:45 +03:00
|
|
|
# Ignore log files and OS noise
|
2016-03-30 12:43:31 +03:00
|
|
|
files = [f for f in files if not f.endswith('_log.txt') and not f.endswith('.log') and f != '.DS_Store']
|
|
|
|
|
|
|
|
for file in files:
|
|
|
|
path = os.path.join(root, file)
|
2016-05-27 15:45:02 +03:00
|
|
|
# Try open the file
|
|
|
|
try:
|
|
|
|
open(path).close()
|
|
|
|
except OSError as e:
|
|
|
|
msg = "Could not export file {}: {}".format(path, e)
|
|
|
|
log.warn(msg)
|
|
|
|
self.emit("log.warning", {"message": msg})
|
|
|
|
continue
|
2016-03-30 18:56:55 +03:00
|
|
|
# We rename the .gns3 project.gns3 to avoid the task to the client to guess the file name
|
|
|
|
if file.endswith(".gns3"):
|
2016-04-12 11:10:33 +03:00
|
|
|
self._export_project_file(path, z, include_images)
|
2016-03-30 18:56:55 +03:00
|
|
|
else:
|
2016-04-06 00:53:18 +03:00
|
|
|
# We merge the data from all server in the same project-files directory
|
2016-05-12 11:39:50 +03:00
|
|
|
node_directory = os.path.join(self._path, "servers", "vm")
|
|
|
|
if os.path.commonprefix([root, node_directory]) == node_directory:
|
|
|
|
z.write(path, os.path.relpath(path, node_directory))
|
2016-04-06 00:53:18 +03:00
|
|
|
else:
|
|
|
|
z.write(path, os.path.relpath(path, self._path))
|
2016-03-30 12:43:31 +03:00
|
|
|
return z
|
2016-04-05 19:32:48 +03:00
|
|
|
|
2016-04-12 11:10:33 +03:00
|
|
|
def _export_images(self, image, type, z):
|
|
|
|
"""
|
|
|
|
Take a project file (.gns3) and export images to the zip
|
|
|
|
|
|
|
|
:param image: Image path
|
|
|
|
:param type: Type of image
|
|
|
|
:param z: Zipfile instance for the export
|
|
|
|
"""
|
|
|
|
from . import MODULES
|
|
|
|
|
|
|
|
for module in MODULES:
|
|
|
|
try:
|
|
|
|
img_directory = module.instance().get_images_directory()
|
|
|
|
except NotImplementedError:
|
|
|
|
# Some modules don't have images
|
|
|
|
continue
|
|
|
|
|
|
|
|
directory = os.path.split(img_directory)[-1:][0]
|
|
|
|
|
|
|
|
if os.path.exists(image):
|
|
|
|
path = image
|
|
|
|
else:
|
|
|
|
path = os.path.join(img_directory, image)
|
|
|
|
|
|
|
|
if os.path.exists(path):
|
|
|
|
arcname = os.path.join("images", directory, os.path.basename(image))
|
|
|
|
z.write(path, arcname)
|
|
|
|
break
|
|
|
|
|
|
|
|
def _export_project_file(self, path, z, include_images):
|
2016-04-11 18:16:06 +03:00
|
|
|
"""
|
|
|
|
Take a project file (.gns3) and patch it for the export
|
|
|
|
|
2016-04-12 11:10:33 +03:00
|
|
|
:param path: Path of the .gns3
|
2016-04-11 18:16:06 +03:00
|
|
|
"""
|
|
|
|
|
|
|
|
with open(path) as f:
|
|
|
|
topology = json.load(f)
|
|
|
|
if "topology" in topology and "nodes" in topology["topology"]:
|
|
|
|
for node in topology["topology"]["nodes"]:
|
2016-04-12 11:10:33 +03:00
|
|
|
if "properties" in node and node["type"] != "DockerVM":
|
2016-04-11 18:16:06 +03:00
|
|
|
for prop, value in node["properties"].items():
|
|
|
|
if prop.endswith("image"):
|
|
|
|
node["properties"][prop] = os.path.basename(value)
|
2016-05-07 19:39:32 +03:00
|
|
|
if include_images is True:
|
2016-04-12 11:10:33 +03:00
|
|
|
self._export_images(value, node["type"], z)
|
|
|
|
z.writestr("project.gns3", json.dumps(topology).encode())
|
2016-04-11 18:16:06 +03:00
|
|
|
|
2016-04-06 00:53:18 +03:00
|
|
|
    def import_zip(self, stream, gns3vm=True):
        """
        Import a project contained in a zip file.

        :param stream: A io.BytesIO of the zipfile
        :param gns3vm: True move Docker, IOU and Qemu to the GNS3 VM
        """

        with zipfile.ZipFile(stream) as myzip:
            myzip.extractall(self.path)

        project_file = os.path.join(self.path, "project.gns3")
        if os.path.exists(project_file):
            with open(project_file) as f:
                topology = json.load(f)
                # Rebind the imported topology to this project's identity.
                topology["project_id"] = self.id
                topology["name"] = self.name
                topology.setdefault("topology", {})
                topology["topology"].setdefault("nodes", [])
                # Server 1 is the local server.
                topology["topology"]["servers"] = [
                    {
                        "id": 1,
                        "local": True,
                        "vm": False
                    }
                ]

            # By default all node run on local server
            for node in topology["topology"]["nodes"]:
                node["server_id"] = 1

            if gns3vm:
                # Move to servers/vm directory the data that should be import on remote server
                # Maps on-disk module directory name -> node type in the topology.
                modules_to_vm = {
                    "qemu": "QemuVM",
                    "iou": "IOUDevice",
                    "docker": "DockerVM"
                }

                node_directory = os.path.join(self.path, "servers", "vm", "project-files")
                vm_server_use = False

                for module, vm_type in modules_to_vm.items():
                    module_directory = os.path.join(self.path, "project-files", module)
                    if os.path.exists(module_directory):
                        os.makedirs(node_directory, exist_ok=True)
                        shutil.move(module_directory, os.path.join(node_directory, module))

                        # Patch node to use the GNS3 VM
                        for node in topology["topology"]["nodes"]:
                            if node["type"] == vm_type:
                                node["server_id"] = 2
                                vm_server_use = True

                # We use the GNS3 VM. We need to add the server to the list
                if vm_server_use:
                    topology["topology"]["servers"].append({
                        "id": 2,
                        "vm": True,
                        "local": False
                    })

            # Write the modified topology
            with open(project_file, "w") as f:
                json.dump(topology, f, indent=4)

            # Rename to a human distinctive name
            shutil.move(project_file, os.path.join(self.path, self.name + ".gns3"))
        if os.path.exists(os.path.join(self.path, "images")):
            self._import_images()
|
|
|
|
|
|
|
|
def _import_images(self):
|
|
|
|
"""
|
|
|
|
Copy images to the images directory or delete them if they
|
|
|
|
already exists.
|
|
|
|
"""
|
|
|
|
image_dir = self._config().get("images_path")
|
|
|
|
|
|
|
|
root = os.path.join(self.path, "images")
|
|
|
|
for (dirpath, dirnames, filenames) in os.walk(root):
|
|
|
|
for filename in filenames:
|
|
|
|
path = os.path.join(dirpath, filename)
|
|
|
|
dst = os.path.join(image_dir, os.path.relpath(path, root))
|
|
|
|
os.makedirs(os.path.dirname(dst), exist_ok=True)
|
|
|
|
shutil.move(path, dst)
|
|
|
|
|
|
|
|
# Cleanup the project
|
2016-05-14 03:48:10 +03:00
|
|
|
shutil.rmtree(root, ignore_errors=True)
|