#!/usr/bin/env python
#
# Copyright (C) 2016 GNS3 Technologies Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import json
import asyncio
import aiohttp
import zipfile
import tempfile
import zipstream

from datetime import datetime

import logging
log = logging.getLogger(__name__)


async def export_project(project, temporary_dir, include_images=False, keep_compute_id=False, allow_all_nodes=False, reset_mac_addresses=False):
"""
2018-04-28 12:01:43 +03:00
Export a project to a zip file .
2016-07-21 19:15:35 +03:00
2018-04-28 12:01:43 +03:00
The file will be read chunk by chunk when you iterate over the zip stream .
Some files like snapshots and packet captures are ignored .
2016-07-21 19:15:35 +03:00
2016-07-21 21:17:36 +03:00
: param temporary_dir : A temporary dir where to store intermediate data
2018-04-28 12:01:43 +03:00
: param include images : save OS images to the zip file
: param keep_compute_id : If false replace all compute id by local ( standard behavior for . gns3project to make it portable )
: param allow_all_nodes : Allow all nodes type to be include in the zip even if not portable
2019-02-20 11:38:43 +02:00
: param reset_mac_addresses : Reset MAC addresses for every nodes .
2018-04-28 12:01:43 +03:00
2016-07-21 19:15:35 +03:00
: returns : ZipStream object
"""

    # To avoid issues with unsaved data we disallow exporting a running project
    if project.is_running():
        raise aiohttp.web.HTTPConflict(text="Project must be stopped in order to export it")

    # Make sure we save the project
    project.dump()

    zstream = zipstream.ZipFile(allowZip64=True)

    if not os.path.exists(project._path):
        raise aiohttp.web.HTTPNotFound(text="Project could not be found at '{}'".format(project._path))

    # First we process the .gns3 in order to be sure we don't have an error
    for file in os.listdir(project._path):
        if file.endswith(".gns3"):
            await _patch_project_file(project, os.path.join(project._path, file), zstream, include_images, keep_compute_id, allow_all_nodes, temporary_dir, reset_mac_addresses)

    # Export the local files
    for root, dirs, files in os.walk(project._path, topdown=True, followlinks=False):
        files = [f for f in files if _is_exportable(os.path.join(root, f))]
        for file in files:
            path = os.path.join(root, file)
            # check if we can export the file
            try:
                open(path).close()
            except OSError as e:
                msg = "Could not export file {}: {}".format(path, e)
                log.warning(msg)
                project.emit_notification("log.warning", {"message": msg})
                continue
            # ignore the .gns3 file
            if file.endswith(".gns3"):
                continue
            _patch_mtime(path)
            zstream.write(path, os.path.relpath(path, project._path), compress_type=zipfile.ZIP_DEFLATED)

    # Export files from remote computes
    downloaded_files = set()
    for compute in project.computes:
        if compute.id != "local":
            compute_files = await compute.list_files(project)
            for compute_file in compute_files:
                if _is_exportable(compute_file["path"]):
                    # stream the remote file to a temporary location before adding it to the archive
                    (fd, temp_path) = tempfile.mkstemp(dir=temporary_dir)
                    f = open(fd, "wb", closefd=True)
                    response = await compute.download_file(project, compute_file["path"])
                    while True:
                        try:
                            data = await response.content.read(1024)
                        except asyncio.TimeoutError:
                            raise aiohttp.web.HTTPRequestTimeout(text="Timeout when downloading file '{}' from remote compute {}:{}".format(compute_file["path"], compute.host, compute.port))
                        if not data:
                            break
                        f.write(data)
                    response.close()
                    f.close()
                    _patch_mtime(temp_path)
                    zstream.write(temp_path, arcname=compute_file["path"], compress_type=zipfile.ZIP_DEFLATED)
                    downloaded_files.add(compute_file['path'])

    return zstream
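
# Example usage (illustrative only, not part of the server's public API): from
# inside a coroutine, a caller that already has a Project instance could stream
# the export to disk roughly like this; the actual handler code differs.
#
#     with tempfile.TemporaryDirectory() as tmpdir:
#         zstream = await export_project(project, tmpdir, include_images=False)
#         with open("my-project.gns3project", "wb") as f:
#             for chunk in zstream:
#                 f.write(chunk)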


def _patch_mtime(path):
    """
    Patch the file mtime because ZIP does not support timestamps before 1980

    :param path: file path
    """
    if sys.platform.startswith("win"):
        # patching the mtime is only needed on UNIX-type platforms
        return
    st = os.stat(path)
    file_date = datetime.fromtimestamp(st.st_mtime)
    if file_date.year < 1980:
        new_mtime = file_date.replace(year=1980).timestamp()
        os.utime(path, (st.st_atime, new_mtime))


def _is_exportable(path):
    """
    :returns: True if the file can be included in the final archive
    """

    # do not export snapshots
    if path.endswith("snapshots"):
        return False

    # do not export symlinks
    if os.path.islink(path):
        return False

    # do not export directories of snapshots
    if "{sep}snapshots{sep}".format(sep=os.path.sep) in path:
        return False

    try:
        # do not export captures and other temporary directories
        s = os.path.normpath(path).split(os.path.sep)
        i = s.index("project-files")
        if s[i + 1] in ("tmp", "captures", "snapshots"):
            return False
    except (ValueError, IndexError):
        pass

    # do not export log files and OS noise
    filename = os.path.basename(path)
    if filename.endswith('_log.txt') or filename.endswith('.log') or filename == '.DS_Store':
        return False
    return True
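
# Illustrative examples (hypothetical paths, POSIX separators assumed):
#   _is_exportable("/projects/p1/project-files/captures/lan.pcap")   -> False
#   _is_exportable("/projects/p1/project-files/vpcs/i1/startup.vpc") -> True
#   _is_exportable("/projects/p1/gns3_server.log")                   -> False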


async def _patch_project_file(project, path, zstream, include_images, keep_compute_id, allow_all_nodes, temporary_dir, reset_mac_addresses):
    """
    Patch a project file (.gns3) to export a project.
    The .gns3 file is renamed to project.gns3

    :param path: path of the .gns3 file
    """

    # image files that we need to include in the exported archive
    images = []

    try:
        with open(path) as f:
            topology = json.load(f)
    except (OSError, ValueError) as e:
        raise aiohttp.web.HTTPConflict(text="Project file '{}' cannot be read: {}".format(path, e))

    if "topology" in topology:
        if "nodes" in topology["topology"]:
            for node in topology["topology"]["nodes"]:
                compute_id = node.get('compute_id', 'local')
                if node["node_type"] == "virtualbox" and node.get("properties", {}).get("linked_clone"):
                    raise aiohttp.web.HTTPConflict(text="Projects with a linked {} clone node cannot be exported. Please use Qemu instead.".format(node["node_type"]))
                if not allow_all_nodes and node["node_type"] in ["virtualbox", "vmware"]:
                    raise aiohttp.web.HTTPConflict(text="Projects with a {} node cannot be exported".format(node["node_type"]))
                if not keep_compute_id:
                    node["compute_id"] = "local"  # To make the project portable, all nodes run on the local compute by default
                if "properties" in node and node["node_type"] != "docker":
                    for prop, value in node["properties"].items():

                        # reset the MAC address
                        if reset_mac_addresses and prop in ("mac_addr", "mac_address"):
                            node["properties"][prop] = None

                        # for IOU the image is stored in the "path" property; other node types use "*image" properties
                        if node["node_type"] == "iou":
                            if not prop == "path":
                                continue
                        elif not prop.endswith("image"):
                            continue
                        if value is None or value.strip() == '':
                            continue
                        if not keep_compute_id:  # If we keep the original compute we can keep the image path
                            node["properties"][prop] = os.path.basename(value)
                        if include_images is True:
                            images.append({
                                'compute_id': compute_id,
                                'image': value,
                                'image_type': node['node_type']
                            })

        if not keep_compute_id:
            topology["topology"]["computes"] = []  # Strip compute information because it could contain secrets such as passwords

    local_images = set([i['image'] for i in images if i['compute_id'] == 'local'])

    for image in local_images:
        _export_local_image(image, zstream)

    remote_images = set([
        (i['compute_id'], i['image_type'], i['image'])
        for i in images if i['compute_id'] != 'local'])

    for compute_id, image_type, image in remote_images:
        await _export_remote_images(project, compute_id, image_type, image, zstream, temporary_dir)

    zstream.writestr("project.gns3", json.dumps(topology).encode())
    return images
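
# Illustrative example (hypothetical node entry): with keep_compute_id=False and
# reset_mac_addresses=True, a Qemu node such as
#     {"node_type": "qemu", "compute_id": "remote-1",
#      "properties": {"hda_disk_image": "/opt/images/QEMU/router.qcow2",
#                     "mac_address": "00:11:22:33:44:55"}}
# would be patched in memory to
#     {"node_type": "qemu", "compute_id": "local",
#      "properties": {"hda_disk_image": "router.qcow2", "mac_address": None}}
# (the MAC becomes null in the serialized project.gns3), and "router.qcow2"
# would be queued for inclusion when include_images is True.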


def _export_local_image(image, zstream):
    """
    Exports a local image to the zip file.

    :param image: image path
    :param zstream: Zipfile instance for the export
    """

    from ..compute import MODULES

    for module in MODULES:
        try:
            images_directory = module.instance().get_images_directory()
        except NotImplementedError:
            # Some modules don't have images
            continue

        directory = os.path.split(images_directory)[-1:][0]

        if os.path.exists(image):
            path = image
        else:
            path = os.path.join(images_directory, image)
        if os.path.exists(path):
            arcname = os.path.join("images", directory, os.path.basename(image))
            _patch_mtime(path)
            zstream.write(path, arcname)
            return


async def _export_remote_images(project, compute_id, image_type, image, project_zipfile, temporary_dir):
    """
    Export a specific image from a remote compute.
    """

    log.info("Downloading image '{}' from compute '{}'".format(image, compute_id))
    try:
        compute = [compute for compute in project.computes if compute.id == compute_id][0]
    except IndexError:
        raise aiohttp.web.HTTPConflict(text="Cannot export image from '{}' compute. Compute doesn't exist.".format(compute_id))

    (fd, temp_path) = tempfile.mkstemp(dir=temporary_dir)
    f = open(fd, "wb", closefd=True)
    response = await compute.download_image(image_type, image)

    if response.status != 200:
        raise aiohttp.web.HTTPConflict(text="Cannot export image from '{}' compute. Compute returned status code {}.".format(compute_id, response.status))

    while True:
        try:
            data = await response.content.read(1024)
        except asyncio.TimeoutError:
            raise aiohttp.web.HTTPRequestTimeout(text="Timeout when downloading image '{}' from remote compute {}:{}".format(image, compute.host, compute.port))
        if not data:
            break
        f.write(data)
    response.close()
    f.close()
    arcname = os.path.join("images", image_type, image)
    log.info("Saved {}".format(arcname))
    project_zipfile.write(temp_path, arcname=arcname, compress_type=zipfile.ZIP_DEFLATED)
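
# Note: both local and remote images end up under an "images/" prefix in the
# archive: remote images as "images/<image_type>/<image>", local images as
# "images/<basename of the module's images directory>/<image basename>"
# (see _export_local_image).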