Merge pull request #2393 from GNS3/feature/keep-compute-ids

Option to keep the compute IDs unchanged when exporting a project
This commit is contained in:
Jeremy Grossmann 2024-07-06 17:12:38 +02:00 committed by GitHub
commit b48bd92da3
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 35 additions and 23 deletions

View File

@ -32,7 +32,7 @@ log = logging.getLogger(__name__)
CHUNK_SIZE = 1024 * 8 # 8KB
async def export_project(zstream, project, temporary_dir, include_images=False, include_snapshots=False, keep_compute_id=False, allow_all_nodes=False, reset_mac_addresses=False):
async def export_project(zstream, project, temporary_dir, include_images=False, include_snapshots=False, keep_compute_ids=False, allow_all_nodes=False, reset_mac_addresses=False):
"""
Export a project to a zip file.
@ -44,9 +44,9 @@ async def export_project(zstream, project, temporary_dir, include_images=False,
:param temporary_dir: A temporary dir where to store intermediate data
:param include_images: save OS images to the zip file
:param include_snapshots: save snapshots to the zip file
:param keep_compute_id: If false replace all compute id by local (standard behavior for .gns3project to make it portable)
:param allow_all_nodes: Allow all nodes type to be include in the zip even if not portable
:param reset_mac_addresses: Reset MAC addresses for every nodes.
:param keep_compute_ids: If false, replace all compute IDs by "local" (standard behavior for .gns3project to make it portable)
:param allow_all_nodes: Allow all nodes type to be included in the zip even if not portable
:param reset_mac_addresses: Reset MAC addresses for each node.
"""
# To avoid issue with data not saved we disallow the export of a running project
@ -62,7 +62,7 @@ async def export_project(zstream, project, temporary_dir, include_images=False,
# First we process the .gns3 in order to be sure we don't have an error
for file in os.listdir(project._path):
if file.endswith(".gns3"):
await _patch_project_file(project, os.path.join(project._path, file), zstream, include_images, keep_compute_id, allow_all_nodes, temporary_dir, reset_mac_addresses)
await _patch_project_file(project, os.path.join(project._path, file), zstream, include_images, keep_compute_ids, allow_all_nodes, temporary_dir, reset_mac_addresses)
# Export the local files
for root, dirs, files in os.walk(project._path, topdown=True, followlinks=False):
@ -170,7 +170,7 @@ def _is_exportable(path, include_snapshots=False):
return True
async def _patch_project_file(project, path, zstream, include_images, keep_compute_id, allow_all_nodes, temporary_dir, reset_mac_addresses):
async def _patch_project_file(project, path, zstream, include_images, keep_compute_ids, allow_all_nodes, temporary_dir, reset_mac_addresses):
"""
Patch a project file (.gns3) to export a project.
The .gns3 file is renamed to project.gns3
@ -197,7 +197,7 @@ async def _patch_project_file(project, path, zstream, include_images, keep_compu
if not allow_all_nodes and node["node_type"] in ["virtualbox", "vmware"]:
raise aiohttp.web.HTTPConflict(text="Projects with a {} node cannot be exported".format(node["node_type"]))
if not keep_compute_id:
if not keep_compute_ids:
node["compute_id"] = "local" # To make the project portable, all nodes run on the local compute by default
if "properties" in node and node["node_type"] != "docker":
@ -215,7 +215,7 @@ async def _patch_project_file(project, path, zstream, include_images, keep_compu
if value is None or value.strip() == '':
continue
if not keep_compute_id: # If we keep the original compute we can keep the image path
if not keep_compute_ids: # If we keep the original compute we can keep the image path
node["properties"][prop] = os.path.basename(value)
if include_images is True:
@ -225,7 +225,7 @@ async def _patch_project_file(project, path, zstream, include_images, keep_compu
'image_type': node['node_type']
})
if not keep_compute_id:
if not keep_compute_ids:
topology["topology"]["computes"] = [] # Strip compute information because could contain secret info like password
local_images = set([i['image'] for i in images if i['compute_id'] == 'local'])

View File

@ -38,7 +38,7 @@ Handle the import of project from a .gns3project
"""
async def import_project(controller, project_id, stream, location=None, name=None, keep_compute_id=False,
async def import_project(controller, project_id, stream, location=None, name=None, keep_compute_ids=False,
auto_start=False, auto_open=False, auto_close=True):
"""
Import a project contained in a zip file
@ -50,7 +50,7 @@ async def import_project(controller, project_id, stream, location=None, name=Non
:param stream: A io.BytesIO of the zipfile
:param location: Directory for the project if None put in the default directory
:param name: Wanted project name, generate one from the .gns3 if None
:param keep_compute_id: If true do not touch the compute id
:param keep_compute_ids: keep compute IDs unchanged
:returns: Project
"""
@ -124,7 +124,7 @@ async def import_project(controller, project_id, stream, location=None, name=Non
drawing["drawing_id"] = str(uuid.uuid4())
# Modify the compute id of the node depending on compute capacity
if not keep_compute_id:
if not keep_compute_ids:
# For some VM type we move them to the GNS3 VM if possible
# unless it's a linux host without GNS3 VM
if not sys.platform.startswith("linux") or controller.has_compute("vm"):

View File

@ -1066,7 +1066,7 @@ class Project:
with tempfile.TemporaryDirectory(dir=working_dir) as tmpdir:
# Do not compress the exported project when duplicating
with aiozipstream.ZipFile(compression=zipfile.ZIP_STORED) as zstream:
await export_project(zstream, self, tmpdir, keep_compute_id=True, allow_all_nodes=True, reset_mac_addresses=reset_mac_addresses)
await export_project(zstream, self, tmpdir, keep_compute_ids=True, allow_all_nodes=True, reset_mac_addresses=reset_mac_addresses)
# export the project to a temporary location
project_path = os.path.join(tmpdir, "project.gns3p")
@ -1077,7 +1077,7 @@ class Project:
# import the temporary project
with open(project_path, "rb") as f:
project = await import_project(self._controller, str(uuid.uuid4()), f, location=location, name=name, keep_compute_id=True)
project = await import_project(self._controller, str(uuid.uuid4()), f, location=location, name=name, keep_compute_ids=True)
log.info("Project '{}' duplicated in {:.4f} seconds".format(project.name, time.time() - begin))
except (ValueError, OSError, UnicodeEncodeError) as e:

View File

@ -96,7 +96,7 @@ class Snapshot:
with tempfile.TemporaryDirectory(dir=snapshot_directory) as tmpdir:
# Do not compress the snapshots
with aiozipstream.ZipFile(compression=zipfile.ZIP_STORED) as zstream:
await export_project(zstream, self._project, tmpdir, keep_compute_id=True, allow_all_nodes=True)
await export_project(zstream, self._project, tmpdir, keep_compute_ids=True, allow_all_nodes=True)
async with aiofiles.open(self.path, 'wb') as f:
async for chunk in zstream:
await f.write(chunk)

View File

@ -319,6 +319,10 @@ class ProjectHandler:
reset_mac_addresses = True
else:
reset_mac_addresses = False
if request.query.get("keep_compute_ids", "no").lower() == "yes":
keep_compute_ids = True
else:
keep_compute_ids = False
compression_query = request.query.get("compression", "zip").lower()
if compression_query == "zip":
@ -336,9 +340,17 @@ class ProjectHandler:
working_dir = os.path.abspath(os.path.join(project.path, os.pardir))
with tempfile.TemporaryDirectory(dir=working_dir) as tmpdir:
with aiozipstream.ZipFile(compression=compression) as zstream:
await export_project(zstream, project, tmpdir, include_snapshots=include_snapshots, include_images=include_images, reset_mac_addresses=reset_mac_addresses)
await export_project(
zstream,
project,
tmpdir,
include_snapshots=include_snapshots,
include_images=include_images,
reset_mac_addresses=reset_mac_addresses,
keep_compute_ids=keep_compute_ids
)
# We need to do that now because export could failed and raise an HTTP error
# We need to do that now because export could fail and raise an HTTP error
# that is why starting the response needs to happen as late as possible
response.content_type = 'application/gns3project'
response.headers['CONTENT-DISPOSITION'] = 'attachment; filename="{}.gns3project"'.format(project.name)
@ -350,7 +362,7 @@ class ProjectHandler:
log.info("Project '{}' exported in {:.4f} seconds".format(project.name, time.time() - begin))
# Will be raise if you have no space left or permission issue on your temporary directory
# Will be raised if you have no space left or permission issue on your temporary directory
# RuntimeError: something was wrong during the zip process
except (ValueError, OSError, RuntimeError) as e:
raise aiohttp.web.HTTPNotFound(text="Cannot export project: {}".format(str(e)))

View File

@ -325,7 +325,7 @@ async def test_export_with_images(tmpdir, project):
myzip.getinfo("images/IOS/test.image")
async def test_export_keep_compute_id(tmpdir, project):
async def test_export_keep_compute_ids(tmpdir, project):
"""
If we want to restore the same computes we could ask to keep them
in the file
@ -354,7 +354,7 @@ async def test_export_keep_compute_id(tmpdir, project):
json.dump(data, f)
with aiozipstream.ZipFile() as z:
await export_project(z, project, str(tmpdir), keep_compute_id=True)
await export_project(z, project, str(tmpdir), keep_compute_ids=True)
await write_file(str(tmpdir / 'zipfile.zip'), z)
with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as myzip:
@ -458,7 +458,7 @@ async def test_export_with_ignoring_snapshots(tmpdir, project):
Path(os.path.join(snapshots_dir, 'snap.gns3project')).touch()
with aiozipstream.ZipFile() as z:
await export_project(z, project, str(tmpdir), keep_compute_id=True)
await export_project(z, project, str(tmpdir), keep_compute_ids=True)
await write_file(str(tmpdir / 'zipfile.zip'), z)
with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as myzip:

View File

@ -449,7 +449,7 @@ async def test_import_node_id(linux_platform, tmpdir, controller):
assert os.path.exists(os.path.join(project.path, "project-files", "iou", topo["topology"]["nodes"][0]["node_id"], "startup.cfg"))
async def test_import_keep_compute_id(windows_platform, tmpdir, controller):
async def test_import_keep_compute_ids(windows_platform, tmpdir, controller):
"""
On linux host IOU should be moved to the GNS3 VM
"""
@ -487,7 +487,7 @@ async def test_import_keep_compute_id(windows_platform, tmpdir, controller):
myzip.write(str(tmpdir / "project.gns3"), "project.gns3")
with open(zip_path, "rb") as f:
project = await import_project(controller, project_id, f, keep_compute_id=True)
project = await import_project(controller, project_id, f, keep_compute_ids=True)
with open(os.path.join(project.path, "test.gns3")) as f:
topo = json.load(f)