2015-06-17 18:11:25 +03:00
|
|
|
#
|
|
|
|
# Copyright (C) 2014 GNS3 Technologies Inc.
|
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
|
|
|
import os
|
|
|
|
import hashlib
|
2021-06-06 10:22:47 +03:00
|
|
|
import stat
|
|
|
|
import aiofiles
|
|
|
|
import shutil
|
2015-06-17 18:11:25 +03:00
|
|
|
|
2022-04-17 12:58:20 +03:00
|
|
|
from typing import List, AsyncGenerator
|
2016-06-07 20:38:01 +03:00
|
|
|
from ..config import Config
|
|
|
|
from . import force_unix_path
|
|
|
|
|
2021-06-06 10:22:47 +03:00
|
|
|
import gns3server.db.models as models
|
|
|
|
from gns3server.db.repositories.images import ImagesRepository
|
2022-04-17 12:58:20 +03:00
|
|
|
from gns3server.utils.asyncio import wait_run_in_executor
|
2016-06-07 20:38:01 +03:00
|
|
|
|
2015-07-16 19:56:36 +03:00
|
|
|
import logging
|
2021-04-13 12:16:50 +03:00
|
|
|
|
2015-07-16 19:56:36 +03:00
|
|
|
log = logging.getLogger(__name__)
|
2015-06-17 18:11:25 +03:00
|
|
|
|
2015-11-09 13:28:00 +02:00
|
|
|
|
2022-04-17 12:58:20 +03:00
|
|
|
async def list_images(image_type):
    """
    Scan directories for available image for a given type.

    :param image_type: image type (dynamips, qemu, iou)
    :returns: list of dicts with "filename", "path", "md5sum" and "filesize" keys
    """

    files = set()  # filenames already seen, to avoid duplicates across directories
    images = []

    server_config = Config.instance().settings.Server
    general_images_directory = os.path.expanduser(server_config.images_path)

    # Subfolder of the general_images_directory specific to this VM type
    default_directory = default_images_directory(image_type)

    for directory in images_directories(image_type):

        # We limit recursion to path outside the default images directory
        # the reason is in the default directory manage file organization and
        # it should be flatten to keep things simple
        recurse = True
        if os.path.commonprefix([directory, general_images_directory]) == general_images_directory:
            recurse = False

        directory = os.path.normpath(directory)
        for root, _, filenames in _os_walk(directory, recurse=recurse):
            for filename in filenames:
                if filename not in files:
                    # skip checksum cache files and hidden files
                    if filename.endswith(".md5sum") or filename.startswith("."):
                        continue
                    # per-emulator filename conventions: .image/.bin for dynamips,
                    # .bin or i86bi* for iou, anything else for qemu
                    elif (
                        ((filename.endswith(".image") or filename.endswith(".bin")) and image_type == "dynamips")
                        or ((filename.endswith(".bin") or filename.startswith("i86bi")) and image_type == "iou")
                        or (not filename.endswith(".bin") and not filename.endswith(".image") and image_type == "qemu")
                    ):
                        files.add(filename)

                        # If the image is located in the standard directory the path is relative
                        if os.path.commonprefix([root, default_directory]) != default_directory:
                            path = os.path.join(root, filename)
                        else:
                            path = os.path.relpath(os.path.join(root, filename), default_directory)

                        try:
                            if image_type in ["dynamips", "iou"]:
                                with open(os.path.join(root, filename), "rb") as f:
                                    # read the first 7 bytes of the file.
                                    elf_header_start = f.read(7)
                                # valid IOS images must start with the ELF magic number, be 32-bit, big endian and have an ELF version of 1
                                if (
                                    not elf_header_start == b"\x7fELF\x01\x02\x01"
                                    and not elf_header_start == b"\x7fELF\x01\x01\x01"
                                ):
                                    continue

                            images.append(
                                {
                                    "filename": filename,
                                    "path": force_unix_path(path),
                                    # md5sum hashes the whole file; run it in an executor so
                                    # the event loop is not blocked
                                    "md5sum": await wait_run_in_executor(md5sum, os.path.join(root, filename)),
                                    "filesize": os.stat(os.path.join(root, filename)).st_size,
                                }
                            )
                        except OSError as e:
                            log.warning(f"Can't add image {path}: {str(e)}")
    return images
|
|
|
|
|
|
|
|
|
2022-04-17 12:58:20 +03:00
|
|
|
async def read_image_info(path: str, expected_image_type: str = None) -> dict:
    """
    Read the header of an image file and return its metadata.

    :param path: path to the image file
    :param expected_image_type: if given, raise when the detected type differs from it
    :returns: dict with "image_name", "image_type", "image_size", "path",
        "checksum" and "checksum_algorithm" keys
    :raises InvalidImageError: when the file is unreadable, too small or of an unexpected type
    """

    header_magic_len = 7
    try:
        async with aiofiles.open(path, "rb") as image_file:
            # the first 7 bytes are enough to recognize every supported format
            header = await image_file.read(header_magic_len)
        if len(header) < header_magic_len:
            raise InvalidImageError(f"Image '{path}' is too small to be valid")
        detected_image_type = check_valid_image_header(header)
        if expected_image_type and detected_image_type != expected_image_type:
            raise InvalidImageError(f"Detected image type for '{path}' is {detected_image_type}, "
                                    f"expected type is {expected_image_type}")
    except OSError as e:
        raise InvalidImageError(f"Cannot read image '{path}': {e}")

    return {
        "image_name": os.path.basename(path),
        "image_type": detected_image_type,
        "image_size": os.stat(path).st_size,
        "path": path,
        # do not leave a .md5sum file behind: run the hashing off the event loop
        "checksum": await wait_run_in_executor(md5sum, path, cache_to_md5file=False),
        "checksum_algorithm": "md5",
    }
|
|
|
|
|
|
|
|
|
|
|
|
async def discover_images(image_type: str, skip_image_paths: list = None) -> List[dict]:
    """
    Scan directories for available images

    :param image_type: type of emulator (dynamips, iou, qemu)
    :param skip_image_paths: absolute paths to leave out of the result
    :returns: list of image info dicts as returned by read_image_info
    """

    seen_paths = set()
    images = []

    for directory in images_directories(image_type):
        for root, _, filenames in os.walk(os.path.normpath(directory)):
            for filename in filenames:
                # skip partial uploads, checksum cache files and hidden files
                if filename.endswith((".tmp", ".md5sum")) or filename.startswith("."):
                    continue
                path = os.path.join(root, filename)
                if not os.path.isfile(path):
                    continue
                if skip_image_paths and path in skip_image_paths:
                    continue
                if path in seen_paths:
                    continue
                if "/lib/" in path or "/lib64/" in path:
                    # ignore custom IOU libraries
                    continue
                seen_paths.add(path)

                try:
                    images.append(await read_image_info(path, image_type))
                except InvalidImageError as e:
                    # not a valid image of this type: skip it quietly
                    log.debug(str(e))
    return images
|
|
|
|
|
|
|
|
|
2016-11-28 20:49:50 +02:00
|
|
|
def _os_walk(directory, recurse=True, **kwargs):
|
|
|
|
"""
|
|
|
|
Work like os.walk but if recurse is False just list current directory
|
|
|
|
"""
|
|
|
|
if recurse:
|
|
|
|
for root, dirs, files in os.walk(directory, **kwargs):
|
|
|
|
yield root, dirs, files
|
|
|
|
else:
|
|
|
|
files = []
|
|
|
|
for filename in os.listdir(directory):
|
|
|
|
if os.path.isfile(os.path.join(directory, filename)):
|
|
|
|
files.append(filename)
|
|
|
|
yield directory, [], files
|
|
|
|
|
|
|
|
|
2021-06-06 10:22:47 +03:00
|
|
|
def default_images_directory(image_type):
    """
    :param image_type: image type (qemu, iou, dynamips or ios)
    :returns: Return the default directory for an image type.
    :raises NotImplementedError: when the image type is not supported
    """

    server_config = Config.instance().settings.Server
    img_dir = os.path.expanduser(server_config.images_path)
    if image_type == "qemu":
        return os.path.join(img_dir, "QEMU")
    elif image_type == "iou":
        return os.path.join(img_dir, "IOU")
    elif image_type == "dynamips" or image_type == "ios":
        return os.path.join(img_dir, "IOS")
    else:
        # fix: the original passed f"%s ..." plus a second positional argument,
        # so the "%s" placeholder was never interpolated into the message
        raise NotImplementedError(f"{image_type} node type is not supported")
|
2016-06-08 15:14:03 +03:00
|
|
|
|
|
|
|
|
2022-04-17 12:58:20 +03:00
|
|
|
def images_directories(image_type):
    """
    Return all directories where we will look for images
    by priority

    :param image_type: Type of emulator
    :returns: list of existing directories, unix-style paths
    """

    settings = Config.instance().settings.Server
    candidates = []

    # the type-specific default directory comes first; create it if possible
    type_directory = default_images_directory(image_type)
    try:
        os.makedirs(type_directory, exist_ok=True)
        candidates.append(type_directory)
    except (OSError, PermissionError):
        pass

    # then any user-configured additional paths
    candidates.extend(settings.additional_images_paths)

    # Compatibility with old topologies we look in parent directory
    candidates.append(os.path.expanduser(settings.images_path))

    # Return only the existing paths
    return [force_unix_path(candidate) for candidate in candidates if os.path.exists(candidate)]
|
|
|
|
|
|
|
|
|
2022-04-18 13:13:52 +03:00
|
|
|
def md5sum(path, working_dir=None, stopped_event=None, cache_to_md5file=True):
    """
    Return the md5sum of an image and cache it on disk

    :param path: Path to the image
    :param working_dir: where to store .md5sum files (next to the image when None)
    :param stopped_event: In case you execute this function on thread and would like to have possibility
    to cancel operation pass the `threading.Event`
    :param cache_to_md5file: write the digest to a .md5sum cache file for later reuse
    :returns: Digest of the image, or None when the path is missing/unreadable
    """

    if path is None or len(path) == 0 or not os.path.exists(path):
        return None

    if working_dir:
        md5sum_file = os.path.join(working_dir, os.path.basename(path) + ".md5sum")
    else:
        md5sum_file = path + ".md5sum"

    # fast path: reuse a previously cached digest when it looks valid
    try:
        with open(md5sum_file) as f:
            md5 = f.read().strip()
            if len(md5) == 32:
                return md5
    # Unicode error is when user rename an image to .md5sum ....
    except (OSError, UnicodeDecodeError):
        pass

    try:
        m = hashlib.md5()
        with open(path, "rb") as f:
            while True:
                if stopped_event is not None and stopped_event.is_set():
                    log.error(f"MD5 sum calculation of `{path}` has stopped due to cancellation")
                    return
                # fix: read 64 KiB per iteration instead of 1 KiB — the tiny reads
                # made hashing multi-gigabyte images needlessly slow (digest unchanged)
                buf = f.read(65536)
                if not buf:
                    break
                m.update(buf)
        digest = m.hexdigest()
    except OSError as e:
        log.error("Can't create digest of %s: %s", path, str(e))
        return None

    if cache_to_md5file:
        try:
            with open(md5sum_file, "w+") as f:
                f.write(digest)
        except OSError as e:
            log.error("Can't write digest of %s: %s", path, str(e))

    return digest
|
|
|
|
|
|
|
|
|
|
|
|
def remove_checksum(path):
    """
    Remove the checksum of an image from cache if exists

    :param path: path to the image (the ".md5sum" suffix is added here)
    """

    checksum_file = f"{path}.md5sum"
    if os.path.exists(checksum_file):
        os.remove(checksum_file)
|
2021-06-06 10:22:47 +03:00
|
|
|
|
|
|
|
|
|
|
|
class InvalidImageError(Exception):
    """
    Raised when an image file is missing, unreadable, too small or not of the expected format.
    """

    def __init__(self, message: str):
        # the message is kept on the instance rather than passed to Exception.args
        super().__init__()
        self._message = message

    def __str__(self):
        return self._message
|
|
|
|
|
|
|
|
|
2022-07-22 13:39:52 +03:00
|
|
|
def check_valid_image_header(data: bytes, allow_raw_image: bool = False) -> str:
    """
    Detect the image type from the first bytes of an image file.

    :param data: leading bytes of the file (at least 7 for ELF detection)
    :param allow_raw_image: treat unrecognized content as a raw Qemu disk image
    :returns: "ios", "iou" or "qemu"
    :raises InvalidImageError: when the header matches no known format and raw images are not allowed
    """

    elf_magic = data[:7]
    if elf_magic == b'\x7fELF\x01\x02\x01':
        # for IOS images: file must start with the ELF magic number, be 32-bit, big endian and have an ELF version of 1
        return "ios"
    if elf_magic in (b'\x7fELF\x01\x01\x01', b'\x7fELF\x02\x01\x01'):
        # for IOU images: file must start with the ELF magic number, be 32-bit or 64-bit, little endian and
        # have an ELF version of 1 (normal IOS images are big endian!)
        return "iou"
    if data[:4] in (b'QFI\xfb', b'KDMV'):
        # for Qemu images: file must be QCOW2 or VMDK
        return "qemu"
    if allow_raw_image is True:
        # anything else is accepted as a raw Qemu disk image
        return "qemu"
    raise InvalidImageError("Could not detect image type, please make sure it is a valid image")
|
2021-06-06 10:22:47 +03:00
|
|
|
|
|
|
|
|
|
|
|
async def write_image(
    image_filename: str,
    image_path: str,
    stream: AsyncGenerator[bytes, None],
    images_repo: ImagesRepository,
    check_image_header=True,
    allow_raw_image=False
) -> models.Image:
    """
    Write an uploaded image stream to disk and register it in the database.

    :param image_filename: filename as given by the uploader (may include a subdirectory)
    :param image_path: destination path for the image
    :param stream: async generator yielding the raw image bytes
    :param images_repo: repository used to check for duplicates and record the image
    :param check_image_header: validate the image type from the first chunk
    :param allow_raw_image: accept content with no recognized header as a raw Qemu image
    :returns: the database model of the stored image
    :raises InvalidImageError: when the content is empty/too small, of an invalid type,
        or a duplicate (same checksum) already exists in the same directory
    """

    image_dir, image_name = os.path.split(image_filename)
    log.info(f"Writing image file to '{image_path}'")
    # Store the file under its final name only when the upload is completed
    tmp_path = image_path + ".tmp"
    os.makedirs(os.path.dirname(image_path), exist_ok=True)
    checksum = hashlib.md5()
    header_magic_len = 7
    image_type = None
    try:
        async with aiofiles.open(tmp_path, "wb") as f:
            async for chunk in stream:
                # sniff the type from the first sufficiently large chunk only
                if check_image_header and len(chunk) >= header_magic_len:
                    check_image_header = False
                    image_type = check_valid_image_header(chunk, allow_raw_image)
                await f.write(chunk)
                checksum.update(chunk)

        image_size = os.path.getsize(tmp_path)
        if not image_size or image_size < header_magic_len:
            raise InvalidImageError("The image content is empty or too small to be valid")

        checksum = checksum.hexdigest()
        duplicate_image = await images_repo.get_image_by_checksum(checksum)
        # duplicates are only rejected within the same destination directory
        if duplicate_image and os.path.dirname(duplicate_image.path) == os.path.dirname(image_path):
            raise InvalidImageError(f"Image {duplicate_image.filename} with "
                                    f"same checksum already exists in the same directory")
        if not image_dir:
            # no subdirectory given: store under the default directory for the detected type
            directory = default_images_directory(image_type)
            os.makedirs(directory, exist_ok=True)
            image_path = os.path.abspath(os.path.join(directory, image_filename))
        shutil.move(tmp_path, image_path)
        os.chmod(image_path, stat.S_IWRITE | stat.S_IREAD | stat.S_IEXEC)
    finally:
        # best-effort cleanup of the temporary file on any failure before the move
        try:
            if os.path.exists(tmp_path):
                os.remove(tmp_path)
        except OSError:
            log.warning(f"Could not remove '{tmp_path}'")

    return await images_repo.add_image(
        image_name,
        image_type,
        image_size,
        image_path,
        checksum,
        checksum_algorithm="md5"
    )
|