2015-01-14 02:05:26 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
#
|
|
|
|
# Copyright (C) 2015 GNS3 Technologies Inc.
|
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
|
|
|
import re
|
|
|
|
import os.path
|
2016-07-11 16:36:52 +03:00
|
|
|
import json
|
2015-02-17 11:37:09 +02:00
|
|
|
import os
|
2015-01-14 02:05:26 +02:00
|
|
|
|
2015-01-20 16:21:13 +02:00
|
|
|
from gns3server.handlers import *
|
2015-01-14 13:32:56 +02:00
|
|
|
from gns3server.web.route import Route
|
2015-01-14 02:05:26 +02:00
|
|
|
|
|
|
|
|
2016-07-11 16:36:52 +03:00
|
|
|
class Documentation:

    """Extract API documentation as Sphinx compatible files."""

    def __init__(self, route, directory):
        """
        :param route: Route instance
        :param directory: Output directory
        """

        self._documentation = route.get_documentation()
        self._directory = directory

    def write(self):
        """Dump the .gns3 topology schema and build the doc pages for both server types."""

        with open(os.path.join(self._directory, "gns3_file.json"), "w+") as f:
            # Imported lazily to avoid a circular import at module load time
            from gns3server.schemas.topology import TOPOLOGY_SCHEMA

            print("Dump .gns3 schema")
            json.dump(TOPOLOGY_SCHEMA, f, indent=4)

        # Compute documentation
        self.write_documentation("compute")
        # Controller documentation
        self.write_documentation("controller")

    def write_documentation(self, doc_type):
        """
        Build all the doc pages for handlers.

        :param doc_type: Type of doc to generate (controller, compute)
        """

        for handler_name in sorted(self._documentation):
            server_type = self._server_type(handler_name)
            if doc_type != server_type:
                continue

            print("Build {}".format(handler_name))

            for path in sorted(self._documentation[handler_name]):
                handler_doc = self._documentation[handler_name][path]

                # Routes without an API version are internal and are not documented
                api_version = handler_doc["api_version"]
                if api_version is None:
                    continue

                filename = self._file_path(path)
                handler = handler_name.replace(server_type + ".", "")

                self._create_handler_directory(handler, api_version, server_type)
                page_path = "{}/api/v{}/{}/{}/{}.rst".format(self._directory, api_version, server_type, handler, filename)
                with open(page_path, 'w+') as f:
                    f.write('{}\n------------------------------------------------------------------------------------------------------------------------------------------\n\n'.format(path))
                    f.write('.. contents::\n')
                    for method in handler_doc["methods"]:
                        self._write_method(f, method, path, api_version, server_type)

    @staticmethod
    def _server_type(handler_name):
        """Return the server a handler belongs to: "controller", "compute" or "root"."""

        if "controller." in handler_name:
            return "controller"
        if "compute" in handler_name:
            return "compute"
        return "root"

    def _write_method(self, f, method, path, api_version, server_type):
        """Write the RST section documenting one HTTP method of a route."""

        # Emphasize URL parameters in the section title: {id} -> **{id}**
        f.write('\n{} {}\n'.format(method["method"], path.replace("{", '**{').replace("}", "}**")))
        f.write('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n')
        f.write('{}\n\n'.format(method["description"]))

        if len(method["parameters"]) > 0:
            f.write("Parameters\n**********\n")
            for parameter, desc in method["parameters"].items():
                f.write("- **{}**: {}\n".format(parameter, desc))
            f.write("\n")

        f.write("Response status codes\n**********************\n")
        for code, desc in method["status_codes"].items():
            f.write("- **{}**: {}\n".format(code, desc))
        f.write("\n")

        if "properties" in method["input_schema"]:
            f.write("Input\n*******\n")
            self._write_definitions(f, method["input_schema"])
            self._write_json_schema(f, method["input_schema"])

        if "properties" in method["output_schema"]:
            f.write("Output\n*******\n")
            self._write_json_schema(f, method["output_schema"])

        self._include_query_example(f, method, path, api_version, server_type)

    def _create_handler_directory(self, handler_name, api_version, server_type):
        """Create a directory for the handler and add an index inside."""

        directory = "{}/api/v{}/{}/{}".format(self._directory, api_version, server_type, handler_name)
        os.makedirs(directory, exist_ok=True)

        with open("{}/api/v{}/{}/{}.rst".format(self._directory, api_version, server_type, handler_name), "w+") as f:
            f.write(handler_name.replace("api.", "").replace("_", " ").capitalize())
            f.write("\n-----------------------------\n\n")
            # Three-space indent is required for the RST directive options
            f.write(".. toctree::\n   :glob:\n   :maxdepth: 2\n\n   {}/*\n".format(handler_name))

    def _include_query_example(self, f, method, path, api_version, server_type):
        """If a sample session is available we include it in documentation."""

        m = method["method"].lower()
        query_path = "{}_{}_{}.txt".format(server_type, m, self._file_path(path))
        if os.path.isfile(os.path.join(self._directory, "api", "examples", query_path)):
            f.write("Sample session\n***************\n")
            f.write("\n\n.. literalinclude:: ../../../examples/{}\n\n".format(query_path))

    def _file_path(self, path):
        """Turn an URL path into a flat, filesystem-safe file name."""

        path = path.replace("compute", "")
        path = path.replace("controller", "")
        # Drop every character that is not a lowercase letter or digit,
        # then strip the leading API version marker.
        return re.sub("^v2", "", re.sub(r"[^a-z0-9]", "", path))

    def _write_definitions(self, f, schema):
        """Document the schema sub-definitions ("Types"), then open the "Body" section."""

        if "definitions" in schema:
            f.write("Types\n+++++++++\n")
            for definition in sorted(schema['definitions']):
                desc = schema['definitions'][definition].get("description")
                f.write("{}\n^^^^^^^^^^^^^^^^^^^^^^\n{}\n\n".format(definition, desc))
                self._write_json_schema(f, schema['definitions'][definition])
            f.write("Body\n+++++++++\n")

    def _write_json_schema_object(self, f, obj):
        """
        Write one HTML table row per property of a JSON schema object.

        :param obj: current object in the JSON schema
        """

        for name in sorted(obj.get("properties", {})):
            prop = obj["properties"][name]

            mandatory = "✔" if name in obj.get("required", []) else " "

            if "enum" in prop:
                field_type = "enum"
                # NOTE: mutates the schema in place so the description column lists the allowed values
                prop['description'] = "Possible values: {}".format(', '.join(map(lambda a: a or "null", prop['enum'])))
            else:
                field_type = prop.get("type", "")

            # Resolve oneOf relations to their human readable type names.
            if field_type == 'object' and 'oneOf' in prop:
                field_type = ', '.join(map(lambda p: p['$ref'].split('/').pop(), prop['oneOf']))

            f.write("    <tr><td>{}</td><td>{}</td><td>{}</td><td>{}</td></tr>\n".format(
                name,
                mandatory,
                field_type,
                prop.get("description", "")))

    def _write_json_schema(self, f, schema):
        """Render a JSON schema as a raw HTML table."""

        # TODO: rewrite this using RST for portability
        f.write(".. raw:: html\n\n    <table>\n")
        f.write("    <tr><th>Name</th><th>Mandatory</th><th>Type</th><th>Description</th></tr>\n")
        self._write_json_schema_object(f, schema)
        f.write("    </table>\n\n")
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    # Script entry point: build the API reference into the local "docs" tree.
    print("Generate API documentation")
    generator = Documentation(Route, "docs")
    generator.write()
|