diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index a175ba1..8c98849 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -45,6 +45,7 @@ jobs: # Deployment job deploy: + if: github.ref == 'refs/heads/main' permissions: contents: read pages: write diff --git a/codegenerator/openapi/cinder.py b/codegenerator/openapi/cinder.py index 2087520..db3e78f 100644 --- a/codegenerator/openapi/cinder.py +++ b/codegenerator/openapi/cinder.py @@ -10,20 +10,15 @@ # License for the specific language governing permissions and limitations # under the License. # - +from multiprocessing import Process from pathlib import Path -from cinder import objects, rpc -from cinder.api.openstack import api_version_request -from cinder.common import config -from cinder.tests.unit.test import Database as db_fixture +from ruamel.yaml.scalarstring import LiteralScalarString + from codegenerator.common.schema import SpecSchema from codegenerator.common.schema import TypeSchema from codegenerator.openapi.base import OpenStackServerSourceBase from codegenerator.openapi.utils import merge_api_ref_doc -from ruamel.yaml.scalarstring import LiteralScalarString - -CONF = config.CONF class CinderV3Generator(OpenStackServerSourceBase): @@ -31,10 +26,34 @@ class CinderV3Generator(OpenStackServerSourceBase): "/versions": "version", } - def __init__(self): + def _api_ver_major(self, ver): + return ver._ver_major + + def _api_ver_minor(self, ver): + return ver._ver_minor + + def _api_ver(self, ver): + return (ver._ver_major, ver._ver_minor) + + def generate(self, target_dir, args): + proc = Process(target=self._generate, args=[target_dir, args]) + proc.start() + proc.join() + if proc.exitcode != 0: + raise RuntimeError("Error generating Cinder OpenAPI schema") + return Path(target_dir, "openapi_specs", "block-storage", "v3.yaml") + + def _generate(self, target_dir, args, *pargs, **kwargs): + from cinder import objects, rpc + from cinder.api.openstack import 
api_version_request + from cinder.common import config + from cinder.tests.unit.test import Database as db_fixture + # Register all Cinder objects objects.register_all() + CONF = config.CONF + self.api_version = api_version_request._MAX_API_VERSION self.min_api_version = api_version_request._MIN_API_VERSION @@ -49,16 +68,6 @@ def __init__(self): self.router = router.APIRouter() - def _api_ver_major(self, ver): - return ver._ver_major - - def _api_ver_minor(self, ver): - return ver._ver_minor - - def _api_ver(self, ver): - return (ver._ver_major, ver._ver_minor) - - def generate(self, target_dir, args, *pargs, **kwargs): work_dir = Path(target_dir) work_dir.mkdir(parents=True, exist_ok=True) diff --git a/codegenerator/openapi/glance.py b/codegenerator/openapi/glance.py index 8c9dc9c..23d9e40 100644 --- a/codegenerator/openapi/glance.py +++ b/codegenerator/openapi/glance.py @@ -11,26 +11,16 @@ # under the License. # import copy +from multiprocessing import Process from pathlib import Path +from jsonref import replace_refs import routes +from ruamel.yaml.scalarstring import LiteralScalarString + from codegenerator.common.schema import SpecSchema, TypeSchema from codegenerator.openapi.base import OpenStackServerSourceBase from codegenerator.openapi.utils import merge_api_ref_doc -from glance.api.v2 import image_members -from glance.api.v2 import images -from glance.api.v2 import metadef_namespaces -from glance.api.v2 import metadef_objects -from glance.api.v2 import metadef_properties -from glance.api.v2 import metadef_resource_types -from glance.api.v2 import metadef_tags -from glance.api.v2 import router -from glance.api.v2 import tasks -from glance.common import config -from glance import schema as glance_schema -from jsonref import replace_refs -from oslo_config import fixture as cfg_fixture -from ruamel.yaml.scalarstring import LiteralScalarString class GlanceGenerator(OpenStackServerSourceBase): @@ -42,12 +32,6 @@ def __init__(self): self.api_version = "2.16" 
self.min_api_version = None - self._config_fixture = self.useFixture(cfg_fixture.Config()) - - config.parse_args(args=[]) - - self.router = router.API(routes.Mapper()) - def _api_ver_major(self, ver): return ver.ver_major @@ -58,6 +42,24 @@ def _api_ver(self, ver): return (ver.ver_major, ver.ver_minor) def generate(self, target_dir, args): + proc = Process(target=self._generate, args=[target_dir, args]) + proc.start() + proc.join() + if proc.exitcode != 0: + raise RuntimeError("Error generating Glance OpenAPI schema") + return Path(target_dir, "openapi_specs", "image", "v2.yaml") + + def _generate(self, target_dir, args): + from glance.api.v2 import router + from glance.common import config + from oslo_config import fixture as cfg_fixture + + self._config_fixture = self.useFixture(cfg_fixture.Config()) + + config.parse_args(args=[]) + + self.router = router.API(routes.Mapper()) + work_dir = Path(target_dir) work_dir.mkdir(parents=True, exist_ok=True) @@ -109,6 +111,16 @@ def _get_schema_ref( schema_def=None, action_name=None, ): + from glance.api.v2 import image_members + from glance.api.v2 import images + from glance.api.v2 import metadef_namespaces + from glance.api.v2 import metadef_objects + from glance.api.v2 import metadef_properties + from glance.api.v2 import metadef_resource_types + from glance.api.v2 import metadef_tags + from glance.api.v2 import tasks + from glance import schema as glance_schema + if name == "TasksListResponse": openapi_spec.components.schemas.setdefault( name, diff --git a/codegenerator/openapi/keystone.py b/codegenerator/openapi/keystone.py index 9779cda..90b5ea7 100644 --- a/codegenerator/openapi/keystone.py +++ b/codegenerator/openapi/keystone.py @@ -11,21 +11,24 @@ # under the License. 
# import inspect +from multiprocessing import Process import logging from pathlib import Path +from ruamel.yaml.scalarstring import LiteralScalarString + +from keystone.assignment import schema as assignment_schema +from keystone.auth import schema as auth_schema +from keystone.identity import schema as identity_schema +from keystone.resource import schema as ks_schema + from codegenerator.common.schema import ParameterSchema from codegenerator.common.schema import PathSchema from codegenerator.common.schema import SpecSchema from codegenerator.common.schema import TypeSchema from codegenerator.openapi.base import OpenStackServerSourceBase from codegenerator.openapi.utils import merge_api_ref_doc -from keystone.assignment import schema as assignment_schema -from keystone.auth import schema as auth_schema -from keystone.identity import schema as identity_schema -from keystone.resource import schema as ks_schema -from keystone.server.flask import application -from ruamel.yaml.scalarstring import LiteralScalarString + PROJECT_SCHEMA = TypeSchema( type="object", @@ -221,9 +224,6 @@ def __init__(self): self.api_version = "3.0" self.min_api_version = "3.14" - self.app = application.application_factory() - self.router = self.app.url_map - def _api_ver_major(self, ver): return ver._ver_major @@ -233,7 +233,20 @@ def _api_ver_minor(self, ver): return ver._ver_minor def _api_ver(self, ver): return (ver._ver_major, ver._ver_minor) - def generate(self, target_dir, args, *pargs, **kwargs): + def generate(self, target_dir, args): + proc = Process(target=self._generate, args=[target_dir, args]) + proc.start() + proc.join() + if proc.exitcode != 0: + raise RuntimeError("Error generating Keystone OpenAPI schema") + return Path(target_dir, "openapi_specs", "identity", "v3.yaml") + + def _generate(self, target_dir, args, *pargs, **kwargs): + from keystone.server.flask import application + + self.app = application.application_factory() + self.router = self.app.url_map + work_dir = Path(target_dir) 
work_dir.mkdir(parents=True, exist_ok=True) diff --git a/codegenerator/openapi/neutron.py b/codegenerator/openapi/neutron.py index d4d0caf..e5f1324 100644 --- a/codegenerator/openapi/neutron.py +++ b/codegenerator/openapi/neutron.py @@ -11,11 +11,17 @@ # under the License. # import logging +from multiprocessing import Process, Manager from pathlib import Path import re import tempfile from typing import Any +from routes.base import Route +from ruamel.yaml.scalarstring import LiteralScalarString + +import sqlalchemy + from codegenerator.common.schema import ParameterSchema from codegenerator.common.schema import PathSchema from codegenerator.common.schema import SpecSchema @@ -23,19 +29,9 @@ from codegenerator.openapi.base import OpenStackServerSourceBase from codegenerator.openapi.base import VERSION_RE from codegenerator.openapi.utils import merge_api_ref_doc -from neutron.common import config as neutron_config -from neutron.conf.plugins.ml2 import config as ml2_config -from neutron import manager -from neutron.db import models # noqa -from oslo_config import cfg -from oslo_db import options as db_options -from routes.base import Route -from ruamel.yaml.scalarstring import LiteralScalarString -import sqlalchemy -class NeutronGenerator(OpenStackServerSourceBase): - PASTE_CONFIG = """ +PASTE_CONFIG = """ [composite:neutron] use = egg:Paste#urlmap # /: neutronversions_composite @@ -59,6 +55,8 @@ class NeutronGenerator(OpenStackServerSourceBase): paste.app_factory = neutron.api.v2.router:APIRouter.factory """ + +class NeutronGenerator(OpenStackServerSourceBase): URL_TAG_MAP = { "/agents/{agent_id}/dhcp-networks": "dhcp-agent-scheduler", "/agents": "networking-agents", @@ -71,21 +69,33 @@ def __init__(self): self.api_version = "2.0" self.min_api_version = "2.0" - self.PATH_MAP = {} - self.router = None - self.tempdir = tempfile.gettempdir() + # self.tempdir = tempfile.gettempdir() - self.setup_neutron() + def _build_neutron_db(self, tempdir): + db_path: str = 
f"sqlite:///{tempdir}/neutron.db" # noqa + engine = sqlalchemy.create_engine(db_path) + from neutron.db.migration.models import head - return + db_meta = head.get_metadata() + db_meta.create_all(engine) + return (db_path, engine) - def setup_neutron(self): - # Please somebody from Neutron: is there a way to have API app - # initialized without having DB and all the plugins enabled? + def process_base_neutron_routes(self, work_dir, processed_routes, args): + """Setup base Neutron with whatever is in the core""" + logging.info("Processing base Neutron") # Create the default configurations - db_options.set_defaults( - cfg.CONF, connection=f"sqlite:///{self.tempdir}/neutron.db" # noqa - ) + from neutron.common import config as neutron_config + from neutron.conf.plugins.ml2 import config as ml2_config + + from neutron.db import models # noqa + from neutron_lib import fixture + from oslo_config import cfg + from oslo_db import options as db_options + + tempdir = tempfile.gettempdir() + + fixture.RPCFixture().setUp() + neutron_config.register_common_config_options() ml2_config.register_ml2_plugin_opts() @@ -93,10 +103,10 @@ def setup_neutron(self): cfg.CONF.set_override("core_plugin", plugin) cfg.CONF.set_override( - "api_paste_config", Path(self.tempdir, "api-paste.ini.generator") + "api_paste_config", Path(tempdir, "api-paste.ini.generator") ) - with open(Path(self.tempdir, "api-paste.ini.generator"), "w") as fp: - fp.write(self.PASTE_CONFIG) + with open(Path(tempdir, "api-paste.ini.generator"), "w") as fp: + fp.write(PASTE_CONFIG) neutron_config.init([]) cfg.CONF.set_override( @@ -123,7 +133,6 @@ def setup_neutron(self): "local_ip", "ndp_proxy", ], - # group="ml2", ) cfg.CONF.set_override( "extension_drivers", @@ -146,29 +155,98 @@ def setup_neutron(self): ], group="ml2", ) + # Create the DB - self.engine = sqlalchemy.create_engine( - f"sqlite:///{self.tempdir}/neutron.db" # noqa + db_path, engine = self._build_neutron_db(tempdir) + db_options.set_defaults(cfg.CONF, 
connection=db_path) + + app_ = neutron_config.load_paste_app("neutron") + router = None + for i, w in app_.applications: + if hasattr(w, "_router"): + # We are only interested in the extensions app with a router + router = w._router + + # Raise an error to signal that we have not found a router + if not router: + raise NotImplementedError + + (impl_path, openapi_spec) = self._read_spec(work_dir) + self._process_router(router, openapi_spec, processed_routes) + + # Add base resource routes exposed as a pecan app + self._process_base_resource_routes(openapi_spec, processed_routes) + + self.dump_openapi(openapi_spec, impl_path, args.validate) + + def process_neutron_with_vpnaas(self, work_dir, processed_routes, args): + """Setup base Neutron with enabled vpnaas""" + logging.info("Processing Neutron with VPNaaS") + from neutron.common import config as neutron_config + from neutron.conf.plugins.ml2 import config as ml2_config + + from neutron.db import models # noqa + from neutron_lib import fixture + from neutron import manager # noqa + from oslo_config import cfg + from oslo_db import options as db_options + + fixture.RPCFixture().setUp() + tempdir = tempfile.gettempdir() + + neutron_config.register_common_config_options() + ml2_config.register_ml2_plugin_opts() + + plugin = "neutron.plugins.ml2.plugin.Ml2Plugin" + cfg.CONF.set_override("core_plugin", plugin) + + cfg.CONF.set_override( + "api_paste_config", Path(tempdir, "api-paste.ini.generator") ) - from neutron.db.migration.models import head + with open(Path(tempdir, "api-paste.ini.generator"), "w") as fp: + fp.write(PASTE_CONFIG) - self.db_meta = head.get_metadata() - self.db_meta.create_all(self.engine) + neutron_config.init([]) + cfg.CONF.set_override( + "service_plugins", + [ + "router", + "vpnaas", + ], + ) + cfg.CONF.set_override( + "service_provider", + [ + "VPN:dummy:neutron_vpnaas.tests.unit.dummy_ipsec.DummyIPsecVPNDriver:default", + ], + group="service_providers", + ) + # Create the DB + db_path, engine 
= self._build_neutron_db(tempdir) + db_options.set_defaults(cfg.CONF, connection=db_path) + + # Create VPNaaS DB tables + from neutron_vpnaas.db.models import head + + db_meta = head.get_metadata() + db_meta.create_all(engine) - self.app = neutron_config.load_paste_app("neutron") - for i, w in self.app.applications: + app_ = neutron_config.load_paste_app("neutron") + for i, w in app_.applications: if hasattr(w, "_router"): # We are only interested in the extensions app with a router - self.router = w._router + router = w._router - return - - def generate(self, target_dir, args): - openapi_tags: dict[str, Any] = dict() + # Raise an error to signal that we have not found a router + if not router: + raise NotImplementedError - work_dir = Path(target_dir) - work_dir.mkdir(parents=True, exist_ok=True) + (impl_path, openapi_spec) = self._read_spec(work_dir) + self._process_router(router, openapi_spec, processed_routes) + self.dump_openapi(openapi_spec, impl_path, args.validate) + def _read_spec(self, work_dir): + """Read the spec from file or create an empty one""" impl_path = Path(work_dir, "openapi_specs", "network", "v2.yaml") impl_path.parent.mkdir(parents=True, exist_ok=True) openapi_spec = self.load_openapi(Path(impl_path)) @@ -198,14 +276,60 @@ def generate(self, target_dir, args): schemas={}, ), ) - # Neutron router duplicates certain routes. We need to skip such entries + return (impl_path, openapi_spec) - self._processed_routes = set() + def generate(self, target_dir, args): + work_dir = Path(target_dir) + work_dir.mkdir(parents=True, exist_ok=True) - if not self.router: - raise NotImplementedError + # NOTE(gtema): call me paranoid or stupid, but I just gave up fighting + # against oslo_config and oslo_policy with their global state. It is + # just too painful and takes too much precious time. On multiple + # invocation with different config there are plenty of things remaining + # in the old state. 
In order to workaround this just process in + different processes. + with Manager() as manager: + # Since we may process same route multiple times we need to have a + # shared state + processed_routes = manager.dict() + # Base Neutron + p = Process( + target=self.process_base_neutron_routes, + args=[work_dir, processed_routes, args], + ) + p.start() + p.join() + if p.exitcode != 0: + raise RuntimeError("Error generating Neutron OpenAPI schema") + + # VPNaaS + p = Process( + target=self.process_neutron_with_vpnaas, + args=[work_dir, processed_routes, args], + ) + p.start() + p.join() + if p.exitcode != 0: + raise RuntimeError("Error generating Neutron OpenAPI schema") + + (impl_path, openapi_spec) = self._read_spec(work_dir) - for route in self.router.mapper.matchlist: + # post processing cleanup of the spec + self._sanitize_param_ver_info(openapi_spec, self.min_api_version) + + # merge descriptions from api-ref doc + if args.api_ref_src: + merge_api_ref_doc( + openapi_spec, args.api_ref_src, allow_strip_version=False + ) + + self.dump_openapi(openapi_spec, Path(impl_path), args.validate) + + return impl_path + + def _process_router(self, router, openapi_spec, processed_routes): + """Scan through the routes exposed on a router""" + for route in router.mapper.matchlist: if route.routepath.endswith(".:(format)"): continue # if route.routepath != "/networks": @@ -233,12 +357,16 @@ def generate(self, target_dir, args): route.routepath.endswith("/tags") and route.conditions["method"][0] == "POST" ): - logging.warn( + logging.warning( "Skipping processing POST %s route", route.routepath ) continue - self._process_route(route, openapi_spec, openapi_tags) + self._process_route(route, openapi_spec, processed_routes) + + def _process_base_resource_routes(self, openapi_spec, processed_routes): + """Process base resources exposed through Pecan""" + from neutron import manager mgr = manager.NeutronManager.get_instance() # Nets/subnets/ports are base resources (non extension). 
They are thus @@ -259,7 +387,7 @@ def generate(self, target_dir, args): _member_name=res, ), openapi_spec, - openapi_tags, + processed_routes, controller=mgr.get_controller_for_resource(coll), ) for coll, res in [ @@ -282,7 +410,7 @@ def generate(self, target_dir, args): _member_name=res, ), openapi_spec, - openapi_tags, + processed_routes, controller=mgr.get_controller_for_resource(coll), ) self._process_route( @@ -295,26 +423,15 @@ def generate(self, target_dir, args): _member_name=res, ), openapi_spec, - openapi_tags, + processed_routes, controller=mgr.get_controller_for_resource("ports"), ) - self._sanitize_param_ver_info(openapi_spec, self.min_api_version) - - if args.api_ref_src: - merge_api_ref_doc( - openapi_spec, args.api_ref_src, allow_strip_version=False - ) - - self.dump_openapi(openapi_spec, Path(impl_path), args.validate) - - return impl_path - def _process_route( self, route, openapi_spec, - openapi_tags, + processed_routes, controller=None, ver_prefix="/v2.0", ): @@ -352,10 +469,10 @@ def _process_route( processed_key = f"{path}:{method}:{action}" # noqa # Some routes in Neutron are duplicated. 
We need to skip them since # otherwise we may duplicate query parameters which are just a list - if processed_key not in self._processed_routes: - self._processed_routes.add(processed_key) + if processed_key not in processed_routes: + processed_routes[processed_key] = 1 else: - logging.warn("Skipping duplicated route %s", processed_key) + logging.warning("Skipping duplicated route %s", processed_key) return logging.info( @@ -656,7 +773,7 @@ def _get_schema_ref( "SubnetpoolsOnboard_Network_SubnetsOnboard_Network_SubnetsRequest", "SubnetpoolsOnboard_Network_SubnetsOnboard_Network_SubnetsResponse", ]: - logging.warn("TODO: provide schema description for %s", name) + logging.warning("TODO: provide schema description for %s", name) # And now basic CRUD operations, those take whichever info is available in Controller._attr_info @@ -692,7 +809,7 @@ def _get_schema_ref( } } else: - logging.warn("No Schema information for %s" % name) + logging.warning("No Schema information for %s" % name) return f"#/components/schemas/{name}" @@ -823,7 +940,9 @@ def get_schema(param_data): }, } elif "type:list_of_any_key_specs_or_none" in validate: - logging.warn("TODO: Implement type:list_of_any_key_specs_or_none") + logging.warning( + "TODO: Implement type:list_of_any_key_specs_or_none" + ) schema = { "type": "array", "items": { @@ -897,7 +1016,12 @@ def get_schema(param_data): "type": "string", }, } - + elif "type:dict_or_nodata" in validate: + schema = get_schema(validate["type:dict_or_nodata"]) + elif "type:dict_or_empty" in validate: + schema = get_schema(validate["type:dict_or_empty"]) + elif "type:list_of_subnets_or_none" in validate: + schema = {"type": "array", "items": {"type": "string"}} else: raise RuntimeError( "Unsupported type %s in %s" % (validate, param_data) @@ -914,7 +1038,7 @@ def get_schema(param_data): elif convert_to.__name__ == "convert_to_int_if_not_none": schema = {"type": ["string", "integer", "null"]} else: - logging.warn( + logging.warning( "Unsupported 
conversion function %s used", convert_to.__name__ ) diff --git a/codegenerator/openapi/nova.py b/codegenerator/openapi/nova.py index da8ce88..97adb0c 100644 --- a/codegenerator/openapi/nova.py +++ b/codegenerator/openapi/nova.py @@ -10,8 +10,11 @@ # License for the specific language governing permissions and limitations # under the License. # +from multiprocessing import Process from pathlib import Path +from ruamel.yaml.scalarstring import LiteralScalarString + from codegenerator.common.schema import ( SpecSchema, TypeSchema, @@ -21,11 +24,6 @@ from codegenerator.openapi.base import OpenStackServerSourceBase from codegenerator.openapi import nova_schemas from codegenerator.openapi.utils import merge_api_ref_doc -from nova.api.openstack import api_version_request -from nova.api.openstack.compute import routes -from nova.api.openstack.compute.schemas import flavor_manage -from nova.tests import fixtures as nova_fixtures -from ruamel.yaml.scalarstring import LiteralScalarString class NovaGenerator(OpenStackServerSourceBase): @@ -39,14 +37,6 @@ class NovaGenerator(OpenStackServerSourceBase): "/servers/{server_id}/tags": "server-tags", } - def __init__(self): - pass - self.useFixture(nova_fixtures.RPCFixture("nova.test")) - self.api_version = api_version_request._MAX_API_VERSION - self.min_api_version = api_version_request._MIN_API_VERSION - - self.router = routes.APIRouterV21() - def _api_ver_major(self, ver): return ver.ver_major @@ -56,7 +46,17 @@ def _api_ver_minor(self, ver): def _api_ver(self, ver): return (ver.ver_major, ver.ver_minor) - def generate(self, target_dir, args): + def _generate(self, target_dir, args): + from nova.api.openstack import api_version_request + from nova.api.openstack.compute import routes + from nova.tests import fixtures as nova_fixtures + + self.api_version = api_version_request._MAX_API_VERSION + self.min_api_version = api_version_request._MIN_API_VERSION + + self.useFixture(nova_fixtures.RPCFixture("nova.test")) + self.router = 
routes.APIRouterV21() + work_dir = Path(target_dir) work_dir.mkdir(parents=True, exist_ok=True) @@ -107,6 +107,14 @@ def generate(self, target_dir, args): return impl_path + def generate(self, target_dir, args): + proc = Process(target=self._generate, args=[target_dir, args]) + proc.start() + proc.join() + if proc.exitcode != 0: + raise RuntimeError("Error generating Compute OpenAPI schema") + return Path(target_dir, "openapi_specs", "compute", "v2.yaml") + def _get_param_ref( self, openapi_spec, @@ -145,6 +153,8 @@ def _get_schema_ref( schema_def=None, action_name=None, ): + from nova.api.openstack.compute.schemas import flavor_manage + schema = None # NOTE(gtema): This must go away once scemas are merged directly to # Nova diff --git a/codegenerator/openapi/octavia.py b/codegenerator/openapi/octavia.py index dc49bfc..596fda6 100644 --- a/codegenerator/openapi/octavia.py +++ b/codegenerator/openapi/octavia.py @@ -11,26 +11,16 @@ # under the License. # import inspect +from multiprocessing import Process from pathlib import Path from unittest import mock import fixtures + from codegenerator.common.schema import SpecSchema from codegenerator.openapi.base import OpenStackServerSourceBase from codegenerator.openapi.utils import merge_api_ref_doc -from octavia.api import root_controller -from octavia.common import config, rpc -from octavia.api.v2.controllers import amphora -from octavia.api.v2.controllers import l7rule -from octavia.api.v2.controllers import listener -from octavia.api.v2.controllers import load_balancer -from octavia.api.v2.controllers import member -from octavia.api.v2.controllers import provider -from oslo_config import cfg -import oslo_messaging as messaging -from oslo_messaging import conffixture as messaging_conffixture -from pecan import make_app as pecan_make_app -from routes import Mapper + from ruamel.yaml.scalarstring import LiteralScalarString @@ -56,27 +46,11 @@ class OctaviaGenerator(OpenStackServerSourceBase): def __init__(self): 
self.api_version = "2.27" self.min_api_version = "2.0" - config.register_cli_opts() - - self._buses = {} - - self.messaging_conf = messaging_conffixture.ConfFixture(cfg.CONF) - self.messaging_conf.transport_url = "fake:/" - self.useFixture(self.messaging_conf) - self.useFixture( - fixtures.MonkeyPatch( - "octavia.common.rpc.create_transport", - self._fake_create_transport, - ) - ) - with mock.patch("octavia.common.rpc.get_transport_url") as mock_gtu: - mock_gtu.return_value = None - rpc.init() - - self.app = pecan_make_app(root_controller.RootController()) - self.root = self.app.application.root def _fake_create_transport(self, url): + import oslo_messaging as messaging + from oslo_config import cfg + if url not in self._buses: self._buses[url] = messaging.get_rpc_transport(cfg.CONF, url=url) return self._buses[url] @@ -170,6 +144,29 @@ def _build_routes(self, mapper, node, path=""): return def generate(self, target_dir, args): + proc = Process(target=self._generate, args=[target_dir, args]) + proc.start() + proc.join() + if proc.exitcode != 0: + raise RuntimeError("Error generating Octavia OpenAPI schema") + return Path(target_dir, "openapi_specs", "load-balancing", "v2.yaml") + + def _generate(self, target_dir, args): + from octavia.api import root_controller + from octavia.common import config, rpc + from octavia.api.v2.controllers import amphora + from octavia.api.v2.controllers import l7rule + from octavia.api.v2.controllers import listener + from octavia.api.v2.controllers import load_balancer + from octavia.api.v2.controllers import member + from octavia.api.v2.controllers import provider + from oslo_config import cfg + + # import oslo_messaging as messaging + from oslo_messaging import conffixture as messaging_conffixture + from pecan import make_app as pecan_make_app + from routes import Mapper + work_dir = Path(target_dir) work_dir.mkdir(parents=True, exist_ok=True) @@ -199,6 +196,25 @@ def generate(self, target_dir, args): }, ), ) + 
config.register_cli_opts() + + self._buses = {} + + self.messaging_conf = messaging_conffixture.ConfFixture(cfg.CONF) + self.messaging_conf.transport_url = "fake:/" + self.useFixture(self.messaging_conf) + self.useFixture( + fixtures.MonkeyPatch( + "octavia.common.rpc.create_transport", + self._fake_create_transport, + ) + ) + with mock.patch("octavia.common.rpc.get_transport_url") as mock_gtu: + mock_gtu.return_value = None + rpc.init() + + self.app = pecan_make_app(root_controller.RootController()) + self.root = self.app.application.root mapper = Mapper() diff --git a/doc/source/openapi.rst b/doc/source/openapi.rst index ba364fa..46c5b77 100644 --- a/doc/source/openapi.rst +++ b/doc/source/openapi.rst @@ -79,6 +79,12 @@ After processing when api-ref html is available a dedicated method :class:`~codegenerator.openapi.utils.merge_api_ref_doc` can be called to add available descriptions (operation, parameters). +.. note:: + Since all services use `oslo_config` and `oslo_policy` libraries which rely + on global state they race with each other. In order to avoid this processing + rely on multiprocessing to isolate services. + + Nova ---- @@ -132,37 +138,36 @@ Neutron This is where things are getting more challenging. Neutron requires having DB provisioned and an in-memory DB seems not to be -possible due to technics for the DB communication. In addition to that -config file enabling desired extensions is expected. All this activities are -covered in -:class:`~codegenrator.openapi.neutron.NeutronGenerator:setup_neutron`. +possible due to technics for the DB communication. In addition to that config +file enabling desired extensions is expected. All this activities are covered +in :class:`~codegenrator.openapi.neutron.NeutronGenerator:setup_neutron`. According to the current information it is not possible to have all possible -Neutron extensions and plugins (and Staduim projects, whatever that is) -enabled at the same time. 
This can be only solved by splitting main -`generate` method to be splinning up Neutron few times with independent -configurations and merging resulting spec. +Neutron extensions and plugins enabled at the same time. This is solved by +generator spinning multiple subprocesses that bootstrap Neutron with different +configuration and then merge results. This is handled by spinning up Neutron +few times with independent configurations and merging resulting spec. Additional challenge in Neutron is that it does not use `routes` to expose operations directly, but is having a mix of `routes` based operations for extensions and `pecan` app for the base functionality. Since the `pecan` framework is based on a purely dynamic routing there is no possibility to -extract information about exposed routes by doing code inspection. Luckily -only base operations (router/net/subnet) are implemented this way. Therefore +extract information about exposed routes by doing code inspection. Luckily only +base operations (router/net/subnet) are implemented this way. Therefore generator registers known `pecan` operations into the extensions router and normal generator flow is being invoked. Next challenge is that for Neutron there is no description of bodies at all, -but certain controllers are having `API_DEFINITION` attached. While this is -not a jsonschema at all it can be used to create one where possible. Sadly -there is still sometime no possibility to properly estimate whether certain -operation is exposed and functioning or it is exposed but fails permanently -due to the fact, that `API_DEFINITION` extrapolation fails for this -operation. :class:`~codegenerator.openapi.neutron.get_schema` method is -responsible for conversion of the `API_DEFINITION` into the jsonschema, but -is not able to work perfectly until additional work is invested. - -Certain additional operations (addRouterInterface, addExtraRoute, ...) 
are -not having any information available and require to be also hardcodede in the +but certain controllers are having `API_DEFINITION` attached. While this is not +a jsonschema at all it can be used to create one where possible. Sadly there is +still sometimes no possibility to properly estimate whether certain operation is +exposed and functioning or it is exposed but fails permanently due to the fact, +that `API_DEFINITION` extrapolation fails for this operation. +:class:`~codegenerator.openapi.neutron.get_schema` method is responsible for +conversion of the `API_DEFINITION` into the jsonschema, but is not able to work +perfectly until additional work is invested. + +Certain additional operations (addRouterInterface, addExtraRoute, ...) are not +having any information available and require to be also hardcoded in the generator. diff --git a/requirements.txt b/requirements.txt index acc4de6..7731ce5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,6 +12,7 @@ oslotest[spec] nova[spec] glance[spec] neutron[spec] +neutron-vpnaas[spec] keystone[spec] cinder[spec] octavia[spec] diff --git a/tox.ini b/tox.ini index 7742524..2c0b917 100644 --- a/tox.ini +++ b/tox.ini @@ -6,17 +6,15 @@ ignore_basepython_conflict=True [testenv] description = Run unit tests. 
-usedevelop = True -# Fedora is having py312 default (not removable) but most openstack things dont support it yet -# basepython = {env:TOX_PYTHON:/usr/bin/python3.11} +package = editable passenv = setenv = LANG=en_US.UTF-8 LANGUAGE=en_US:en +install_command = python -I -m pip install -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} {opts} {packages} deps = - -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} -r{toxinidir}/test-requirements.txt - -r{toxinidir}/requirements.txt +# -r{toxinidir}/requirements.txt commands = stestr run {posargs} stestr slowest @@ -30,14 +28,15 @@ commands = # Generators conflict with each other since every service is screwing # OsloConfig to its own flavor and I have not found a working way to deal with # that except of physically isolating them - stestr --test-path ./codegenerator/tests/functional/ run --serial --isolated {posargs} + stestr --test-path ./codegenerator/tests/functional/ run {posargs} stestr slowest [testenv:docs{,-py311}] description = Build documentation in HTML format. +# We do not want the package (because of heavy dependencies for docs) +skip_install = True deps = - -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} -r{toxinidir}/doc/requirements.txt commands = sphinx-build -W --keep-going -b html -j auto doc/source/ doc/build/html