diff --git a/.gitignore b/.gitignore index cfa1c1303..0d881acd0 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,5 @@ _trial_temp/ apidocs/ *.egg-info .eggs +__pycache__ +.hypothesis \ No newline at end of file diff --git a/docs/tests/test_twisted_docs.py b/docs/tests/test_twisted_docs.py index 339802219..2a5af1386 100644 --- a/docs/tests/test_twisted_docs.py +++ b/docs/tests/test_twisted_docs.py @@ -25,10 +25,10 @@ def test_IPAddress_implementations() -> None: assert all(impl in page for impl in show_up), page # Test for https://github.com/twisted/pydoctor/issues/505 -def test_web_template_api() -> None: +def test_some_apis() -> None: """ This test ensures all important members of the twisted.web.template - module are documented at the right place + module are documented at the right place, and other APIs exist as well. """ exists = ['twisted.web.template.Tag.html', @@ -39,7 +39,8 @@ def test_web_template_api() -> None: 'twisted.web.template.TagLoader.html', 'twisted.web.template.XMLString.html', 'twisted.web.template.XMLFile.html', - 'twisted.web.template.Element.html',] + 'twisted.web.template.Element.html', + 'twisted.internet.ssl.DistinguishedName.html'] for e in exists: assert (BASE_DIR / e).exists(), f"{e} not found" diff --git a/pydoctor/astbuilder.py b/pydoctor/astbuilder.py index 00fbe0e8f..e1b83ab5c 100644 --- a/pydoctor/astbuilder.py +++ b/pydoctor/astbuilder.py @@ -2,6 +2,7 @@ from __future__ import annotations import ast +from collections import defaultdict import sys from functools import partial @@ -13,13 +14,13 @@ Type, TypeVar, Union, cast ) +import attr from pydoctor import epydoc2stan, model, node2stan, extensions, linker from pydoctor.epydoc.markup._pyval_repr import colorize_inline_pyval from pydoctor.astutils import (is_none_literal, is_typing_annotation, is_using_annotations, is_using_typing_final, node2dottedname, node2fullname, is__name__equals__main__, unstring_annotation, iterassign, extract_docstring_linenum, infer_type, get_parents, get_docstring_node, unparse, NodeVisitor, Parentage, Str) - def parseFile(path: Path) -> ast.Module: """Parse the contents of a Python source file.""" with open(path, 'rb') as f: @@ -166,6 +167,218 @@ def extract_final_subscript(annotation: ast.Subscript) -> ast.expr: assert isinstance(ann_slice, ast.expr) return ann_slice +def _resolveReExportTarget(origin_module:model.Module, origin_name:str, + new_parent:model.Module, linenumber:int) -> Optional[model.Documentable]: + # In case of duplicates names, we can't rely on resolveName, + # So we use content.get first to resolve non-alias names. + ob = origin_module.contents.get(origin_name) or origin_module.resolveName(origin_name) + if ob is None: + new_parent.report("cannot resolve re-exported name: " + f'\'{origin_module.fullName()}.{origin_name}\'', lineno_offset=linenumber) + return ob + +def _handleReExport(info:'ReExport', elsewhere:Collection['ReExport']) -> None: + """ + Move re-exported objects into module C{new_parent}. 
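+
+    Reparent C{target} into C{new_parent} under the name C{as_name}, and record
+    the alternative re-export locations given in C{elsewhere} in the C{exported}
+    mapping of their modules, so the name still shows up in their childtables.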
+ """ + new_parent = info.new_parent + target = info.target + as_name = info.as_name + target_parent = target.parent + + if target_parent is None: + # TODO: warn because we can't reparent a root module + return None + + # Remember that this name is re-exported + target_parent.exported[target.name] = target + + extra_msg = '' + + for e in elsewhere: + e.new_parent.exported[e.as_name] = target + + if not extra_msg: + extra_msg += ', also available at ' + extra_msg += f"'{e.new_parent.fullName()}.{e.as_name}'" + else: + extra_msg += f" and '{e.new_parent.fullName()}.{e.as_name}'" + + if as_name != target.name: + new_parent.system.msg( + "astbuilder", + f"moving {target.fullName()!r} into {new_parent.fullName()!r} as {as_name!r}{extra_msg}") + else: + new_parent.system.msg( + "astbuilder", + f"moving {target.fullName()!r} into {new_parent.fullName()!r}{extra_msg}") + + target.reparent(new_parent, as_name) + + +def getModuleExports(mod:'model.Module') -> Collection[str]: + # Fetch names to export. + exports = mod.all + if exports is None: + exports = [] + return exports + +def getPublicNames(mod: model.Module) -> Collection[str]: + """ + Get all names to import when wildcardm importing the given module: + use __all__ if available, otherwise take all names that are not private. + """ + names = mod.all + if names is None: + names = [ + name + for name in chain(mod.contents.keys(), + mod._localNameToFullName_map.keys()) + if not name.startswith('_') + ] + return names + +@attr.s(auto_attribs=True, slots=True) +class ReExport: + new_parent: model.Module + as_name: str + origin_module: model.Module + target: model.Documentable + +def _exports_order(r:ReExport) -> tuple[int, int, int]: + return (-r.new_parent.privacyClass.value, + r.new_parent.fullName().count('.'), + -len(r.as_name)) + + +def _maybeExistingNameOverridesImport(mod:model.Module, local_name:str, + imp:model.Import, target:model.Documentable) -> bool: + if local_name in mod.contents: + existing = mod.contents[local_name] + # The imported name already exists in the locals, we test the linenumbers to + # know whether the import should override the local name. We could do better if + # integrate with better static analysis like def-use chains. + if (not isinstance(existing, model.Module) and # modules are always shadowed by members + mod.contents[local_name].linenumber > imp.linenumber): + mod.report(f"not moving {target.fullName()} into {mod.fullName()}, " + f"because {local_name!r} is defined at line {existing.linenumber}", + lineno_offset=imp.linenumber, + thresh=-1) + return True + return False + +def processReExports(system:'model.System') -> None: + # first gather all export infos, clean them up + # and apply them at the end. + reexports: List[ReExport] = [] + + for mod in system.objectsOfType(model.Module): + exports = getModuleExports(mod) + for imported_name in mod.imports: + local_name = imported_name.name + orgname = imported_name.orgname + orgmodule = imported_name.orgmodule + if local_name != '*' and (not orgname or local_name not in exports): + continue + origin = system.modules.get(orgmodule) or system.allobjects.get(orgmodule) + if isinstance(origin, model.Module): + if local_name != '*': + # only 'import from' statements can be used in re-exporting currently. 
+ if orgname: + target = _resolveReExportTarget(origin, orgname, + mod, imported_name.linenumber) + if target: + if _maybeExistingNameOverridesImport(mod, local_name, imported_name, target): + continue + reexports.append( + ReExport(mod, local_name, origin, target) + ) + else: + for n in getPublicNames(origin): + if n in exports: + target = _resolveReExportTarget(origin, n, mod, imported_name.linenumber) + if target: + if _maybeExistingNameOverridesImport(mod, n, imported_name, target): + continue + reexports.append( + ReExport(mod, n, origin, target) + ) + elif orgmodule.split('.', 1)[0] in system.root_names: + msg = f"cannot resolve origin module of re-exported name: {orgname or local_name!r}" + if orgname and local_name!=orgname: + msg += f" as {local_name!r}" + msg += f" from origin module {imported_name.orgmodule!r}" + mod.report(msg, lineno_offset=imported_name.linenumber) + + exports_per_target:Dict[model.Documentable, List[ReExport]] = defaultdict(list) + for r in reexports: + exports_per_target[r.target].append(r) + + for target, _exports in exports_per_target.items(): + elsewhere = [] + + if isinstance(target.parent, model.Module) and target.parent.all is not None \ + and target.name in target.parent.all \ + and target.parent.privacyClass is model.PrivacyClass.PUBLIC: + + target.system.msg( + "astbuilder", + f"not moving {target.fullName()} into {' or '.join(repr(e.new_parent.fullName()) for e in _exports)}, " + f"because {target.name!r} is already exported in public module {target.parent.fullName()!r}") + + for e in _exports: + e.new_parent.exported[e.as_name] = target + + continue + + assert len(_exports) > 0 + if len(_exports) > 1: + # when an object has several re-exports, the public module with the lowest number + # of dot in it's name is choosen, if there is an equality, the longer local name + # is choosen + + _exports.sort(key=_exports_order) + elsewhere.extend(_exports[1:]) + + reexport = _exports[0] + _handleReExport(reexport, elsewhere) + +def postProcessClasses(system: model.System) -> None: + for cls in system.objectsOfType(model.Class): + # Initiate the MROs + cls._init_mro() + # Lookup of constructors + cls._init_constructors() + + # Compute subclasses + for b in cls.baseobjects: + if b is not None: + b.subclasses.append(cls) + + # Checking whether the class is an exception + if model.is_exception(cls): + cls.kind = model.DocumentableKind.EXCEPTION + +def postProcessAttributes(system:model.System) -> None: + for attrib in system.objectsOfType(model.Attribute): + _inherits_instance_variable_kind(attrib) + +def _inherits_instance_variable_kind(attr: model.Attribute) -> None: + """ + If any of the inherited members of a class variable is an instance variable, + then the subclass' class variable become an instance variable as well. + """ + if attr.kind is not model.DocumentableKind.CLASS_VARIABLE: + return + docsources = attr.docsources() + next(docsources) + for inherited in docsources: + if inherited.kind is model.DocumentableKind.INSTANCE_VARIABLE: + attr.kind = model.DocumentableKind.INSTANCE_VARIABLE + break + +# main ast visitor + class ModuleVistor(NodeVisitor): def __init__(self, builder: 'ASTBuilder', module: model.Module): @@ -207,6 +420,7 @@ def visit_Module(self, node: ast.Module) -> None: def depart_Module(self, node: ast.Module) -> None: self._infer_attr_annotations(self.builder.current) self.builder.pop(self.module) + epydoc2stan.transform_parsed_names(self.module) def visit_ClassDef(self, node: ast.ClassDef) -> None: # Ignore classes within functions. 
@@ -327,101 +541,49 @@ def visit_ImportFrom(self, node: ast.ImportFrom) -> None: assert modname is not None if node.names[0].name == '*': - self._importAll(modname) + self._importAll(modname, linenumber=node.lineno) else: - self._importNames(modname, node.names) + self._importNames(modname, node.names, linenumber=node.lineno) - def _importAll(self, modname: str) -> None: + def _importAll(self, modname: str, linenumber:int) -> None: """Handle a C{from import *} statement.""" - + ctx = self.builder.current + if isinstance(ctx, model.Module): + ctx.imports.append(model.Import('*', modname, + linenumber=linenumber, orgname='*')) + mod = self.system.getProcessedModule(modname) if mod is None: # We don't have any information about the module, so we don't know # what names to import. - self.builder.current.report(f"import * from unknown {modname}", thresh=1) + ctx.report(f"import * from unknown module {modname!r}", thresh=1, lineno_offset=linenumber) return - - self.builder.current.report(f"import * from {modname}", thresh=1) + + if mod.state is model.ProcessingState.PROCESSING: + ctx.report(f"import * from partially processed module {modname!r}", + thresh=1, lineno_offset=linenumber) # Get names to import: use __all__ if available, otherwise take all # names that are not private. - names = mod.all - if names is None: - names = [ - name - for name in chain(mod.contents.keys(), - mod._localNameToFullName_map.keys()) - if not name.startswith('_') - ] - - # Fetch names to export. - exports = self._getCurrentModuleExports() + names = getPublicNames(mod) # Add imported names to our module namespace. - assert isinstance(self.builder.current, model.CanContainImportsDocumentable) - _localNameToFullName = self.builder.current._localNameToFullName_map + assert isinstance(ctx, model.CanContainImportsDocumentable) + _localNameToFullName = ctx._localNameToFullName_map expandName = mod.expandName for name in names: - - if self._handleReExport(exports, name, name, mod) is True: - continue - _localNameToFullName[name] = expandName(name) - def _getCurrentModuleExports(self) -> Collection[str]: - # Fetch names to export. - current = self.builder.current - if isinstance(current, model.Module): - exports = current.all - if exports is None: - exports = [] - else: - # Don't export names imported inside classes or functions. - exports = [] - return exports - - def _handleReExport(self, curr_mod_exports:Collection[str], - origin_name:str, as_name:str, - origin_module:model.Module) -> bool: - """ - Move re-exported objects into current module. - - @returns: True if the imported name has been sucessfully re-exported. - """ - # Move re-exported objects into current module. - current = self.builder.current - modname = origin_module.fullName() - if as_name in curr_mod_exports: - # In case of duplicates names, we can't rely on resolveName, - # So we use content.get first to resolve non-alias names. - ob = origin_module.contents.get(origin_name) or origin_module.resolveName(origin_name) - if ob is None: - current.report("cannot resolve re-exported name :" - f'{modname}.{origin_name}', thresh=1) - else: - if origin_module.all is None or origin_name not in origin_module.all: - self.system.msg( - "astbuilder", - "moving %r into %r" % (ob.fullName(), current.fullName()) - ) - # Must be a Module since the exports is set to an empty list if it's not. 
- assert isinstance(current, model.Module) - ob.reparent(current, as_name) - return True - return False - - def _importNames(self, modname: str, names: Iterable[ast.alias]) -> None: + def _importNames(self, modname: str, names: Iterable[ast.alias], linenumber:int) -> None: """Handle a C{from import } statement.""" # Process the module we're importing from. mod = self.system.getProcessedModule(modname) - # Fetch names to export. - exports = self._getCurrentModuleExports() - current = self.builder.current assert isinstance(current, model.CanContainImportsDocumentable) _localNameToFullName = current._localNameToFullName_map + is_module = isinstance(current, model.Module) for al in names: orgname, asname = al.name, al.asname if asname is None: @@ -430,10 +592,12 @@ def _importNames(self, modname: str, names: Iterable[ast.alias]) -> None: # are processed (getProcessedModule() ignores non-modules). if isinstance(mod, model.Package): self.system.getProcessedModule(f'{modname}.{orgname}') - if mod is not None and self._handleReExport(exports, orgname, asname, mod) is True: - continue - + _localNameToFullName[asname] = f'{modname}.{orgname}' + if is_module: + cast(model.Module, + current).imports.append(model.Import(asname, modname, + orgname=orgname, linenumber=linenumber)) def visit_Import(self, node: ast.Import) -> None: """Process an import statement. @@ -448,16 +612,23 @@ def visit_Import(self, node: ast.Import) -> None: (dotted_name, as_name) where as_name is None if there was no 'as foo' part of the statement. """ - if not isinstance(self.builder.current, model.CanContainImportsDocumentable): + ctx = self.builder.current + if not isinstance(ctx, model.CanContainImportsDocumentable): # processing import statement in odd context return - _localNameToFullName = self.builder.current._localNameToFullName_map + _localNameToFullName = ctx._localNameToFullName_map + is_module = isinstance(ctx, model.Module) + for al in node.names: targetname, asname = al.name, al.asname if asname is None: # we're keeping track of all defined names asname = targetname = targetname.split('.')[0] _localNameToFullName[asname] = targetname + if is_module: + cast(model.Module, + ctx).imports.append(model.Import(asname, targetname, + linenumber=node.lineno)) def _handleOldSchoolMethodDecoration(self, target: str, expr: Optional[ast.expr]) -> bool: if not isinstance(expr, ast.Call): @@ -1020,12 +1191,12 @@ class _ValueFormatter: """ def __init__(self, value: ast.expr, ctx: model.Documentable): - self._colorized = colorize_inline_pyval(value) + self.parsed = colorize_inline_pyval(value) """ The colorized value as L{ParsedDocstring}. """ - self._linker = ctx.docstring_linker + self.linker = ctx.docstring_linker """ Linker. """ @@ -1038,7 +1209,7 @@ def __repr__(self) -> str: # Using node2stan.node2html instead of flatten(to_stan()). # This avoids calling flatten() twice, # but potential XML parser errors caused by XMLString needs to be handled later. 
- return ''.join(node2stan.node2html(self._colorized.to_node(), self._linker)) + return ''.join(node2stan.node2html(self.parsed.to_node(), self.linker)) class _AnnotationValueFormatter(_ValueFormatter): """ @@ -1046,7 +1217,7 @@ class _AnnotationValueFormatter(_ValueFormatter): """ def __init__(self, value: ast.expr, ctx: model.Function): super().__init__(value, ctx) - self._linker = linker._AnnotationLinker(ctx) + self.linker = linker._AnnotationLinker(ctx) def __repr__(self) -> str: """ @@ -1059,6 +1230,8 @@ def __repr__(self) -> str: class ASTBuilder: """ Keeps tracks of the state of the AST build, creates documentable and adds objects to the system. + + One ASTBuilder instance is only suitable to build one Module. """ ModuleVistor = ModuleVistor @@ -1288,4 +1461,6 @@ def parseDocformat(node: ast.Assign, mod: model.Module) -> None: def setup_pydoctor_extension(r:extensions.ExtRegistrar) -> None: r.register_astbuilder_visitor(TypeAliasVisitorExt) - r.register_post_processor(model.defaultPostProcess, priority=200) + r.register_post_processor(processReExports, priority=250) + r.register_post_processor(postProcessClasses, priority=200) + r.register_post_processor(postProcessAttributes, priority=200) diff --git a/pydoctor/epydoc/markup/_types.py b/pydoctor/epydoc/markup/_types.py index 005f80a0c..d420e8ea0 100644 --- a/pydoctor/epydoc/markup/_types.py +++ b/pydoctor/epydoc/markup/_types.py @@ -14,6 +14,7 @@ from docutils import nodes from twisted.web.template import Tag, tags +# TODO: this class should support to_node() like others. class ParsedTypeDocstring(TypeDocstring, ParsedDocstring): """ Add L{ParsedDocstring} interface on top of L{TypeDocstring} and diff --git a/pydoctor/epydoc2stan.py b/pydoctor/epydoc2stan.py index 395c8bf25..37a1b8a66 100644 --- a/pydoctor/epydoc2stan.py +++ b/pydoctor/epydoc2stan.py @@ -5,16 +5,21 @@ from collections import defaultdict import enum +import inspect +import builtins +from itertools import chain from typing import ( TYPE_CHECKING, Any, Callable, ClassVar, DefaultDict, Dict, Generator, - Iterator, List, Mapping, Optional, Sequence, Tuple, Union, + Iterator, List, Mapping, Optional, Sequence, Tuple, TypeVar, Union, ) -import ast import re import attr +from docutils.transforms import Transform +from docutils import nodes from pydoctor import model, linker, node2stan +from pydoctor.node2stan import parse_reference from pydoctor.astutils import is_none_literal from pydoctor.epydoc.markup import Field as EpydocField, ParseError, get_parser_by_name, processtypes from twisted.web.template import Tag, tags @@ -266,18 +271,19 @@ def __init__(self, obj: model.Documentable): self.sinces: List[Field] = [] self.unknowns: DefaultDict[str, List[FieldDesc]] = defaultdict(list) - def set_param_types_from_annotations( - self, annotations: Mapping[str, Optional[ast.expr]] - ) -> None: + def set_param_types_from_annotations(self) -> None: + if not isinstance(self.obj, model.Function): + return + annotations = self.obj.annotations _linker = linker._AnnotationLinker(self.obj) formatted_annotations = { - name: None if value is None - else ParamType(safe_to_stan(colorize_inline_pyval(value), _linker, + name: None if parsed_annotation is None + else ParamType(safe_to_stan(parsed_annotation, _linker, self.obj, fallback=colorized_pyval_fallback, section='annotation', report=False), # don't spam the log, invalid annotation are going to be reported when the signature gets colorized origin=FieldOrigin.FROM_AST) - for name, value in annotations.items() + for name, parsed_annotation 
in get_parsed_annotations(self.obj).items() } ret_type = formatted_annotations.pop('return', None) @@ -794,8 +800,7 @@ def format_docstring(obj: model.Documentable) -> Tag: ret(unwrap_docstring_stan(stan)) fh = FieldHandler(obj) - if isinstance(obj, model.Function): - fh.set_param_types_from_annotations(obj.annotations) + fh.set_param_types_from_annotations() if source is not None: assert obj.parsed_docstring is not None, "ensure_parsed_docstring() did not do it's job" for field in obj.parsed_docstring.fields: @@ -874,20 +879,90 @@ def type2stan(obj: model.Documentable) -> Optional[Tag]: return safe_to_stan(parsed_type, _linker, obj, fallback=colorized_pyval_fallback, section='annotation') +_T = TypeVar('_T') +def _memoize(o:object, attrname:str, getter:Callable[[], _T]) -> _T: + parsed = getattr(o, attrname, None) + if parsed is not None: + return parsed #type:ignore + parsed = getter() + setattr(o, attrname, parsed) + return parsed + def get_parsed_type(obj: model.Documentable) -> Optional[ParsedDocstring]: """ Get the type of this attribute as parsed docstring. """ - parsed_type = obj.parsed_type - if parsed_type is not None: - return parsed_type + def _get_parsed_type() -> Optional[ParsedDocstring]: + annotation = getattr(obj, 'annotation', None) + if annotation is not None: + v = colorize_inline_pyval(annotation) + reportWarnings(obj, v.warnings, section='colorize annotation') + return v + return None + return _memoize(obj, 'parsed_type', _get_parsed_type) - # Only Attribute instances have the 'annotation' attribute. - annotation: Optional[ast.expr] = getattr(obj, 'annotation', None) - if annotation is not None: - return colorize_inline_pyval(annotation) +def get_parsed_decorators(obj: Union[model.Attribute, model.Function, + model.FunctionOverload]) -> Optional[Sequence[ParsedDocstring]]: + """ + Get the decorators of this function as parsed docstring. + """ + def _get_parsed_decorators() -> Optional[Sequence[ParsedDocstring]]: + v = [colorize_inline_pyval(dec) for dec in obj.decorators] if \ + obj.decorators is not None else None + documentable_obj = obj if not isinstance(obj, model.FunctionOverload) else obj.primary + for c in v or (): + if c: + reportWarnings(documentable_obj, c.warnings, section='colorize decorators') + return v + return _memoize(obj, 'parsed_decorators', _get_parsed_decorators) + +def get_parsed_value(obj:model.Attribute) -> Optional[ParsedDocstring]: + """ + Get the value of this constant as parsed docstring. + """ + def _get_parsed_value() -> Optional[ParsedDocstring]: + v = colorize_pyval(obj.value, + linelen=obj.system.options.pyvalreprlinelen, + maxlines=obj.system.options.pyvalreprmaxlines) if obj.value is not None else None + # Report eventual warnings. + if v: + reportWarnings(obj, v.warnings, section='colorize constant') + return v + return _memoize(obj, 'parsed_value', _get_parsed_value) + +def get_parsed_annotations(obj:model.Function) -> Mapping[str, Optional[ParsedDocstring]]: + """ + Get the annotations of this function as dict from str to parsed docstring. + """ + def _get_parsed_annotations() -> Mapping[str, Optional[ParsedDocstring]]: + return {name:colorize_inline_pyval(ann) if ann else None for \ + (name, ann) in obj.annotations.items()} + # do not warn here + return _memoize(obj, 'parsed_annotations', _get_parsed_annotations) - return None +def get_parsed_bases(obj:model.Class) -> Sequence[ParsedDocstring]: + """ + Get the bases of this class as a seqeunce of parsed docstrings. 
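+    The result is memoized on the C{parsed_bases} attribute of the class.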
+ """ + def _get_parsed_bases() -> Sequence[ParsedDocstring]: + r = [] + for (str_base, base_node), base_obj in zip(obj.rawbases, obj.baseobjects): + # Make sure we bypass the linker’s resolver process for base object, + # because it has been resolved already (with two passes). + # Otherwise, since the class declaration wins over the imported names, + # a class with the same name as a base class confused pydoctor and it would link + # to it self: https://github.com/twisted/pydoctor/issues/662 + refmap = None + if base_obj is not None: + refmap = {str_base:base_obj.fullName()} + + # link to external class, using the colorizer here + # to link to classes with generics (subscripts and other AST expr). + p = colorize_inline_pyval(base_node, refmap=refmap) + r.append(p) + reportWarnings(obj, p.warnings, section='colorize bases') + return r + return _memoize(obj, 'parsed_bases', _get_parsed_bases) def format_toc(obj: model.Documentable) -> Optional[Tag]: # Load the parsed_docstring if it's not already done. @@ -985,23 +1060,19 @@ def colorized_pyval_fallback(_: List[ParseError], doc:ParsedDocstring, __:model. return Tag('code')(node2stan.gettext(doc.to_node())) def _format_constant_value(obj: model.Attribute) -> Iterator["Flattenable"]: - + doc = get_parsed_value(obj) + if doc is None: + return + # yield the table title, "Value" row = tags.tr(class_="fieldStart") row(tags.td(class_="fieldName")("Value")) # yield the first row. yield row - doc = colorize_pyval(obj.value, - linelen=obj.system.options.pyvalreprlinelen, - maxlines=obj.system.options.pyvalreprmaxlines) - value_repr = safe_to_stan(doc, obj.docstring_linker, obj, fallback=colorized_pyval_fallback, section='rendering of constant') - # Report eventual warnings. It warns when a regex failed to parse. - reportWarnings(obj, doc.warnings, section='colorize constant') - # yield the value repr. row = tags.tr() row(tags.td(tags.pre(class_='constant-value')(value_repr))) @@ -1146,6 +1217,127 @@ def populate_constructors_extra_info(cls:model.Class) -> None: extra_epytext += ', ' short_text = format_constructor_short_text(c, cls) extra_epytext += '`%s <%s>`' % (short_text, c.fullName()) - cls.extra_info.append(parse_docstring( cls, extra_epytext, cls, 'restructuredtext', section='constructor extra')) + +_builtin_names = set(dir(builtins)) + +class _ReferenceTransform(Transform): + + def __init__(self, document:nodes.document, + ctx:'model.Documentable', is_annotation:bool): + super().__init__(document) + self.ctx = ctx + self.module = ctx.module + self.is_annotation = is_annotation + + def _transform(self, node:nodes.title_reference) -> None: + ctx = self.ctx + module = self.module + _, target = parse_reference(node) + # we're setting two attributes here: 'refuri' and 'rawtarget'. + # 'refuri' might already be created by the colorizer or docstring parser, + # but 'rawtarget' is only created from within this transform, so we can + # use that information to ensure this process is only ever applied once + # per title_reference element. + attribs = node.attributes + if target == attribs.get('refuri', target) and 'rawtarget' not in attribs: + # save the raw target name + attribs['rawtarget'] = target + name, *rest = target.split('.') + is_name_defined = ctx.isNameDefined(name) + # check if it's a non-shadowed builtins + if not is_name_defined and name in _builtin_names: + # transform bare builtin name into builtins. 
+ attribs['refuri'] = '.'.join(('builtins', name, *rest)) + return + # no-op for unbound name + if not is_name_defined: + attribs['refuri'] = target + return + # kindda duplicate a little part of the annotation linker logic here, + # there are no simple way of doing it otherwise at the moment. + # Once all presented parsed elements are stored as Documentable attributes + # we might be able to simply use that and drop the use of the annotation linker, + # but for now this will do the trick: + lookup_context = ctx + if self.is_annotation and ctx is not module and module.isNameDefined(name, + only_locals=True) and ctx.isNameDefined(name, only_locals=True): + # If we're dealing with an annotation, give precedence to the module's + # lookup (wrt PEP 563) + lookup_context = module + linker.warn_ambiguous_annotation(module, ctx, target) + # save pre-resolved refuri + attribs['refuri'] = '.'.join(chain(lookup_context.expandName(name).split('.'), rest)) + + def apply(self) -> None: + for node in self.document.findall(nodes.title_reference): + self._transform(node) + + +def _apply_reference_transform(doc:ParsedDocstring, ctx:'model.Documentable', + is_annotation:bool=False) -> None: + """ + Runs L{_ReferenceTransform} on the underlying docutils document. + No-op if L{to_node} raises L{NotImplementedError}. + """ + try: + document = doc.to_node() + except NotImplementedError: + return + else: + _ReferenceTransform(document, ctx, is_annotation).apply() + +def transform_parsed_names(node:'model.Module') -> None: + """ + Walk this module's content and apply in-place transformations to the + L{ParsedDocstring} instances that olds L{obj_reference} or L{nodes.title_reference} nodes. + + Fixing "Lookup of name in annotation fails on reparented object #295". + The fix is not 100% complete at the moment: attribute values and decorators + are not handled. 
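+
+    This runs once per module, from L{ModuleVistor.depart_Module}, once the
+    module's AST has been fully visited.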
+ """ + from pydoctor import model, astbuilder + # resolve names early when possible + for ob in model.walk(node): + # resolve names in parsed_docstring, do not forget field bodies + if ob.parsed_docstring: + _apply_reference_transform(ob.parsed_docstring, ob) + for f in ob.parsed_docstring.fields: + _apply_reference_transform(f.body(), ob) + if isinstance(ob, model.Function): + if ob.signature: + for p in ob.signature.parameters.values(): + ann = p.annotation if p.annotation is not inspect.Parameter.empty else None + if isinstance(ann, astbuilder._ValueFormatter): + _apply_reference_transform(ann.parsed, ob, is_annotation=True) + default = p.default if p.default is not inspect.Parameter.empty else None + if isinstance(default, astbuilder._ValueFormatter): + _apply_reference_transform(default.parsed, ob) + for _,ann in get_parsed_annotations(ob).items(): + if ann: + _apply_reference_transform(ann, ob, is_annotation=True) + for dec in get_parsed_decorators(ob) or (): + if dec: + _apply_reference_transform(dec, ob) + for overload in ob.overloads: + for dec in get_parsed_decorators(overload) or (): + if dec: + _apply_reference_transform(dec, ob) + elif isinstance(ob, model.Attribute): + # resolve attribute annotation with parsed_type attribute + parsed_type = get_parsed_type(ob) + if parsed_type: + _apply_reference_transform(parsed_type, ob, is_annotation=True) + if ob.kind in ob.system.show_attr_value: + parsed_value = get_parsed_value(ob) + if parsed_value: + _apply_reference_transform(parsed_value, ob) + for dec in get_parsed_decorators(ob) or (): + if dec: + _apply_reference_transform(dec, ob) + elif isinstance(ob, model.Class): + for base in get_parsed_bases(ob): + _apply_reference_transform(base, ob) + +# TODO: do one test with parsed type docstrings diff --git a/pydoctor/linker.py b/pydoctor/linker.py index f403e64dc..1b66d6b8b 100644 --- a/pydoctor/linker.py +++ b/pydoctor/linker.py @@ -134,9 +134,13 @@ def look_for_intersphinx(self, name: str) -> Optional[str]: def link_to(self, identifier: str, label: "Flattenable") -> Tag: fullID = self.obj.expandName(identifier) - target = self.obj.system.objForFullName(fullID) - if target is not None: - return taglink(target, self.page_url, label) + try: + target = self.obj.system.find_object(fullID) + except LookupError: + pass + else: + if target is not None: + return taglink(target, self.page_url, label) url = self.look_for_intersphinx(fullID) if url is not None: @@ -185,8 +189,18 @@ def _resolve_identifier_xref(self, if target is not None: return target - # Check if the fullID exists in an intersphinx inventory. fullID = self.obj.expandName(identifier) + + # Try fetching the name with it's outdated fullname + try: + target = self.obj.system.find_object(fullID) + except LookupError: + pass + else: + if target is not None: + return target + + # Check if the fullID exists in an intersphinx inventory. target_url = self.look_for_intersphinx(fullID) if not target_url: # FIXME: https://github.com/twisted/pydoctor/issues/125 @@ -239,6 +253,19 @@ def _resolve_identifier_xref(self, self.reporting_obj.report(message, 'resolve_identifier_xref', lineno) raise LookupError(identifier) +def warn_ambiguous_annotation(mod:'model.Documentable', + obj:'model.Documentable', + target:str) -> None: + # report a low-level message about ambiguous annotation + mod_ann = mod.expandName(target) + obj_ann = obj.expandName(target) + if mod_ann != obj_ann and '.' in obj_ann and '.' 
in mod_ann: + obj.report( + f'ambiguous annotation {target!r}, could be interpreted as ' + f'{obj_ann!r} instead of {mod_ann!r}', section='annotation', + thresh=1 + ) + class _AnnotationLinker(DocstringLinker): """ Specialized linker to resolve annotations attached to the given L{Documentable}. @@ -256,22 +283,11 @@ def __init__(self, obj:'model.Documentable') -> None: @property def obj(self) -> 'model.Documentable': return self._obj - - def warn_ambiguous_annotation(self, target:str) -> None: - # report a low-level message about ambiguous annotation - mod_ann = self._module.expandName(target) - obj_ann = self._scope.expandName(target) - if mod_ann != obj_ann and '.' in obj_ann and '.' in mod_ann: - self.obj.report( - f'ambiguous annotation {target!r}, could be interpreted as ' - f'{obj_ann!r} instead of {mod_ann!r}', section='annotation', - thresh=1 - ) def link_to(self, target: str, label: "Flattenable") -> Tag: with self.switch_context(self._obj): if self._module.isNameDefined(target): - self.warn_ambiguous_annotation(target) + warn_ambiguous_annotation(self._module, self._obj, target) return self._module_linker.link_to(target, label) elif self._scope.isNameDefined(target): return self._scope_linker.link_to(target, label) diff --git a/pydoctor/model.py b/pydoctor/model.py index 31c30ac91..c4dce767f 100644 --- a/pydoctor/model.py +++ b/pydoctor/model.py @@ -13,7 +13,6 @@ from collections import defaultdict import datetime import importlib -import platform import sys import textwrap import types @@ -53,12 +52,6 @@ # Functions can't contain anything. -_string_lineno_is_end = sys.version_info < (3,8) \ - and platform.python_implementation() != 'PyPy' -"""True iff the 'lineno' attribute of an AST string node points to the last -line in the string, rather than the first line. -""" - class LineFromAst(int): "Simple L{int} wrapper for linenumbers coming from ast analysis." @@ -117,6 +110,19 @@ class DocumentableKind(Enum): PROPERTY = 150 VARIABLE = 100 +def walk(node:'Documentable') -> Iterator['Documentable']: + """ + Recursively yield all descendant nodes in the tree starting at *node* + (including *node* itself), in no specified order. This is useful if you + only want to modify nodes in place and don't care about the context. + """ + from collections import deque + todo = deque([node]) + while todo: + node = todo.popleft() + todo.extend(node.contents.values()) + yield node + class Documentable: """An object that can be documented. @@ -264,14 +270,30 @@ def docsources(self) -> Iterator['Documentable']: def reparent(self, new_parent: 'Module', new_name: str) -> None: + """ + Move this documentable to a new location. + """ + + old_name = self.name + new_contents = new_parent.contents + + # issue warnings + if new_name in new_contents: + self.system.handleDuplicate(new_contents[new_name]) + self.report(f"introduced by re-exporting {self} into {new_parent}" + '' if new_name==old_name else f' as {new_name!r}', thresh=1) + # this code attempts to preserve "rather a lot" of # invariants assumed by various bits of pydoctor # and that are of course not written down anywhere # :/ - self._handle_reparenting_pre() + # Basically we maintain at least 2 references for each object in the system + # one in it's parent.contents dict and one in allobject dict. The later has been proven + # not to be necessary, but it speeds-up name resolving. + self._handle_reparenting_pre() # but why do we call this method twice? 
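+        # _handle_reparenting_pre() drops this object (and, recursively, its
+        # contents) from system.allobjects under the old full name; the matching
+        # _handle_reparenting_post() call below re-registers it under the new one.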
old_parent = self.parent assert isinstance(old_parent, CanContainImportsDocumentable) - old_name = self.name + self.parent = self.parentMod = new_parent self.name = new_name self._handle_reparenting_post() @@ -293,7 +315,7 @@ def _handle_reparenting_post(self) -> None: def _localNameToFullName(self, name: str) -> str: raise NotImplementedError(self._localNameToFullName) - def isNameDefined(self, name:str) -> bool: + def isNameDefined(self, name:str, only_locals:bool=False) -> bool: """ Is the given name defined in the globals/locals of self-context? Only the first name of a dotted name is checked. @@ -389,7 +411,8 @@ def module(self) -> 'Module': def report(self, descr: str, section: str = 'parsing', lineno_offset: int = 0, thresh:int=-1) -> None: """ - Log an error or warning about this documentable object. + Log an error or warning about this documentable object. + A reported message will only be printed once. @param descr: The error/warning string @param section: What the warning is about. @@ -414,7 +437,8 @@ def report(self, descr: str, section: str = 'parsing', lineno_offset: int = 0, t self.system.msg( section, f'{self.description}:{linenumber}: {descr}', - thresh=thresh) + # some warnings can be reported more that once. + thresh=thresh, once=True) @property def docstring_linker(self) -> 'linker.DocstringLinker': @@ -432,18 +456,41 @@ class CanContainImportsDocumentable(Documentable): def setup(self) -> None: super().setup() self._localNameToFullName_map: Dict[str, str] = {} + """ + Mapping from local names to fullnames: Powers name resolving. + """ + + self.exported: Dict[str, 'Documentable'] = {} + """ + When pydoctor re-export objects, it leaves references to object in this dict + so they can still be listed in childtable of origin modules or classes. This attribute belongs + to the "view model" part of Documentable interface and should only be used to present + links to these objects. Not to do name resolving. + """ - def isNameDefined(self, name: str) -> bool: + def isNameDefined(self, name: str, only_locals:bool=False) -> bool: name = name.split('.')[0] if name in self.contents: return True if name in self._localNameToFullName_map: return True - if not isinstance(self, Module): + if not isinstance(self, Module) and not only_locals: return self.module.isNameDefined(name) else: return False + +@attr.s(auto_attribs=True, slots=True) +class Import: + """ + An imported name. + @note: One L{Import} instance is created for each + name bound in the C{import} statement. + """ + name:str + orgmodule:str + linenumber:int + orgname:Optional[str]=None class Module(CanContainImportsDocumentable): kind = DocumentableKind.MODULE @@ -479,6 +526,8 @@ def setup(self) -> None: self._docformat: Optional[str] = None + self.imports: List[Import] = [] + def _localNameToFullName(self, name: str) -> str: if name in self.contents: o: Documentable = self.contents[name] @@ -653,7 +702,9 @@ def setup(self) -> None: """ self._initialbases: List[str] = [] self._initialbaseobjects: List[Optional['Class']] = [] - + + self.parsed_bases:Optional[List[ParsedDocstring]] = None + def _init_mro(self) -> None: """ Compute the correct value of the method resolution order returned by L{mro()}. 
@@ -833,8 +884,8 @@ def docsources(self) -> Iterator[Documentable]: def _localNameToFullName(self, name: str) -> str: return self.parent._localNameToFullName(name) - def isNameDefined(self, name: str) -> bool: - return self.parent.isNameDefined(name) + def isNameDefined(self, name: str, only_locals:bool=False) -> bool: + return self.parent.isNameDefined(name, only_locals=only_locals) class Function(Inheritable): kind = DocumentableKind.FUNCTION @@ -850,6 +901,8 @@ def setup(self) -> None: self.kind = DocumentableKind.METHOD self.signature = None self.overloads = [] + self.parsed_decorators:Optional[Sequence[ParsedDocstring]] = None + self.parsed_annotations:Optional[Dict[str, Optional[ParsedDocstring]]] = None @attr.s(auto_attribs=True) class FunctionOverload: @@ -859,6 +912,7 @@ class FunctionOverload: primary: Function signature: Signature decorators: Sequence[ast.expr] + parsed_decorators:Optional[Sequence[ParsedDocstring]] = None class Attribute(Inheritable): kind: Optional[DocumentableKind] = DocumentableKind.ATTRIBUTE @@ -870,6 +924,8 @@ class Attribute(Inheritable): None value means the value is not initialized at the current point of the the process. """ + parsed_decorators:Optional[Sequence[ParsedDocstring]] = None + parsed_value:Optional[ParsedDocstring] = None # Work around the attributes of the same name within the System class. _ModuleT = Module @@ -936,6 +992,7 @@ class System: """ def __init__(self, options: Optional['Options'] = None): + self.modules: Dict[str, Module] = {} self.allobjects: Dict[str, Documentable] = {} self.rootobjects: List[_ModuleT] = [] @@ -1170,7 +1227,9 @@ def membersOrder(self, ob: Documentable) -> Callable[[Documentable], Tuple[Any, def addObject(self, obj: Documentable) -> None: """Add C{object} to the system.""" - + if isinstance(obj, _ModuleT): + # we already handled duplication of modules. + self.modules[obj.fullName()] = obj if obj.parent: obj.parent.contents[obj.name] = obj elif isinstance(obj, _ModuleT): @@ -1483,39 +1542,6 @@ def fetchIntersphinxInventories(self, cache: CacheT) -> None: for url in self.options.intersphinx: self.intersphinx.update(cache, url) -def defaultPostProcess(system:'System') -> None: - for cls in system.objectsOfType(Class): - # Initiate the MROs - cls._init_mro() - # Lookup of constructors - cls._init_constructors() - - # Compute subclasses - for b in cls.baseobjects: - if b is not None: - b.subclasses.append(cls) - - # Checking whether the class is an exception - if is_exception(cls): - cls.kind = DocumentableKind.EXCEPTION - - for attrib in system.objectsOfType(Attribute): - _inherits_instance_variable_kind(attrib) - -def _inherits_instance_variable_kind(attr: Attribute) -> None: - """ - If any of the inherited members of a class variable is an instance variable, - then the subclass' class variable become an instance variable as well. 
- """ - if attr.kind is not DocumentableKind.CLASS_VARIABLE: - return - docsources = attr.docsources() - next(docsources) - for inherited in docsources: - if inherited.kind is DocumentableKind.INSTANCE_VARIABLE: - attr.kind = DocumentableKind.INSTANCE_VARIABLE - break - def get_docstring( obj: Documentable ) -> Tuple[Optional[str], Optional[Documentable]]: diff --git a/pydoctor/node2stan.py b/pydoctor/node2stan.py index bdc3cb543..65e1c3fd6 100644 --- a/pydoctor/node2stan.py +++ b/pydoctor/node2stan.py @@ -5,7 +5,7 @@ import re import optparse -from typing import Any, Callable, ClassVar, Iterable, List, Optional, Union, TYPE_CHECKING +from typing import Any, Callable, ClassVar, Iterable, List, Optional, Sequence, Tuple, Union, TYPE_CHECKING from docutils.writers import html4css1 from docutils import nodes, frontend, __version_info__ as docutils_version_info @@ -54,6 +54,25 @@ def gettext(node: Union[nodes.Node, List[nodes.Node]]) -> List[str]: filtered.extend(gettext(child)) return filtered +def parse_reference(node:nodes.title_reference) -> Tuple[Union[str, Sequence[nodes.Node]], str]: + """ + Split a reference into (label, target). + """ + label: Union[str, Sequence[nodes.Node]] + if 'refuri' in node.attributes: + # Epytext parsed or manually constructed nodes. + label, target = node.children, node.attributes['refuri'] + else: + # RST parsed. + m = _TARGET_RE.match(node.astext()) + if m: + label, target = m.groups() + else: + label = target = node.astext() + # Support linking to functions and methods with () at the end + if target.endswith('()'): + target = target[:len(target)-2] + return label, target _TARGET_RE = re.compile(r'^(.*?)\s*<(?:URI:|URL:)?([^<>]+)>$') _VALID_IDENTIFIER_RE = re.compile('[^0-9a-zA-Z_]') @@ -107,22 +126,12 @@ def visit_obj_reference(self, node: nodes.Node) -> None: self._handle_reference(node, link_func=self._linker.link_to) def _handle_reference(self, node: nodes.Node, link_func: Callable[[str, "Flattenable"], "Flattenable"]) -> None: + node_label, target = parse_reference(node) label: "Flattenable" - if 'refuri' in node.attributes: - # Epytext parsed or manually constructed nodes. - label, target = node2stan(node.children, self._linker), node.attributes['refuri'] + if not isinstance(node_label, str): + label = node2stan(node_label, self._linker) else: - # RST parsed. - m = _TARGET_RE.match(node.astext()) - if m: - label, target = m.groups() - else: - label = target = node.astext() - - # Support linking to functions and methods with () at the end - if target.endswith('()'): - target = target[:len(target)-2] - + label = node_label self.body.append(flatten(link_func(target, label))) raise nodes.SkipNode() diff --git a/pydoctor/sphinx.py b/pydoctor/sphinx.py index 5766934a7..8755567b1 100644 --- a/pydoctor/sphinx.py +++ b/pydoctor/sphinx.py @@ -136,6 +136,12 @@ def getLink(self, name: str) -> Optional[str]: """ Return link for `name` or None if no link is found. """ + # special casing the 'builtins' module because our name resolving + # replaces bare builtins names with builtins. in order not to confuse + # them with objects in the system when reparenting. 
+ if name.startswith('builtins.'): + name = name[len('builtins.'):] + base_url, relative_link = self._links.get(name, (None, None)) if not relative_link: return None diff --git a/pydoctor/templatewriter/pages/__init__.py b/pydoctor/templatewriter/pages/__init__.py index 2f57084c0..a33316b73 100644 --- a/pydoctor/templatewriter/pages/__init__.py +++ b/pydoctor/templatewriter/pages/__init__.py @@ -1,9 +1,10 @@ """The classes that turn L{Documentable} instances into objects we can render.""" from __future__ import annotations +from itertools import chain from typing import ( - TYPE_CHECKING, Dict, Iterator, List, Optional, Mapping, Sequence, - Type, Union + TYPE_CHECKING, Callable, Dict, Iterator, List, Optional, Mapping, Sequence, + Tuple, Type, Union ) import ast import abc @@ -18,7 +19,6 @@ from pydoctor.templatewriter import util, TemplateLookup, TemplateElement from pydoctor.templatewriter.pages.table import ChildTable from pydoctor.templatewriter.pages.sidebar import SideBar -from pydoctor.epydoc.markup._pyval_repr import colorize_inline_pyval if TYPE_CHECKING: from typing_extensions import Final @@ -32,7 +32,8 @@ def format_decorators(obj: Union[model.Function, model.Attribute, model.Function # primary function for parts that requires an interface to Documentable methods or attributes documentable_obj = obj if not isinstance(obj, model.FunctionOverload) else obj.primary - for dec in obj.decorators or (): + for dec, doc in zip(obj.decorators or (), + epydoc2stan.get_parsed_decorators(obj) or ()): if isinstance(dec, ast.Call): fn = node2fullname(dec.func, documentable_obj) # We don't want to show the deprecated decorator; @@ -40,15 +41,9 @@ def format_decorators(obj: Union[model.Function, model.Attribute, model.Function if fn in ("twisted.python.deprecate.deprecated", "twisted.python.deprecate.deprecatedProperty"): break - - # Colorize decorators! - doc = colorize_inline_pyval(dec) stan = epydoc2stan.safe_to_stan(doc, documentable_obj.docstring_linker, documentable_obj, fallback=epydoc2stan.colorized_pyval_fallback, section='rendering of decorators') - - # Report eventual warnings. It warns when we can't colorize the expression for some reason. - epydoc2stan.reportWarnings(documentable_obj, doc.warnings, section='colorize decorator') yield '@', stan.children, tags.br() def format_signature(func: Union[model.Function, model.FunctionOverload]) -> "Flattenable": @@ -72,29 +67,17 @@ def format_class_signature(cls: model.Class) -> "Flattenable": """ r: List["Flattenable"] = [] # the linker will only be used to resolve the generic arguments of the base classes, - # it won't actually resolve the base classes (see comment few lines below). + # it won't actually resolve the base classes (see comment in epydoc2stan.get_parsed_bases). # this is why we're using the annotation linker. _linker = linker._AnnotationLinker(cls) - if cls.rawbases: + parsed_bases = epydoc2stan.get_parsed_bases(cls) + if parsed_bases: r.append('(') - for idx, ((str_base, base_node), base_obj) in enumerate(zip(cls.rawbases, cls.baseobjects)): + for idx, parsed_base in enumerate(parsed_bases): if idx != 0: r.append(', ') - - # Make sure we bypass the linker’s resolver process for base object, - # because it has been resolved already (with two passes). 
- # Otherwise, since the class declaration wins over the imported names, - # a class with the same name as a base class confused pydoctor and it would link - # to it self: https://github.com/twisted/pydoctor/issues/662 - - refmap = None - if base_obj is not None: - refmap = {str_base:base_obj.fullName()} - - # link to external class, using the colorizer here - # to link to classes with generics (subscripts and other AST expr). - stan = epydoc2stan.safe_to_stan(colorize_inline_pyval(base_node, refmap=refmap), _linker, cls, + stan = epydoc2stan.safe_to_stan(parsed_base, _linker, cls, fallback=epydoc2stan.colorized_pyval_fallback, section='rendering of class signature') r.extend(stan.children) @@ -279,11 +262,20 @@ def extras(self) -> List["Flattenable"]: def docstring(self) -> "Flattenable": return self.docgetter.get(self.ob) + + def _childtable_objects_order(self, + v:Union[model.Documentable, Tuple[str, model.Documentable]]) -> Tuple[int, int, str]: + if isinstance(v, model.Documentable): + return self._order(v) + else: + name, o = v + i,j,_ = self._order(o) + return (i,j, f'{self.ob.fullName()}.{name}'.lower()) - def children(self) -> Sequence[model.Documentable]: + def children(self) -> Sequence[Union[model.Documentable, Tuple[str, model.Documentable]]]: return sorted( (o for o in self.ob.contents.values() if o.isVisible), - key=self._order) + key=self._childtable_objects_order) def packageInitTable(self) -> "Flattenable": return () @@ -363,7 +355,6 @@ def slot_map(self) -> Dict[str, "Flattenable"]: ) return slot_map - class ModulePage(CommonPage): ob: model.Module @@ -376,17 +367,35 @@ def extras(self) -> List["Flattenable"]: r.extend(super().extras()) return r + + def _iter_reexported_members(self, predicate: Optional[Callable[[model.Documentable], bool]]=None) -> Iterator[Tuple[str, model.Documentable]]: + if not predicate: + predicate = lambda v:True + return ((n,o) for n,o in self.ob.exported.items() if o.isVisible and predicate(o)) + def children(self) -> Sequence[Union[model.Documentable, Tuple[str, model.Documentable]]]: + return sorted(chain( + super().children(), self._iter_reexported_members()), + key=self._childtable_objects_order) -class PackagePage(ModulePage): - def children(self) -> Sequence[model.Documentable]: - return sorted(self.ob.submodules(), key=self._order) - def packageInitTable(self) -> "Flattenable": - children = sorted( - (o for o in self.ob.contents.values() +class PackagePage(ModulePage): + def children(self) -> Sequence[Union[model.Documentable, Tuple[str, model.Documentable]]]: + return sorted(chain(self.ob.submodules(), + self._iter_reexported_members( + predicate=lambda o: isinstance(o, model.Module))), + key=self._childtable_objects_order) + + def initTableChildren(self) -> Sequence[Union[model.Documentable, Tuple[str, model.Documentable]]]: + return sorted( + chain((o for o in self.ob.contents.values() if not isinstance(o, model.Module) and o.isVisible), - key=self._order) + self._iter_reexported_members( + predicate=lambda o: not isinstance(o, model.Module))), + key=self._childtable_objects_order) + + def packageInitTable(self) -> "Flattenable": + children = self.initTableChildren() if children: loader = ChildTable.lookup_loader(self.template_lookup) return [ @@ -574,7 +583,7 @@ def objectExtras(self, ob: model.Documentable) -> List["Flattenable"]: r.extend(super().objectExtras(ob)) return r -commonpages: 'Final[Mapping[str, Type[CommonPage]]]' = { +commonpages: Final[Mapping[str, Type[CommonPage]]] = { 'Module': ModulePage, 'Package': PackagePage, 
'Class': ClassPage, diff --git a/pydoctor/templatewriter/pages/table.py b/pydoctor/templatewriter/pages/table.py index 05b486c19..adca6a6ca 100644 --- a/pydoctor/templatewriter/pages/table.py +++ b/pydoctor/templatewriter/pages/table.py @@ -1,12 +1,12 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Collection +from typing import TYPE_CHECKING, Collection, Optional, Tuple, Union, overload from twisted.web.iweb import ITemplateLoader from twisted.web.template import Element, Tag, TagLoader, renderer, tags from pydoctor import epydoc2stan -from pydoctor.model import Documentable, Function +from pydoctor.model import Documentable, Function, Class from pydoctor.templatewriter import TemplateElement, util if TYPE_CHECKING: @@ -20,16 +20,18 @@ def __init__(self, docgetter: util.DocGetter, ob: Documentable, child: Documentable, + as_name:Optional[str] ): super().__init__(loader) self.docgetter = docgetter self.ob = ob self.child = child + self.as_name = as_name @renderer def class_(self, request: object, tag: Tag) -> "Flattenable": class_ = util.css_class(self.child) - if self.child.parent is not self.ob: + if isinstance(self.ob, Class) and self.child.parent is not self.ob: class_ = 'base' + class_ return class_ @@ -49,8 +51,9 @@ def kind(self, request: object, tag: Tag) -> Tag: @renderer def name(self, request: object, tag: Tag) -> Tag: return tag.clear()(tags.code( - epydoc2stan.taglink(self.child, self.ob.url, epydoc2stan.insert_break_points(self.child.name)) - )) + epydoc2stan.taglink(self.child, self.ob.url, + epydoc2stan.insert_break_points( + self.as_name or self.child.name)))) @renderer def summaryDoc(self, request: object, tag: Tag) -> Tag: @@ -63,11 +66,28 @@ class ChildTable(TemplateElement): filename = 'table.html' + # not really a legit usage of overload, but mypy made me do it. + @overload def __init__(self, docgetter: util.DocGetter, ob: Documentable, children: Collection[Documentable], loader: ITemplateLoader, + ):... + @overload + def __init__(self, + docgetter: util.DocGetter, + ob: Documentable, + children: Collection[Union[Documentable, + Tuple[str, Documentable]]], + loader: ITemplateLoader, + ):... 
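+    # The actual implementation accepts both plain Documentable children and
+    # (as_name, Documentable) pairs produced for re-exported names.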
+ def __init__(self, + docgetter: util.DocGetter, + ob: Documentable, + children: Collection[Union[Documentable, + Tuple[str, Documentable]]], + loader: ITemplateLoader, ): super().__init__(loader) self.children = children @@ -87,7 +107,8 @@ def rows(self, request: object, tag: Tag) -> "Flattenable": TagLoader(tag), self.docgetter, self.ob, - child) + child=child if isinstance(child, Documentable) else child[1], + as_name=None if isinstance(child, Documentable) else child[0]) for child in self.children - if child.isVisible - ] + if (child if isinstance(child, Documentable) else child[1]).isVisible + ] diff --git a/pydoctor/test/test_astbuilder.py b/pydoctor/test/test_astbuilder.py index 95ac3d803..18600ff1f 100644 --- a/pydoctor/test/test_astbuilder.py +++ b/pydoctor/test/test_astbuilder.py @@ -2719,3 +2719,314 @@ def test_typealias_unstring(systemcls: Type[model.System]) -> None: # there is not Constant nodes in the type alias anymore next(n for n in ast.walk(typealias.value) if isinstance(n, ast.Constant)) +@systemcls_param +def test_module_imports(systemcls: Type[model.System]) -> None: + code = ''' + import mod2 + import pack.subpack + import pack.subpack as a + from mod2 import _k as k, _l as l, _m as m + from pack.subpack.stuff import C + from x import * + ''' + expected = [('mod2','mod2', None), + ('pack','pack', None), + ('a','pack.subpack', None), + ('k','mod2','_k'), + ('l','mod2','_l'), + ('m','mod2','_m'), + ('C','pack.subpack.stuff','C'), + ('*','x', '*')] + mod = fromText(code, systemcls=systemcls) + + assert len(expected)==len(mod.imports) + for i, exp in zip(mod.imports, expected): + assert isinstance(i, model.Import) + + expected_name, expected_orgmodule, expected_orgname = exp + assert i.name == expected_name + assert i.orgmodule == expected_orgmodule + assert i.orgname == expected_orgname + +@systemcls_param +def test_module_relative_imports(systemcls: Type[model.System]) -> None: + code = ''' + from ..mod2 import bar as b + from .pack import foo + ''' + expected = [('b','top.mod2','bar'), + ('foo','top.subpack.pack','foo'),] + system = systemcls() + builder = system.systemBuilder(system) + builder.addModuleString('', modname='top', is_package=True) + builder.addModuleString('', modname='subpack', parent_name='top', is_package=True) + builder.addModuleString(code, modname='other', parent_name='top.subpack') + builder.buildModules() + mod = system.allobjects['top.subpack.other'] + assert isinstance(mod, model.Module) + assert len(expected)==len(mod.imports) + for i, exp in zip(mod.imports, expected): + assert isinstance(i, model.Import) + + expected_name, expected_orgmodule, expected_orgname = exp + assert i.name == expected_name + assert i.orgmodule == expected_orgmodule + assert i.orgname == expected_orgname + +@systemcls_param +def test_module_relative_package_imports(systemcls: Type[model.System]) -> None: + code = ''' + from ...mod2 import bar as b + from .pack import foo + ''' + expected = [('b','top.mod2','bar'), + ('foo','top.subpack.other.pack','foo'),] + system = systemcls() + builder = system.systemBuilder(system) + builder.addModuleString('', modname='top', is_package=True) + builder.addModuleString('', modname='subpack', parent_name='top', is_package=True) + builder.addModuleString(code, modname='other', parent_name='top.subpack', is_package=True) + builder.buildModules() + mod = system.allobjects['top.subpack.other'] + assert isinstance(mod, model.Module) + assert len(expected)==len(mod.imports) + for i, exp in zip(mod.imports, expected): + assert 
isinstance(i, model.Import) + + expected_name, expected_orgmodule, expected_orgname = exp + assert i.name == expected_name + assert i.orgmodule == expected_orgmodule + assert i.orgname == expected_orgname + +@systemcls_param +def test_allobjects_mapping_reparented_confusion(systemcls: Type[model.System], capsys:CapSys) -> None: + """ + When reparenting, pydoctor takes care to handle duplicate objects with system.handleDuplicate. + """ + src1 = '''\ + class mything: + "reparented" + class stuff: + do = object() + ''' + mything_src = '''\ + class stuff: + "doc" + def do(x:int):... + ''' + pack = 'from ._src import mything; __all__=["mything"]' + + system = systemcls() + builder = system.systemBuilder(system) + builder.addModuleString(pack, 'pack', is_package=True) + builder.addModuleString(src1, '_src', parent_name='pack') + builder.addModuleString(mything_src, 'mything', parent_name='pack') + builder.buildModules() + + assert [(o.name,o.kind) for o in + system.allobjects['pack'].contents.values()] == [('_src', model.DocumentableKind.MODULE), + # ('mything 0', model.DocumentableKind.MODULE), + ('mything', model.DocumentableKind.CLASS)] + + assert system.allobjects['pack.mything'].docstring == "reparented" + + assert system.allobjects['pack.mything.stuff'].docstring == None + assert system.allobjects['pack.mything.stuff.do'].kind == model.DocumentableKind.CLASS_VARIABLE + + assert system.allobjects['pack.mything 0.stuff'].docstring == "doc" + assert system.allobjects['pack.mything 0.stuff'].kind == model.DocumentableKind.CLASS + assert system.allobjects['pack.mything 0.stuff.do'].kind == model.DocumentableKind.METHOD + + assert capsys.readouterr().out == ( + "moving 'pack._src.mything' into 'pack'\n" + "pack.mything:???: duplicate Module 'pack.mything'\n" + "pack._src:1: introduced by re-exporting Class 'pack._src.mything' into Package 'pack'\n" + ) + +@systemcls_param +def test_cannot_resolve_reparented(systemcls: Type[model.System], capsys:CapSys) -> None: + """ + When reparenting, pydoctor warns when the reparented target cannot be found. + """ + src1 = '''\ + class Cls:... + ''' + mything_src = '''\ + class Slc:... + ''' + pack = 'from ._src2 import Slc;from ._src1 import Cls; __all__=["Cls", "Slc"]' + + system = systemcls() + builder = system.systemBuilder(system) + builder.addModuleString(pack, 'pack', is_package=True) + builder.addModuleString(src1, '_src0', parent_name='pack') + builder.addModuleString(mything_src, '_src1', parent_name='pack') + builder.buildModules() + + assert list(system.allobjects['pack'].contents) == ['_src0', '_src1'] + + assert capsys.readouterr().out == ("pack:1: cannot resolve origin module of re-exported name: 'Slc' from origin module 'pack._src2'\n" + "pack:1: cannot resolve re-exported name: 'pack._src1.Cls'\n") + +@systemcls_param +def test_reparenting_from_module_that_defines__all__(systemcls: Type[model.System], capsys:CapSys) -> None: + """ + Even if a module defines its own __all__ attribute, + we can reparent its direct children to a new module, + but only when the origin module has a lower privacy class + (i.e. reparenting from a private module to a public module); otherwise the name stays where it is. + """ + _src = '''\ + class cls:... + class cls3:... + class cls4:... + __all__ = ['cls', 'cls3', 'cls4'] + ''' + src = ''' + class cls2:...
+ __all__ = ['cls2'] + ''' + pack = '''\ + from ._src import cls + from .src import cls2 + __all__=["cls","cls2"] + ''' + subpack = '''\ + from .._src import cls3 + __all__=["cls3"] + ''' + private = '''\ + from pack._src import cls3, cls4 + __all__ = ['cls3', 'cls4'] + ''' + + system = systemcls() + builder = system.systemBuilder(system) + builder.addModuleString(private, '_private') + builder.addModuleString(pack, 'pack', is_package=True) + builder.addModuleString(subpack, 'subpack', parent_name='pack', is_package=True) + builder.addModuleString(_src, '_src', parent_name='pack') + builder.addModuleString(src, 'src', parent_name='pack') + builder.buildModules() + assert capsys.readouterr().out == ( + "moving 'pack._src.cls3' into 'pack.subpack', also available at '_private.cls3'\n" + "moving 'pack._src.cls4' into '_private'\n" + "moving 'pack._src.cls' into 'pack'\n" + "not moving pack.src.cls2 into 'pack', because 'cls2' is already exported in public module 'pack.src'\n") + + assert system.allobjects['pack.cls'] is system.allobjects['pack._src'].exported['cls'] # type:ignore + +@systemcls_param +def test_do_not_reparent_to_existing_name(systemcls: Type[model.System], capsys:CapSys) -> None: + """ + Pydoctor will not re-export a name that is + shadowed by a local definition of the same name. + """ + src1 = '''\ + class Cls:... + ''' + src2 = '''\ + class Slc:... + ''' + pack = '''\ + class Slc:... + from ._src1 import Slc + from ._src import Cls + class Cls:... + __all__=["Cls", "Slc"] + ''' + + system = systemcls() + builder = system.systemBuilder(system) + builder.addModuleString(pack, 'pack', is_package=True) + builder.addModuleString(src1, '_src', parent_name='pack') + builder.addModuleString(src2, '_src1', parent_name='pack') + builder.buildModules() + + assert capsys.readouterr().out == ("pack:3: not moving pack._src.Cls into pack, because 'Cls' is defined at line 4\n" + "moving 'pack._src1.Slc' into 'pack'\n" + "pack:1: duplicate Class 'pack.Slc'\n" + "pack._src1:1: introduced by re-exporting Class 'pack._src1.Slc' into Package 'pack'\n") + + assert system.allobjects['pack.Slc'] is system.allobjects['pack._src1'].exported['Slc'] # type:ignore + +@systemcls_param +def test_multiple_re_exports(systemcls: Type[model.System], capsys:CapSys) -> None: + """ + Pydoctor will re-export a name to the module with + the fewest dots in its fullname. + """ + src = '''\ + class Cls:... + ''' + subpack = '''\ + from pack.subpack.src import Cls + __all__=['Cls'] + ''' + pack = '''\ + from pack.subpack import Cls + __all__=["Cls"] + ''' + + system = systemcls() + builder = system.systemBuilder(system) + builder.addModuleString(pack, 'pack', is_package=True) + builder.addModuleString(subpack, 'subpack', is_package=True, parent_name='pack') + builder.addModuleString(src, 'src', parent_name='pack.subpack') + builder.buildModules() + + assert capsys.readouterr().out == ("moving 'pack.subpack.src.Cls' into 'pack', " + "also available at 'pack.subpack.Cls'\n") + + assert system.allobjects['pack.Cls'] is system.allobjects['pack.subpack'].exported['Cls'] # type:ignore + assert system.allobjects['pack.Cls'] is system.allobjects['pack.subpack.src'].exported['Cls'] # type:ignore + +@systemcls_param +def test_multiple_re_exports_alias(systemcls: Type[model.System], capsys:CapSys) -> None: + """ + The case of twisted.internet.ssl.DistinguishedName/DN + """ + src = '''\ + class DistinguishedName:...
+ DN = DistinguishedName + ''' + subpack = '' + pack = ''' + from pack.subpack.src import DN, DistinguishedName as DisName + __all__=['DN', 'DisName'] + ''' + + system = systemcls() + builder = system.systemBuilder(system) + builder.addModuleString(pack, 'pack', is_package=True) + builder.addModuleString(subpack, 'subpack', is_package=True, parent_name='pack') + builder.addModuleString(src, 'src', parent_name='pack.subpack') + builder.buildModules() + + assert capsys.readouterr().out == ("moving 'pack.subpack.src.DistinguishedName' into 'pack' as 'DisName', " + "also available at 'pack.DN'\n") + + assert system.allobjects['pack.DisName'] is system.allobjects['pack'].exported['DN'] # type:ignore + assert system.allobjects['pack.DisName'] is system.allobjects['pack.subpack.src'].exported['DistinguishedName'] # type:ignore + +@systemcls_param +def test_re_export_method(systemcls: Type[model.System], capsys:CapSys) -> None: + src = '''\ + class Thing: + def method(self):... + method = Thing.method + ''' + subpack = '' + pack = ''' + from pack.subpack.src import method + __all__=['method'] + ''' + + system = systemcls() + builder = system.systemBuilder(system) + builder.addModuleString(pack, 'pack', is_package=True) + builder.addModuleString(subpack, 'subpack', is_package=True, parent_name='pack') + builder.addModuleString(src, 'src', parent_name='pack.subpack') + builder.buildModules() + assert capsys.readouterr().out == "moving 'pack.subpack.src.Thing.method' into 'pack'\n" + diff --git a/pydoctor/test/test_epydoc2stan.py b/pydoctor/test/test_epydoc2stan.py index 828edccfb..9a5e79341 100644 --- a/pydoctor/test/test_epydoc2stan.py +++ b/pydoctor/test/test_epydoc2stan.py @@ -1029,21 +1029,22 @@ class Klass: mod.parsed_docstring.get_summary().to_stan(mod.docstring_linker) # type:ignore warnings = ['test:2: Cannot find link target for "thing.notfound" (you can link to external docs with --intersphinx)'] - if linkercls is linker._EpydocLinker: - warnings = warnings * 2 assert capsys.readouterr().out.strip().splitlines() == warnings - + + # reset warnings + mod.system.once_msgs = set() + # This is wrong: Klass.parsed_docstring.to_stan(mod.docstring_linker) # type:ignore Klass.parsed_docstring.get_summary().to_stan(mod.docstring_linker) # type:ignore # Because the warnings will be reported on line 2 warnings = ['test:2: Cannot find link target for "thing.notfound" (you can link to external docs with --intersphinx)'] - warnings = warnings * 2 assert capsys.readouterr().out.strip().splitlines() == warnings - # assert capsys.readouterr().out == '' + # reset warnings + mod.system.once_msgs = set() # Reset stan and summary, because they are supposed to be cached. 
Klass.parsed_docstring._stan = None # type:ignore @@ -1054,9 +1055,7 @@ class Klass: Klass.parsed_docstring.to_stan(mod.docstring_linker) # type:ignore Klass.parsed_docstring.get_summary().to_stan(mod.docstring_linker) # type:ignore - warnings = ['test:5: Cannot find link target for "thing.notfound" (you can link to external docs with --intersphinx)'] - warnings = warnings * 2 - + warnings = ['test:5: Cannot find link target for "thing.notfound" (you can link to external docs with --intersphinx)'] assert capsys.readouterr().out.strip().splitlines() == warnings def test_EpydocLinker_look_for_intersphinx_no_link() -> None: @@ -1188,6 +1187,43 @@ def test_EpydocLinker_resolve_identifier_xref_intersphinx_link_not_found(capsys: assert expected == captured +def test_EpydocLinker_link_not_found_show_original(capsys: CapSys) -> None: + n = '' + m = '''\ + from n import Stuff + S = Stuff + ''' + src = '''\ + """ + L{S} + """ + class Cls: + """ + L{Stuff } + """ + from m import S + ''' + system = model.System() + builder = system.systemBuilder(system) + builder.addModuleString(n, 'n') + builder.addModuleString(m, 'm') + builder.addModuleString(src, 'src') + builder.buildModules() + docstring2html(system.allobjects['src']) + captured = capsys.readouterr().out + # TODO: should say resolved from "S" + expected = ( + 'src:2: Cannot find link target for "n.Stuff", resolved from "m.S"\n' + ) + assert expected == captured + + docstring2html(system.allobjects['src.Cls']) + captured = capsys.readouterr().out + expected = ( + 'src:6: Cannot find link target for "n.Stuff", resolved from "m.S"\n' + ) + assert expected == captured + class InMemoryInventory: """ A simple inventory implementation which has an in-memory API link mapping. @@ -1291,8 +1327,6 @@ def test_EpydocLinker_warnings(capsys: CapSys) -> None: # The rationale about xref warnings is to warn when the target cannot be found. assert captured == ('module:3: Cannot find link target for "notfound"' - '\nmodule:3: Cannot find link target for "notfound"' - '\nmodule:5: Cannot find link target for "notfound"' '\nmodule:5: Cannot find link target for "notfound"\n') assert 'href="index.html#base"' in summary2html(mod) @@ -1325,6 +1359,61 @@ class C: assert 'href="#var"' in url assert not capsys.readouterr().out +def test_EpydocLinker_xref_look_for_name_multiple_candidates(capsys:CapSys) -> None: + """ + When the linker uses look_for_name(), if 'identifier' refers to more than one object, it complains. + """ + system = model.System() + builder = system.systemBuilder(system) + builder.addModuleString('class C:...', modname='_one') + builder.addModuleString('class C:...', modname='_two') + builder.addModuleString('"L{C}"', modname='top') + builder.buildModules() + docstring2html(system.allobjects['top']) + assert capsys.readouterr().out == ( + 'top:1: ambiguous ref to C, could be _one.C, _two.C\n' + 'top:1: Cannot find link target for "C"\n') + +def test_EpydocLinker_xref_look_for_name_into_uncle_objects(capsys:CapSys) -> None: + """ + The linker walks up the object tree and checks whether 'identifier' refers to an + object in an "uncle" object.
+ """ + system = model.System() + builder = system.systemBuilder(system) + builder.addModuleString('', modname='pack', is_package=True) + builder.addModuleString('class C:...', modname='mod2', parent_name='pack') + builder.addModuleString('class I:\n var=1;"L{C}"', modname='mod1', parent_name='pack') + builder.buildModules() + assert 'href="pack.mod2.C.html"' in docstring2html(system.allobjects['pack.mod1.I.var']) + assert capsys.readouterr().out == '' + +def test_EpydocLinker_xref_look_for_name_into_all_modules(capsys:CapSys) -> None: + """ + The linker examine every module and package in the system and see if 'identifier' + names an object in each one. + """ + system = model.System() + builder = system.systemBuilder(system) + builder.addModuleString('class C:...', modname='_one') + builder.addModuleString('"L{C}"', modname='top') + builder.buildModules() + assert 'href="_one.C.html"' in docstring2html(system.allobjects['top']) + assert capsys.readouterr().out == '' + +def test_EpydocLinker_xref_walk_up_the_object_tree(capsys:CapSys) -> None: + """ + The linker walks up the object tree and see if 'identifier' refers + to an object by Python name resolution in each context. + """ + system = model.System() + builder = system.systemBuilder(system) + builder.addModuleString('class C:...', modname='pack', is_package=True) + builder.addModuleString('class I:\n var=1;"L{C}"', modname='mod1', parent_name='pack') + builder.buildModules() + assert 'href="pack.C.html"' in docstring2html(system.allobjects['pack.mod1.I.var']) + assert capsys.readouterr().out == '' + def test_xref_not_found_epytext(capsys: CapSys) -> None: """ When a link in an epytext docstring cannot be resolved, the reference @@ -1415,6 +1504,8 @@ def __init__(self) -> None: self.requests: List[str] = [] def link_to(self, target: str, label: "Flattenable") -> Tag: + if target.startswith('builtins.'): + target = target[len('builtins.'):] self.requests.append(target) return tags.transparent(label) @@ -2013,7 +2104,6 @@ def f(self, x:typ) -> typ: assert capsys.readouterr().out == """\ m:5: ambiguous annotation 'typ', could be interpreted as 'm.C.typ' instead of 'm.typ' -m:5: ambiguous annotation 'typ', could be interpreted as 'm.C.typ' instead of 'm.typ' m:7: ambiguous annotation 'typ', could be interpreted as 'm.C.typ' instead of 'm.typ' """ @@ -2121,6 +2211,181 @@ def func(): captured = capsys.readouterr().out assert captured == '' +def test_parsed_names_partially_resolved_early() -> None: + """ + Test for issue #295 + + Annotations are first locally resolved when we reach the end of the module, + then again when we actually resolve the name when generating the stan for the annotation. + """ + typing = '''\ + List = ClassVar = TypeVar = object() + ''' + + base = '''\ + import ast + class Vis(ast.NodeVisitor): + ... 
+ ''' + src = '''\ + from typing import List + import typing as t + + from .base import Vis + + class Cls(Vis, t.Generic['_T']): + """ + L{Cls} + """ + clsvar:List[str] + clsvar2:t.ClassVar[List[str]] + + def __init__(self, a:'_T'): + self._a:'_T' = a + + C = Cls + _T = t.TypeVar('_T') + unknow: i|None|list + ann:Cls + ''' + + top = '''\ + # the order matters here + from .src import C, Cls, Vis + __all__ = ['Cls', 'C', 'Vis'] + ''' + + system = model.System() + builder = system.systemBuilder(system) + builder.addModuleString(top, 'top', is_package=True) + builder.addModuleString(base, 'base', 'top') + builder.addModuleString(src, 'src', 'top') + builder.addModuleString(typing, 'typing') + builder.buildModules() + + Cls = system.allobjects['top.Cls'] + clsvar = Cls.contents['clsvar'] + clsvar2 = Cls.contents['clsvar2'] + a = Cls.contents['_a'] + assert clsvar.expandName('typing.List')=='typing.List' + assert 'refuri="typing.List"' in clsvar.parsed_type.to_node().pformat() #type: ignore + assert 'href="typing.html#List"' in flatten(clsvar.parsed_type.to_stan(clsvar.docstring_linker)) #type: ignore + assert 'href="typing.html#ClassVar"' in flatten(clsvar2.parsed_type.to_stan(clsvar2.docstring_linker)) #type: ignore + assert 'href="top.src.html#_T"' in flatten(a.parsed_type.to_stan(clsvar.docstring_linker)) #type: ignore + + # the reparenting/alias issue + ann = system.allobjects['top.src.ann'] + assert 'href="top.Cls.html"' in flatten(ann.parsed_type.to_stan(ann.docstring_linker)) #type: ignore + assert 'href="top.Cls.html"' in flatten(Cls.parsed_docstring.to_stan(Cls.docstring_linker)) #type: ignore + + unknow = system.allobjects['top.src.unknow'] + assert flatten_text(unknow.parsed_type.to_stan(unknow.docstring_linker)) == 'i|None|list' #type: ignore + + # test the __init__ signature + assert 'href="top.src.html#_T"' in flatten(format_signature(Cls.contents['__init__'])) #type: ignore + +def test_reparented_ambiguous_annotation_confusion() -> None: + """ + Like L{test_top_level_type_alias_wins_over_class_level} but with reparented class. + """ + src = ''' + typ = object() + class C: + typ = int|str + var: typ + ''' + system = model.System() + builder = system.systemBuilder(system) + builder.addModuleString(src, modname='_m') + builder.addModuleString('from _m import C; __all__=["C"]', 'm') + builder.buildModules() + var = system.allobjects['m.C.var'] + assert 'href="_m.html#typ"' in flatten(var.parsed_type.to_stan(var.docstring_linker)) #type: ignore + +def test_reparented_builtins_confusion() -> None: + """ + - builtin links are resolved as such even when the new parent + declares a name shadowing a builtin. 
+ """ + src = ''' + class C(int): + var: list + C = print('one') + @stuff(auto=object) + def __init__(self, v:bytes=bytes): + "L{str}" + ''' + top = ''' + list = object = int = print = str = bytes = True + + from src import C + __all__=["C"] + ''' + system = model.System() + builder = system.systemBuilder(system) + builder.addModuleString(src, modname='src') + builder.addModuleString(top, modname='top') + builder.buildModules() + clsvar = system.allobjects['top.C.var'] + C = system.allobjects['top.C'] + Ci = system.allobjects['top.C.C'] + __init__ = system.allobjects['top.C.__init__'] + + assert 'refuri="builtins.list"' in clsvar.parsed_type.to_node().pformat() #type: ignore + assert 'refuri="builtins.print"' in Ci.parsed_value.to_node().pformat() #type: ignore + assert 'refuri="builtins.int"' in C.parsed_bases[0].to_node().pformat() #type: ignore + assert 'refuri="builtins.object"' in __init__.parsed_decorators[0].to_node().pformat() #type: ignore + assert 'refuri="builtins.bytes"' in __init__.signature.parameters['v'].default.parsed.to_node().pformat() #type: ignore + assert 'refuri="builtins.bytes"' in __init__.signature.parameters['v'].annotation.parsed.to_node().pformat() #type: ignore + assert 'refuri="builtins.bytes"' in __init__.parsed_annotations['v'].to_node().pformat() #type: ignore + assert __init__.parsed_docstring is None # should not be none, actually :/ + # assert 'refuri="builtins.bytes"' in __init__.parsed_docstring.to_node().pformat() #type: ignore + +def test_link_resolving_unbound_names() -> None: + """ + - unbdound names are not touched, and does not stop the process. + """ + src = ''' + class C: + var: unknown|list + ''' + system = model.System() + builder = system.systemBuilder(system) + builder.addModuleString(src, modname='src') + builder.buildModules() + clsvar = system.allobjects['src.C.var'] + + assert 'refuri="builtins.list"' in clsvar.parsed_type.to_node().pformat() #type: ignore + assert 'refuri="unknown"' in clsvar.parsed_type.to_node().pformat() #type: ignore + # does not work for constant values at the moment + +def test_reference_transform_in_type_docstring() -> None: + """ + It will fail with ParsedTypeDocstring at the moment. + """ + src = ''' + __docformat__='google' + class C: + """ + Args: + a (list): the list + """ + ''' + system = model.System() + builder = system.systemBuilder(system) + builder.addModuleString(src, modname='src') + builder.addModuleString('from src import C;__all__=["C"];list=True', modname='top') + builder.buildModules() + clsvar = system.allobjects['top.C'] + + with pytest.raises(NotImplementedError): + assert 'refuri="builtins.list"' in clsvar.parsed_docstring.fields[1].body().to_node().pformat() #type: ignore + +# what to do with inherited documentation of reparented class attribute part of an +# import cycle? We can't set the value of parsed_docstring from the astbuilder because +# we havnen't resolved the mro yet. 
+ + def test_regression_not_found_linenumbers(capsys: CapSys) -> None: """ Test for issue https://github.com/twisted/pydoctor/issues/745 @@ -2162,4 +2427,5 @@ def create_repository(self) -> repository.Repository: mod = fromText(code, ) docstring2html(mod.contents['Settings']) captured = capsys.readouterr().out - assert captured == ':15: Cannot find link target for "TypeError"\n' \ No newline at end of file + assert captured == ':15: Cannot find link target for "TypeError"\n' + diff --git a/pydoctor/test/test_packages.py b/pydoctor/test/test_packages.py index fe16a9991..37034c655 100644 --- a/pydoctor/test/test_packages.py +++ b/pydoctor/test/test_packages.py @@ -57,8 +57,9 @@ def test_allgames() -> None: assert isinstance(mod1, model.Module) mod2 = system.allobjects['allgames.mod2'] assert isinstance(mod2, model.Module) - # InSourceAll is not moved into mod2, but NotInSourceAll is. + # InSourceAll is not moved into mod2 because it's listed in __all__ and its module is public. assert 'InSourceAll' in mod1.contents + assert 'InSourceAll' not in mod2.contents assert 'NotInSourceAll' in mod2.contents # Source paths must be unaffected by the move, so that error messages # point to the right source code. diff --git a/pydoctor/test/test_sphinx.py b/pydoctor/test/test_sphinx.py index d71e0caf5..d112ffcec 100644 --- a/pydoctor/test/test_sphinx.py +++ b/pydoctor/test/test_sphinx.py @@ -110,7 +110,8 @@ def test_generate_empty_functional() -> None: @contextmanager def openFileForWriting(path: str) -> Iterator[io.BytesIO]: yield output - inv_writer._openFileForWriting = openFileForWriting # type: ignore + + inv_writer._openFileForWriting = openFileForWriting # type:ignore inv_writer.generate(subjects=[], basepath='base-path') diff --git a/pydoctor/test/test_templatewriter.py b/pydoctor/test/test_templatewriter.py index dbc143967..b4deb0bdf 100644 --- a/pydoctor/test/test_templatewriter.py +++ b/pydoctor/test/test_templatewriter.py @@ -13,6 +13,7 @@ TemplateLookup, Template, HtmlTemplate, UnsupportedTemplateVersion, OverrideTemplateNotAllowed) +from pydoctor.templatewriter.pages import PackagePage, ModulePage from pydoctor.templatewriter.pages.table import ChildTable from pydoctor.templatewriter.pages.attributechild import AttributeChild from pydoctor.templatewriter.summary import isClassNodePrivate, isPrivate, moduleSummary, ClassIndexPage @@ -910,3 +911,61 @@ class Stuff(socket): index = flatten(ClassIndexPage(mod.system, TemplateLookup(template_dir))) assert 'href="https://docs.python.org/3/library/socket.html#socket.socket"' in index +def test_multiple_re_exports_documented_elsewhere_renders() -> None: + """ + Pydoctor leaves links to the re-exported object on the origin module's page. + """ + src = '''\ + class Cls:...
+ ''' + subpack = '''\ + from pack.subpack.src import Cls + __all__=['Cls'] + ''' + pack = '''\ + from pack.subpack import Cls + __all__=["Cls"] + ''' + + system = model.System() + builder = system.systemBuilder(system) + builder.addModuleString(pack, 'pack', is_package=True) + builder.addModuleString(subpack, 'subpack', is_package=True, parent_name='pack') + builder.addModuleString(src, 'src', parent_name='pack.subpack') + builder.buildModules() + + subpackpage = PackagePage(system.allobjects['pack.subpack'], TemplateLookup(template_dir)) + srcpage = ModulePage(system.allobjects['pack.subpack.src'], TemplateLookup(template_dir)) + assert len(subpackpage.children())==1 + assert ('Cls', system.allobjects['pack.Cls']) in subpackpage.initTableChildren() + assert ('Cls', system.allobjects['pack.Cls']) in srcpage.children() + + assert system.allobjects['pack.Cls'].url in flatten(subpackpage) + assert system.allobjects['pack.Cls'].url in flatten(srcpage) + +@systemcls_param +def test_multiple_re_exports_alias_renders_asname(systemcls: Type[model.System], capsys:CapSys) -> None: + """ + The case of twisted.internet.ssl.DistinguishedName/DN + """ + src = '''\ + class DistinguishedName:... + DN = DistinguishedName + ''' + pack = ''' + from pack.subpack.src import DN, DistinguishedName + __all__=['DN', 'DistinguishedName'] + ''' + + system = systemcls() + builder = system.systemBuilder(system) + builder.addModuleString(pack, 'pack', is_package=True) + builder.addModuleString('', 'subpack', is_package=True, parent_name='pack') + builder.addModuleString(src, 'src', parent_name='pack.subpack') + builder.buildModules() + + subpackpage = PackagePage(system.allobjects['pack'], TemplateLookup(template_dir)) + html = flatten(subpackpage) + + assert system.allobjects['pack.DistinguishedName'].url in html + assert 'DN' in html