Skip to content

Commit

Permalink
Apply suggestions from code review
Browse files Browse the repository at this point in the history
  • Loading branch information
tristanlatr authored Dec 13, 2024
1 parent bd2de92 commit cc82f10
Show file tree
Hide file tree
Showing 5 changed files with 8 additions and 225 deletions.
214 changes: 2 additions & 212 deletions docs/google_demo/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,7 @@
https://google.github.io/styleguide/pyguide.html
"""
from datetime import timedelta
from typing import Any, Awaitable, Callable, Concatenate, List, Mapping, Optional, Sequence, Union, overload # NOQA
from typing import List, Union # NOQA

module_level_variable1 = 12345

Expand Down Expand Up @@ -298,213 +297,4 @@ class ExamplePEP526Class:
"""

attr1: str
attr2: int


# Sentinel object standing in for every external name that is deliberately
# excluded from this demo (the original code referenced temporalio types).
# NOTE(review): attribute accesses like _not_in_the_demo.RetryPolicy would
# fail at runtime -- presumably this module is only ever parsed, never
# executed; confirm against how the docs demo is built.
_not_in_the_demo = object()

# Overload for a no-argument workflow (two-element tuple form).
# The annotations and defaults are intentionally nonsensical: this demo
# exists only to showcase rendering of a large @overload set.
@overload
async def overwhelming_overload(
    workflow: tuple[Any, Any],
    *,
    id: str,
    task_queue: str,
    execution_timeout: Optional[timedelta] = None,
    run_timeout: Optional[timedelta] = None,
    task_timeout: Optional[timedelta] = None,
    id_reuse_policy: _not_in_the_demo = '_not_in_the_demo.WorkflowIDReusePolicy.ALLOW_DUPLICATE',
    id_conflict_policy: _not_in_the_demo = '_not_in_the_demo.WorkflowIDConflictPolicy.UNSPECIFIED',
    retry_policy: Optional[_not_in_the_demo.RetryPolicy] = None,
    cron_schedule: str = "",
    memo: Optional[Mapping[str, Any]] = None,
    search_attributes: Optional[
        Union[
            _not_in_the_demo.TypedSearchAttributes,
            _not_in_the_demo.SearchAttributes,
        ]
    ] = None,
    start_delay: Optional[timedelta] = None,
    start_signal: Optional[str] = None,
    start_signal_args: Sequence[Any] = [],
    rpc_metadata: Mapping[str, str] = {},
    rpc_timeout: Optional[timedelta] = None,
    request_eager_start: bool = False,
) -> tuple[Any, Any]: ...

# Overload for single-param workflow: same keyword-only tail as the
# no-argument form, plus a positional ``arg``. Types are intentionally
# nonsensical (see the demo note on the first overload in this group).
@overload
async def overwhelming_overload(
    workflow: tuple[Any, Any, Any],
    arg: Any,
    *,
    id: str,
    task_queue: str,
    execution_timeout: Optional[timedelta] = None,
    run_timeout: Optional[timedelta] = None,
    task_timeout: Optional[timedelta] = None,
    id_reuse_policy: _not_in_the_demo.WorkflowIDReusePolicy = '_not_in_the_demo.WorkflowIDReusePolicy.ALLOW_DUPLICATE',
    id_conflict_policy: _not_in_the_demo.WorkflowIDConflictPolicy = '_not_in_the_demo.WorkflowIDConflictPolicy.UNSPECIFIED',
    retry_policy: Optional[_not_in_the_demo.RetryPolicy] = None,
    cron_schedule: str = "",
    memo: Optional[Mapping[str, Any]] = None,
    search_attributes: Optional[
        Union[
            _not_in_the_demo.TypedSearchAttributes,
            _not_in_the_demo.SearchAttributes,
        ]
    ] = None,
    start_delay: Optional[timedelta] = None,
    start_signal: Optional[str] = None,
    start_signal_args: Sequence[Any] = [],
    rpc_metadata: Mapping[str, str] = {},
    rpc_timeout: Optional[timedelta] = None,
    request_eager_start: bool = False,
) -> tuple[Any, Any]: ...

# Overload for multi-param workflow: arguments are passed via the
# keyword-only ``args`` sequence instead of a positional ``arg``.
@overload
async def overwhelming_overload(
    workflow: Callable[
        Concatenate[Any, Any], Awaitable[Any]
    ],
    *,
    args: Sequence[Any],
    id: str,
    task_queue: str,
    execution_timeout: Optional[timedelta] = None,
    run_timeout: Optional[timedelta] = None,
    task_timeout: Optional[timedelta] = None,
    id_reuse_policy: _not_in_the_demo.WorkflowIDReusePolicy = '_not_in_the_demo.WorkflowIDReusePolicy.ALLOW_DUPLICATE',
    id_conflict_policy: _not_in_the_demo.WorkflowIDConflictPolicy = '_not_in_the_demo.WorkflowIDConflictPolicy.UNSPECIFIED',
    retry_policy: Optional[_not_in_the_demo.RetryPolicy] = None,
    cron_schedule: str = "",
    memo: Optional[Mapping[str, Any]] = None,
    search_attributes: Optional[
        Union[
            _not_in_the_demo.TypedSearchAttributes,
            _not_in_the_demo.SearchAttributes,
        ]
    ] = None,
    start_delay: Optional[timedelta] = None,
    start_signal: Optional[str] = None,
    start_signal_args: Sequence[Any] = [],
    rpc_metadata: Mapping[str, str] = {},
    rpc_timeout: Optional[timedelta] = None,
    request_eager_start: bool = False,
) -> tuple[Any, Any]: ...

# Overload for string-name workflow: the only form that accepts both
# ``arg`` (defaulting to an "unset" sentinel) and ``args``, plus a
# ``result_type`` hint for deserializing the result.
@overload
async def overwhelming_overload(
    workflow: str,
    arg: Any = _not_in_the_demo._arg_unset,
    *,
    args: Sequence[Any] = [],
    id: str,
    task_queue: str,
    result_type: Optional[type] = None,
    execution_timeout: Optional[timedelta] = None,
    run_timeout: Optional[timedelta] = None,
    task_timeout: Optional[timedelta] = None,
    id_reuse_policy: _not_in_the_demo.WorkflowIDReusePolicy = '_not_in_the_demo.WorkflowIDReusePolicy.ALLOW_DUPLICATE',
    id_conflict_policy: _not_in_the_demo.WorkflowIDConflictPolicy = '_not_in_the_demo.WorkflowIDConflictPolicy.UNSPECIFIED',
    retry_policy: Optional[_not_in_the_demo.RetryPolicy] = None,
    cron_schedule: str = "",
    memo: Optional[Mapping[str, Any]] = None,
    search_attributes: Optional[
        Union[
            _not_in_the_demo.TypedSearchAttributes,
            _not_in_the_demo.SearchAttributes,
        ]
    ] = None,
    start_delay: Optional[timedelta] = None,
    start_signal: Optional[str] = None,
    start_signal_args: Sequence[Any] = [],
    rpc_metadata: Mapping[str, str] = {},
    rpc_timeout: Optional[timedelta] = None,
    request_eager_start: bool = False,
) -> tuple[Any, Any]: ...

# Actual (non-@overload) signature. The body is a bare ``...`` stub:
# this demo function is never meant to be called.
async def overwhelming_overload(
    workflow: Union[str, Callable[..., Awaitable[Any]]],
    arg: Any = _not_in_the_demo,  # sentinel default; presumably means "arg unset" -- TODO confirm
    *,
    args: Sequence[Any] = [],
    id: str,
    task_queue: str,
    result_type: Optional[type] = None,
    execution_timeout: Optional[timedelta] = None,
    run_timeout: Optional[timedelta] = None,
    task_timeout: Optional[timedelta] = None,
    id_reuse_policy: _not_in_the_demo.WorkflowIDReusePolicy = '_not_in_the_demo.WorkflowIDReusePolicy.ALLOW_DUPLICATE',
    id_conflict_policy: _not_in_the_demo.WorkflowIDConflictPolicy = '_not_in_the_demo.WorkflowIDConflictPolicy.UNSPECIFIED',
    retry_policy: Optional[_not_in_the_demo.RetryPolicy] = None,
    cron_schedule: str = "",
    memo: Optional[Mapping[str, Any]] = None,
    search_attributes: Optional[
        Union[
            _not_in_the_demo.TypedSearchAttributes,
            _not_in_the_demo.SearchAttributes,
        ]
    ] = None,
    start_delay: Optional[timedelta] = None,
    start_signal: Optional[str] = None,
    start_signal_args: Sequence[Any] = [],
    rpc_metadata: Mapping[str, str] = {},
    rpc_timeout: Optional[timedelta] = None,
    request_eager_start: bool = False,
    stack_level: int = 2,  # NOTE(review): not documented in the Args section below -- verify intent
) -> tuple[Any, Any]:
    """
    This is a big overload taken from the source code of the temporalio SDK
    for Python. The types don't make sense: it's only to showcase a bigger
    overload.

    Start a workflow and return its handle.

    Args:
        workflow: String name or class method decorated with
            ``@workflow.run`` for the workflow to start.
        arg: Single argument to the workflow.
        args: Multiple arguments to the workflow. Cannot be set if arg is.
        id: Unique identifier for the workflow execution.
        task_queue: Task queue to run the workflow on.
        result_type: For string workflows, this can set the specific result
            type hint to deserialize into.
        execution_timeout: Total workflow execution timeout including
            retries and continue as new.
        run_timeout: Timeout of a single workflow run.
        task_timeout: Timeout of a single workflow task.
        id_reuse_policy: How already-existing IDs are treated.
        id_conflict_policy: How already-running workflows of the same ID are
            treated. Default is unspecified which effectively means fail the
            start attempt. This cannot be set if ``id_reuse_policy`` is set
            to terminate if running.
        retry_policy: Retry policy for the workflow.
        cron_schedule: See https://docs.temporal.io/docs/content/what-is-a-temporal-cron-job/
        memo: Memo for the workflow.
        search_attributes: Search attributes for the workflow. The
            dictionary form of this is deprecated, use
            :py:class:`_not_in_the_demo.TypedSearchAttributes`.
        start_delay: Amount of time to wait before starting the workflow.
            This does not work with ``cron_schedule``.
        start_signal: If present, this signal is sent as signal-with-start
            instead of traditional workflow start.
        start_signal_args: Arguments for start_signal if start_signal
            present.
        rpc_metadata: Headers used on the RPC call. Keys here override
            client-level RPC metadata keys.
        rpc_timeout: Optional RPC deadline to set for the RPC call.
        request_eager_start: Potentially reduce the latency to start this workflow by
            encouraging the server to start it on a local worker running with
            this same client.
            This is currently experimental.

    Returns:
        A workflow handle to the started workflow.

    Raises:
        temporalio.exceptions.WorkflowAlreadyStartedError: Workflow has
            already been started.
        RPCError: Workflow could not be started for some other reason.
    """
    # Intentionally unimplemented: documentation-rendering demo only.
    ...
attr2: int
2 changes: 1 addition & 1 deletion pydoctor/epydoc/markup/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -385,7 +385,7 @@ def link_to(self, target: str, label: "Flattenable", *, is_annotation: bool = Fa
should be linked to.
@param label: The label to show for the link.
@param is_annotation: Generated links will give precedence to the module
defined varaible rather the nested definitions when there are name colisions.
defined variables rather the nested definitions when there are name collisions.
@return: The link, or just the label if the target was not found.
"""

Expand Down
2 changes: 0 additions & 2 deletions pydoctor/epydoc/markup/restructuredtext.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,8 +136,6 @@ def parsed_text(text: str) -> ParsedDocstring:
set_node_attributes(document, children=[txt_node])
return ParsedRstDocstring(document, ())

# Return ``parsed_text(text)`` wrapped in a <span> carrying ``css_class``.
# Results are memoized per (text, css_class) pair via lru_cache.
# NOTE(review): caching means the identical span-wrapped ParsedDocstring
# object is shared across calls -- assumes with_tag() output is safe to
# reuse; an earlier comment here claimed the opposite, so confirm.
@lru_cache()
def parsed_text_with_css(text:str, css_class: str) -> ParsedDocstring:
    return parsed_text(text).with_tag(tags.span(class_=css_class))
Expand Down
13 changes: 4 additions & 9 deletions pydoctor/epydoc2stan.py
Original file line number Diff line number Diff line change
Expand Up @@ -1191,23 +1191,18 @@ def _colorize_signature_param(param: inspect.Parameter,
has_next: bool,
is_first: bool, ) -> ParsedDocstring:
"""
One parameter is converted to a series of ParsedDocstrings.
- one, the first, for the param name
- two others if the parameter is annotated: one for ': ' and one for the annotation
- two others if the paramter has a default value: one for ' = ' and one for the annotation
Convert a single parameter to a parsed docstring representation.
"""
kind = param.kind
result: list[ParsedDocstring] = []
if kind == _VAR_POSITIONAL:
result.append(parsed_text(f'*{param.name}'))
elif kind == _VAR_KEYWORD:
result.append(parsed_text(f'**{param.name}'))
elif is_first and _is_less_important_param(param, ctx):
result.append(parsed_text_with_css(param.name, css_class='undocumented'))
else:
if is_first and _is_less_important_param(param, ctx):
result.append(parsed_text_with_css(param.name, css_class='undocumented'))
else:
result.append(parsed_text(param.name))
result.append(parsed_text(param.name))

# Add annotation and default value
if param.annotation is not _empty:
Expand Down
2 changes: 1 addition & 1 deletion pydoctor/themes/base/apidocs.css
Original file line number Diff line number Diff line change
Expand Up @@ -257,7 +257,7 @@ ul ul ul ul ul ul ul {
word-break: break-word;
/* It does not seem to work with percentage values
so I used px values, these are just indications for the
CSS auto layour table algo not to create tables
CSS auto layout table algo not to create tables
with rather unbalanced columns width. */
max-width: 400px;
}
Expand Down

0 comments on commit cc82f10

Please sign in to comment.