Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Linting updates #4259

Draft
wants to merge 6 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 2 additions & 3 deletions .github/workflows/pylint.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,7 @@ jobs:
run: |
python -m pip install --upgrade pip
pip install ".[all]"
pip install pylint==2.14.5
pip install pylint-quotes==0.2.3
pip install pylint==$(grep 'pylint==' requirements-dev.txt | cut -d'=' -f3)
- name: Analysing the code with pylint
run: |
pylint --load-plugins pylint_quotes sky
pylint sky
13 changes: 3 additions & 10 deletions .pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -429,13 +429,6 @@ valid-metaclass-classmethod-first-arg=mcs

# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=StandardError,
Exception,
BaseException

#######

# https://github.com/edaniszewski/pylint-quotes#configuration
string-quote=single
triple-quote=double
docstring-quote=double
Comment on lines -439 to -441
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We want to keep these behaviors : )

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@Michaelvll Using Pylint with this configuration instead of pylint-quotes should work well for most cases in the existing repo:

[tool.pylint]
check-quote-consistency = true
load-plugins = ["pylint.extensions.docstyle"]

pylint-quotes is deprecated: it has had no updates in 3 years and is incompatible with pylint >= 3.0.

overgeneral-exceptions=builtins.StandardError,
builtins.Exception,
builtins.BaseException
6 changes: 0 additions & 6 deletions format.sh
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ builtin cd "$ROOT" || exit 1

YAPF_VERSION=$(yapf --version | awk '{print $2}')
PYLINT_VERSION=$(pylint --version | head -n 1 | awk '{print $2}')
PYLINT_QUOTES_VERSION=$(pip list | grep pylint-quotes | awk '{print $2}')
MYPY_VERSION=$(mypy --version | awk '{print $2}')
BLACK_VERSION=$(black --version | head -n 1 | awk '{print $2}')

Expand All @@ -37,7 +36,6 @@ tool_version_check() {

tool_version_check "yapf" $YAPF_VERSION "$(grep yapf requirements-dev.txt | cut -d'=' -f3)"
tool_version_check "pylint" $PYLINT_VERSION "$(grep "pylint==" requirements-dev.txt | cut -d'=' -f3)"
tool_version_check "pylint-quotes" $PYLINT_QUOTES_VERSION "$(grep "pylint-quotes==" requirements-dev.txt | cut -d'=' -f3)"
tool_version_check "mypy" "$MYPY_VERSION" "$(grep mypy requirements-dev.txt | cut -d'=' -f3)"
tool_version_check "black" "$BLACK_VERSION" "$(grep black requirements-dev.txt | cut -d'=' -f3)"

Expand All @@ -60,10 +58,6 @@ BLACK_INCLUDES=(
'sky/skylet/providers/ibm'
)

PYLINT_FLAGS=(
'--load-plugins' 'pylint_quotes'
)

# Format specified files
format() {
yapf --in-place "${YAPF_FLAGS[@]}" "$@"
Expand Down
7 changes: 3 additions & 4 deletions requirements-dev.txt
Original file line number Diff line number Diff line change
@@ -1,22 +1,21 @@
# formatting
yapf==0.32.0
pylint==2.14.5
pylint==3.2.7
# formatting the node_providers code from upstream ray-project/ray project
black==22.10.0
# https://github.com/edaniszewski/pylint-quotes
pylint-quotes==0.2.3
toml==0.10.2
isort==5.12.0

# type checking
mypy==0.991
mypy==1.4.1
types-PyYAML
# 2.31 requires urllib3>2, which is incompatible with SkyPilot, IBM and
# kubernetes packages, which require urllib3<2.
types-requests<2.31
types-setuptools
types-cachetools
types-pyvmomi
types-networkx

# testing
pytest
Expand Down
8 changes: 5 additions & 3 deletions sky/backends/cloud_vm_ray_backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -2475,10 +2475,12 @@ def num_ips_per_node(self) -> int:
num_ips = 1
return num_ips

def __setstate__(self, state):
def __setstate__(self, state: Dict[str, Any]):
self._version = self._VERSION

version = state.pop('_version', None)
head_ip: Optional[str] = None

if version is None:
version = -1
state.pop('cluster_region', None)
Expand All @@ -2502,7 +2504,6 @@ def __setstate__(self, state):
if version < 9:
# For backward compatibility, we should update the region of a
# SkyPilot cluster on Kubernetes to the actual context it is using.
# pylint: disable=import-outside-toplevel
launched_resources = state['launched_resources']
if isinstance(launched_resources.cloud, clouds.Kubernetes):
yaml_config = common_utils.read_yaml(
Expand Down Expand Up @@ -3272,7 +3273,7 @@ def _exec_code_on_head(
mkdir_code = (f'{cd} && mkdir -p {remote_log_dir} && '
f'touch {remote_log_path}')
encoded_script = shlex.quote(codegen)
create_script_code = (f'{{ echo {encoded_script} > {script_path}; }}')
create_script_code = f'{{ echo {encoded_script} > {script_path}; }}'
job_submit_cmd = (
f'RAY_DASHBOARD_PORT=$({constants.SKY_PYTHON_CMD} -c "from sky.skylet import job_lib; print(job_lib.get_job_submission_port())" 2> /dev/null || echo 8265);' # pylint: disable=line-too-long
f'{cd} && {constants.SKY_RAY_CMD} job submit '
Expand Down Expand Up @@ -3829,6 +3830,7 @@ def teardown_no_lock(self,
RuntimeError: If the cluster fails to be terminated/stopped.
"""
cluster_status_fetched = False
prev_cluster_status = None
if refresh_cluster_status:
try:
prev_cluster_status, _ = (
Expand Down
11 changes: 7 additions & 4 deletions sky/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -210,7 +210,7 @@ def _merge_env_vars(env_dict: Optional[Dict[str, str]],
default=None,
type=int,
required=False,
help=('OS disk size in GBs.')),
help='OS disk size in GBs.'),
click.option('--disk-tier',
default=None,
type=click.Choice(resources_utils.DiskTier.supported_tiers(),
Expand Down Expand Up @@ -392,7 +392,8 @@ def _install_shell_completion(ctx: click.Context, param: click.Parameter,
else:
click.secho(f'Unsupported shell: {value}', fg='red')
ctx.exit()

# Though `ctx.exit()` is already `NoReturn`, we need to make pylint happy.
assert False, 'Unreachable'
try:
subprocess.run(cmd,
shell=True,
Expand Down Expand Up @@ -447,6 +448,8 @@ def _uninstall_shell_completion(ctx: click.Context, param: click.Parameter,
else:
click.secho(f'Unsupported shell: {value}', fg='red')
ctx.exit()
# Though `ctx.exit()` is already `NoReturn`, we need to make pylint happy.
assert False, 'Unreachable'

try:
subprocess.run(cmd, shell=True, check=True)
Expand Down Expand Up @@ -3530,7 +3533,7 @@ def jobs():
default=None,
type=str,
hidden=True,
help=('Alias for --name, the name of the managed job.'))
help='Alias for --name, the name of the managed job.')
@click.option('--job-recovery',
default=None,
type=str,
Expand Down Expand Up @@ -4544,7 +4547,7 @@ def serve_logs(
sky serve logs [SERVICE_NAME] 1
"""
have_replica_id = replica_id is not None
num_flags = (controller + load_balancer + have_replica_id)
num_flags = controller + load_balancer + have_replica_id
if num_flags > 1:
raise click.UsageError('At most one of --controller, --load-balancer, '
'[REPLICA_ID] can be specified.')
Expand Down
11 changes: 7 additions & 4 deletions sky/dag.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,9 @@
import pprint
import threading
import typing
from typing import List, Optional
from typing import Any, List, Optional

import networkx as nx

if typing.TYPE_CHECKING:
from sky import task
Expand All @@ -19,9 +21,8 @@ class Dag:

def __init__(self) -> None:
self.tasks: List['task.Task'] = []
import networkx as nx # pylint: disable=import-outside-toplevel

self.graph = nx.DiGraph()
self.graph: nx.DiGraph = nx.DiGraph()
self.name: Optional[str] = None

def add(self, task: 'task.Task') -> None:
Expand All @@ -44,7 +45,8 @@ def __enter__(self) -> 'Dag':
push_dag(self)
return self

def __exit__(self, exc_type, exc_value, traceback) -> None:
def __exit__(self, exec_type: Any, exec_val: Any, exec_tb: Any) -> None:
"""Exit the runtime context related to this object."""
pop_dag()

def __repr__(self) -> str:
Expand All @@ -60,6 +62,7 @@ def is_chain(self) -> bool:
visited_zero_out_degree = False
for node in self.graph.nodes:
out_degree = self.graph.out_degree(node)
assert isinstance(out_degree, int)
if out_degree > 1:
is_chain = False
break
Expand Down
20 changes: 12 additions & 8 deletions sky/jobs/controller.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import time
import traceback
import typing
from typing import Tuple
from typing import List, Tuple

import filelock

Expand All @@ -28,6 +28,8 @@
from sky.utils import ux_utils

if typing.TYPE_CHECKING:
import networkx as nx

import sky

# Use the explicit logger name so that the logger is under the
Expand Down Expand Up @@ -55,11 +57,11 @@ def __init__(self, job_id: int, dag_yaml: str,
# TODO(zhwu): this assumes the specific backend.
self._backend = cloud_vm_ray_backend.CloudVmRayBackend()

# pylint: disable=line-too-long
# Add a unique identifier to the task environment variables, so that
# the user can have the same id for multiple recoveries.
# Example value: sky-2022-10-04-22-46-52-467694_my-spot-name_spot_id-17-0
job_id_env_vars = []
# Example value:
# sky-2022-10-04-22-46-52-467694_my-spot-name_spot_id-17-0
job_id_env_vars: List[str] = []
for i, task in enumerate(self._dag.tasks):
if len(self._dag.tasks) <= 1:
task_name = self._dag_name
Expand Down Expand Up @@ -416,7 +418,7 @@ def _run_controller(job_id: int, dag_yaml: str, retry_until_up: bool):
jobs_controller.run()


def _handle_signal(job_id):
def _handle_signal(job_id: int) -> None:
"""Handle the signal if the user sent it."""
signal_file = pathlib.Path(
managed_job_utils.SIGNAL_FILE_PREFIX.format(job_id))
Expand All @@ -426,9 +428,9 @@ def _handle_signal(job_id):
# signal writing.
with filelock.FileLock(str(signal_file) + '.lock'):
with signal_file.open(mode='r', encoding='utf-8') as f:
user_signal = f.read().strip()
user_signal_str = f.read().strip()
try:
user_signal = managed_job_utils.UserSignal(user_signal)
user_signal = managed_job_utils.UserSignal(user_signal_str)
except ValueError:
logger.warning(
f'Unknown signal received: {user_signal_str}. Ignoring.')
Expand Down Expand Up @@ -469,7 +471,7 @@ def _cleanup(job_id: int, dag_yaml: str):
backend.teardown_ephemeral_storage(task)


def start(job_id, dag_yaml, retry_until_up):
def start(job_id: int, dag_yaml: str, retry_until_up: bool) -> None:
"""Start the controller."""
controller_process = None
cancelling = False
Expand All @@ -493,6 +495,7 @@ def start(job_id, dag_yaml, retry_until_up):
task_id, _ = managed_job_state.get_latest_task_id_status(job_id)
logger.info(
f'Cancelling managed job, job_id: {job_id}, task_id: {task_id}')
assert task_id is not None
managed_job_state.set_cancelling(
job_id=job_id,
callback_func=managed_job_utils.event_callback_func(
Expand Down Expand Up @@ -521,6 +524,7 @@ def start(job_id, dag_yaml, retry_until_up):
_cleanup(job_id, dag_yaml=dag_yaml)
logger.info(f'Cluster of managed job {job_id} has been cleaned up.')

assert task_id is not None
if cancelling:
managed_job_state.set_cancelled(
job_id=job_id,
Expand Down
5 changes: 3 additions & 2 deletions sky/jobs/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -237,7 +237,7 @@ def cancel_jobs_by_id(job_ids: Optional[List[int]]) -> str:
return 'No job to cancel.'
job_id_str = ', '.join(map(str, job_ids))
logger.info(f'Cancelling jobs {job_id_str}.')
cancelled_job_ids = []
cancelled_job_ids: List[int] = []
for job_id in job_ids:
# Check the status of the managed job status. If it is in
# terminal state, we can safely skip it.
Expand Down Expand Up @@ -490,7 +490,8 @@ def stream_logs(job_id: Optional[int],
if controller:
if job_id is None:
assert job_name is not None
managed_jobs = managed_job_state.get_managed_jobs()
managed_jobs: List[Dict[
str, Any]] = managed_job_state.get_managed_jobs()
# We manually filter the jobs by name, instead of using
# get_nonterminal_job_ids_by_name, as with `controller=True`, we
# should be able to show the logs for jobs in terminal states.
Expand Down
3 changes: 2 additions & 1 deletion sky/optimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -435,6 +435,7 @@ def _optimize_by_dp(
# FIXME: Account for egress costs for multi-node clusters
for resources, execution_cost in node_to_cost_map[node].items():
min_pred_cost_plus_egress = np.inf
best_parent_hardware = resources_lib.Resources()
for parent_resources, parent_cost in \
dp_best_objective[parent].items():
egress_cost = Optimizer._egress_cost_or_time(
Expand Down Expand Up @@ -529,7 +530,7 @@ def _optimize_by_ilp(

# Prepare the constants.
V = topo_order # pylint: disable=invalid-name
E = graph.edges() # pylint: disable=invalid-name
E: List[Tuple[Any, Any]] = list(graph.edges()) # pylint: disable=invalid-name
k = {
node: list(resource_cost_map.values())
for node, resource_cost_map in node_to_cost_map.items()
Expand Down
4 changes: 2 additions & 2 deletions sky/serve/autoscalers.py
Original file line number Diff line number Diff line change
Expand Up @@ -320,8 +320,8 @@ def select_outdated_replicas_to_scale_down(
"""Select outdated replicas to scale down."""

if self.update_mode == serve_utils.UpdateMode.ROLLING:
latest_ready_replicas = []
old_nonterminal_replicas = []
latest_ready_replicas: List['replica_managers.ReplicaInfo'] = []
old_nonterminal_replicas: List['replica_managers.ReplicaInfo'] = []
for info in replica_infos:
if info.version == self.latest_version:
if info.is_ready:
Expand Down
4 changes: 4 additions & 0 deletions sky/serve/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,3 +100,7 @@
TERMINATE_REPLICA_VERSION_MISMATCH_ERROR = (
'The version of service is outdated and does not support manually '
'terminating replicas. Please terminate the service and spin up again.')

# Default timeout in seconds for HTTP requests to avoid hanging indefinitely.
# This is used for internal service communication requests.
DEFAULT_HTTP_REQUEST_TIMEOUT_SECONDS = 30
4 changes: 0 additions & 4 deletions sky/serve/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -687,10 +687,6 @@ def tail_logs(
"""
if isinstance(target, str):
target = serve_utils.ServiceComponent(target)
if not isinstance(target, serve_utils.ServiceComponent):
with ux_utils.print_exception_no_traceback():
raise ValueError(f'`target` must be a string or '
f'sky.serve.ServiceComponent, got {type(target)}.')
if target == serve_utils.ServiceComponent.REPLICA:
if replica_id is None:
with ux_utils.print_exception_no_traceback():
Expand Down
8 changes: 4 additions & 4 deletions sky/serve/load_balancer.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import asyncio
import logging
import threading
from typing import Dict, Union
from typing import Coroutine, Dict, List, NoReturn, Union

import aiohttp
import fastapi
Expand Down Expand Up @@ -55,7 +55,7 @@ def __init__(self, controller_url: str, load_balancer_port: int) -> None:
# updating it from _sync_with_controller.
self._client_pool_lock: threading.Lock = threading.Lock()

async def _sync_with_controller(self):
async def _sync_with_controller(self) -> NoReturn:
"""Sync with controller periodically.

Every `constants.LB_CONTROLLER_SYNC_INTERVAL_SECONDS` seconds, the
Expand All @@ -68,7 +68,7 @@ async def _sync_with_controller(self):
await asyncio.sleep(5)

while True:
close_client_tasks = []
close_client_tasks: List[Coroutine[None, None, None]] = []
async with aiohttp.ClientSession() as session:
try:
# Send request information
Expand Down Expand Up @@ -101,7 +101,7 @@ async def _sync_with_controller(self):
httpx.AsyncClient(base_url=replica_url))
urls_to_close = set(
self._client_pool.keys()) - set(ready_replica_urls)
client_to_close = []
client_to_close: List[httpx.AsyncClient] = []
for replica_url in urls_to_close:
client_to_close.append(
self._client_pool.pop(replica_url))
Expand Down
Loading
Loading