exclude doc_code from import sorting (#25772)

Skip sorting the imports in files under doc_code: a new skip_glob entry in .isort.cfg keeps isort away from those snippets, and the lint script excludes the same paths from its git file listing via a pathspec.
Authored by clarng on 2022-06-15 11:34:45 -07:00; committed by GitHub.
parent 8982e4d78c
commit ef866d1e49
20 changed files with 64 additions and 46 deletions

View file

@@ -9,6 +9,7 @@ use_parentheses=True
 float_to_top=True
+skip_glob=doc/**/doc_code/*
 known_local_folder=ray
 known_afterray=psutil,setproctitle
 sections=FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER,AFTERRAY
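The skip_glob entry is the config-level half of the change: isort now passes over every doc_code snippet it finds while walking doc/. As a sketch, the effect can be checked from a checkout (the paths come from this config, the flags from stock isort):

    # Files matching skip_glob are reported as skipped rather than re-sorted:
    isort --check-only --diff doc/

    # The same exclusion can also be given ad hoc on the command line:
    isort --check-only --skip-glob 'doc/**/doc_code/*' doc/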

View file

@@ -148,8 +148,14 @@ MYPY_FILES=(
 ISORT_PATHS=(
     # TODO: Expand this list and remove once it is applied to the entire codebase.
     'python/ray/autoscaler/_private/'
+    'doc/'
 )
+
+ISORT_GIT_LS_EXCLUDES=(
+    ':(exclude)doc/**/doc_code/*'
+)
+
 BLACK_EXCLUDES=(
     '--force-exclude' 'python/ray/cloudpickle/*'
     '--force-exclude' 'python/build/*'
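ISORT_GIT_LS_EXCLUDES holds a git pathspec, not a shell glob: the ':(exclude)' magic prefix subtracts matching paths from whatever the rest of a git command selects. A minimal sketch of the mechanism, runnable from the repo root:

    # All tracked Python files, minus the doc_code snippets:
    git ls-files -- '*.py' ':(exclude)doc/**/doc_code/*'

The array is expanded quoted as "${ISORT_GIT_LS_EXCLUDES[@]}" below, so the pathspec reaches git unmangled.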
@@ -316,8 +322,8 @@ format_changed() {
     # exist on both branches.
     MERGEBASE="$(git merge-base upstream/master HEAD)"
 
-    if ! git diff --diff-filter=ACRM --quiet --exit-code "$MERGEBASE" -- '*.py' &>/dev/null; then
-        git diff --name-only --diff-filter=ACRM "$MERGEBASE" -- '*.py' | xargs -P 5 \
+    if ! git diff --diff-filter=ACRM --quiet --exit-code "$MERGEBASE" -- '*.py' "${ISORT_GIT_LS_EXCLUDES[@]}" &>/dev/null; then
+        git diff --name-only --diff-filter=ACRM "$MERGEBASE" -- '*.py' "${ISORT_GIT_LS_EXCLUDES[@]}" | xargs -P 5 \
             isort
     fi
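For context, the updated guard can be reproduced stand-alone; a sketch assuming the same upstream remote the script uses:

    MERGEBASE="$(git merge-base upstream/master HEAD)"

    # --diff-filter=ACRM keeps Added/Copied/Renamed/Modified paths, so deleted
    # files never reach isort; the exclude pathspec drops the doc_code snippets.
    git diff --name-only --diff-filter=ACRM "$MERGEBASE" -- '*.py' \
        ':(exclude)doc/**/doc_code/*' | xargs -P 5 isort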

View file

@@ -1,6 +1,7 @@
+from collections import Counter
 import sys
 import time
-from collections import Counter
+
 import ray
 
 """ This script is meant to be run from a pod in the same Kubernetes namespace

View file

@@ -1,6 +1,7 @@
+from collections import Counter
 import sys
 import time
-from collections import Counter
+
 import ray
 
 """ Run this script locally to execute a Ray program on your Ray cluster on

View file

@@ -1,6 +1,7 @@
+from collections import Counter
 import sys
 import time
-from collections import Counter
+
 import ray
 
 # Run this script on the Ray head node using kubectl exec.

View file

@@ -1,9 +1,11 @@
+# trainer.py
+from collections import Counter
 import os
 import socket
 import sys
 import time
-# trainer.py
-from collections import Counter
+
 import ray
 
 num_cpus = int(sys.argv[1])

View file

@@ -7,7 +7,6 @@ import argparse
 import subprocess
 import sys
 import time
-
 from pathlib import Path
 
 template_file = Path(__file__) / "slurm-template.sh"

View file

@@ -1,11 +1,15 @@
+# isort: skip_file
-import os
-import sys
-from datetime import datetime
 # -*- coding: utf-8 -*-
 from pathlib import Path
+import sys
+import os
 
 sys.path.insert(0, os.path.abspath("."))
 from custom_directives import *
+from datetime import datetime
 
 # Mocking modules allows Sphinx to work without installing Ray.
 mock_modules()
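conf.py gets the opposite treatment: instead of a glob in the config, the in-file action comment '# isort: skip_file' tells isort to leave the whole module alone. That matters here because sys.path.insert(0, ...) must run before custom_directives can be imported, an order no sorter should be allowed to change. A hedged check (the file's path is assumed, since this diff does not show filenames):

    # isort honors the directive and treats the file as skipped:
    isort --check-only doc/source/conf.py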

View file

@@ -1,20 +1,21 @@
-from pathlib import Path
-import urllib
-import urllib.request
-import requests
-import mock
-import sys
-from preprocess_github_markdown import preprocess_github_markdown_file
-from sphinx.util import logging as sphinx_logging
 import logging
 import logging.handlers
+import sys
+import urllib
+import urllib.request
+from pathlib import Path
 from queue import Queue
-from sphinx.util.console import red  # type: ignore
+import requests
+import scipy.linalg  # noqa: F401
 # Note: the scipy import has to stay here, it's used implicitly down the line
 import scipy.stats  # noqa: F401
-import scipy.linalg  # noqa: F401
+from preprocess_github_markdown import preprocess_github_markdown_file
+from sphinx.util import logging as sphinx_logging
+from sphinx.util.console import red  # type: ignore
+import mock
 
 __all__ = [
     "fix_xgb_lgbm_docs",

View file

@@ -1,8 +1,7 @@
-from typing import Optional
-import re
 import argparse
 import pathlib
+import re
+from typing import Optional
 
 def preprocess_github_markdown_file(source_path: str, dest_path: Optional[str] = None):

View file

@@ -18,22 +18,23 @@ from typing import Tuple
 import boto3
 import mlflow
 import pandas as pd
-import ray
 import torch
 import torch.nn as nn
 import torch.optim as optim
+from torch.nn.parallel import DistributedDataParallel
+
+import ray
 from ray import train
 from ray.data.aggregate import Mean, Std
 from ray.train import Trainer
-from ray.train.callbacks.logging import MLflowLoggerCallback
 from ray.train.callbacks import TBXLoggerCallback
-from torch.nn.parallel import DistributedDataParallel
+from ray.train.callbacks.logging import MLflowLoggerCallback
 
 def make_and_upload_dataset(dir_path):
-    import random
     import os
+    import random
     import pandas as pd
     import sklearn.datasets
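Note that isort also fixed the imports nested inside make_and_upload_dataset: sorting applies to indented import blocks inside functions, not just the module header. A scratch-file sketch:

    printf 'def f():\n    import random\n    import os\n' > /tmp/nested.py
    isort /tmp/nested.py && cat /tmp/nested.py   # os now precedes random inside f()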

View file

@@ -58,7 +58,7 @@ import time
 import dask
 import dask.dataframe as dd
-from xgboost_ray import RayDMatrix, RayParams, train, predict
+from xgboost_ray import RayDMatrix, RayParams, predict, train
 
 import ray
 from ray import tune

View file

@@ -1,7 +1,7 @@
+import numpy as np
 import os
-import scipy.optimize
-import numpy as np
+import scipy.optimize
 import tensorflow as tf
 from tensorflow.examples.tutorials.mnist import input_data

View file

@@ -1,16 +1,16 @@
 #!/usr/bin/env python3 -u
-import math
 import copy
+import math
 import socket
 import time
-import ray
+from contextlib import closing
 
 import fairseq
 from fairseq import options
 from fairseq_cli.train import main
-from contextlib import closing
+
+import ray
 
 _original_save_checkpoint = fairseq.checkpoint_utils.save_checkpoint

View file

@@ -58,7 +58,7 @@ import time
 import modin.pandas as pd
 from modin.experimental.sklearn.model_selection import train_test_split
-from xgboost_ray import RayDMatrix, RayParams, train, predict
+from xgboost_ray import RayDMatrix, RayParams, predict, train
 
 import ray

View file

@@ -1,6 +1,7 @@
-import ray
 import time
+
+import ray
 
 # By adding the `@ray.remote` decorator, a regular Python function
 # becomes a Ray remote function.

View file

@@ -1,10 +1,9 @@
-# TODO: actually use this to predict something
-import ray
-from ray import serve
 from fastapi import FastAPI
 from transformers import pipeline
 
+# TODO: actually use this to predict something
+import ray
+from ray import serve
 
 app = FastAPI()

View file

@@ -1,6 +1,7 @@
+import torch
+
 import ray.train as train
 from ray.train import Trainer
-import torch
 
 def train_func():

View file

@ -1,10 +1,10 @@
"""Convert a jupytext-compliant format in to a python script
and execute it with parsed arguments."""
import subprocess
import argparse
import tempfile
import subprocess
import sys
import tempfile
from pathlib import Path
import jupytext

View file

@@ -1,6 +1,7 @@
+from collections import Counter
 import sys
 import time
-from collections import Counter
+
 import ray