Fix a subset of existing lint issues #2252

Open · wants to merge 1 commit into base: master
2 changes: 1 addition & 1 deletion .bazelrc
@@ -90,7 +90,7 @@ build --experimental_cc_shared_library
# uses TensorFlow for all IO operations.
build --define=use_tensorflow_io=1

# TensorFlow Decision Forests need absl::anyInvocable support for new versions
# TensorFlow Decision Forests need absl::anyInvocable support for new versions
# of Tensorflow.
build --define use_absl_anyinvocable=1

1 change: 0 additions & 1 deletion WORKSPACE
@@ -130,4 +130,3 @@ load(
)

nccl_configure(name = "local_config_nccl")

123 changes: 123 additions & 0 deletions ruff.toml
@@ -0,0 +1,123 @@
line-length = 88

[lint]
select = [
# pycodestyle
"E",
"W",
# Pyflakes
"F",
# pyupgrade
"UP",
# flake8-bugbear
"B",
# flake8-simplify
"SIM",
# isort
"I",
# pep8 naming
"N",
# pydocstyle
"D",
# annotations
"ANN",
# debugger
"T10",
# flake8-pytest
"PT",
# flake8-return
"RET",
# flake8-unused-arguments
"ARG",
# flake8-fixme
"FIX",
# flake8-eradicate
"ERA",
# pandas-vet
"PD",
# numpy-specific rules
"NPY",
]
ignore = [
"D104", # Missing docstring in public package
"D100", # Missing docstring in public module
"D211", # No blank line before class
"D213", # Multiline summary second line
"PD901", # Avoid using 'df' for pandas dataframes. Perfectly fine in functions with limited scope
"ANN201", # Missing return type annotation for public function (makes no sense for NoneType return types...)
"ANN101", # Missing type annotation for `self`
"ANN102", # Missing type annotation for `cls` in classmethod
"ANN204", # Missing return type annotation for special method
"ANN206", # Missing return type annotation for classmethod
"ANN002", # Missing type annotation for `*args`
"ANN003", # Missing type annotation for `**kwargs`
"D105", # Missing docstring in magic method
"D203", # 1 blank line before after class docstring
"D204", # 1 blank line required after class docstring
"D413", # 1 blank line after parameters
"SIM108", # Simplify if/else to one line; not always clearer
"D206", # Advised to disable by ruff-format
"E501", # Advised to disable by ruff-format
"W191", # Advised to disable by ruff-format
"N802", # Function name should be lowercase; unittest uses mixed case
"D107", # Missing docstring in `__init__`

# These are issues which remain to be fixed
"D102", # Missing docstring in public method
"D103", # Missing docstring in public function
"D401", # First line of docstring should be in imperative mood
"D404", # First word of the docstring should not be "This"
"ANN001", # Missing type annotation for function argument
"ANN202", # Missing return type for private function
"ANN205", # Missing return type for staticmethod
"B024", # <class> is an abstract base class, but it has no abstract methods
"FIX002", # Line contains TODO, consider resolving the issue
"ANN401", # Dynamically typed expressions (typing.Any) are disallowed
"UP008", # Use `super()` instead of `super(__class__, self)`
"SIM102", # Use a single `if` statement instead of nested `if` statements
"UP031", # Use format specifiers instead of percent format
"SIM115", # Use context handler for opening files
"B008", # Do not perform function call in argument defaults
"ARG001", # Unused function argument
"F841", # Local variable is assigned to but never used
"ERA001", # Found commented-out code
"RET505", # Unnecessary `else` after `return` statement
"RET503", # Missing explicit `return` at the end of function able to return non-`None` value
"UP029", # Unnecessary builtin import
"ARG005", # Unused lambda argument
"E402", # Module level import not at top of file
"F403", # Wildcard import
"NPY002", # Replace legacy numpy call
"D200", # One-line docstring should fit on one line
"D205", # 1 blank line required between summary line and description
"RET504", # Unnecessary assignment before return
"ARG002", # Unused method argument
]

[lint.per-file-ignores]
"__init__.py" = [
"F401", # Unused import
]
"*_test_base.py" = [
"ANN001", # Type annotations aren't needed for tests; these are fixtures or parametrizations
"PT009", # Use a regular `assert` instead of a unittest-style `assertEqual`
"PT027", # Use `pytest.raises` instead of unittest-style `assertRaisesRegex`

# Missing docstrings; probably want to fill these out for tests. For now, we just disable
"D101", # Missing docstring in public class
"D102", # Missing docstring in public method

]
"*_test.py" = [
"ANN001", # Type annotations aren't needed for tests; these are fixtures or parametrizations
"PT009", # Use a regular `assert` instead of a unittest-style `assertEqual`
"PT027", # Use `pytest.raises` instead of unittest-style `assertRaisesRegex`

# Missing docstrings; probably want to fill these out for tests. For now, we just disable
"D101", # Missing docstring in public class
"D102", # Missing docstring in public method

]

[lint.pydocstyle]
convention = "google"
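
For context, a minimal sketch (not part of this PR) of the kinds of rewrites these rules drive in the diffs below. The rule codes are standard ruff codes; the before/after lines mirror the generated protobuf modules further down, and the import wrapping likely comes from the isort rules ("I") together with line-length = 88.

# Illustration only: UP004 (pyupgrade) drops the redundant `object` base class.
# Before:  class ModelServiceStub(object): ...
class ModelServiceStub:
    """Stub placeholder used only for this illustration."""


# Illustration only: the isort rules, with line-length = 88, wrap a long
# aliased import in parentheses instead of leaving one over-long line.
# Before:
#   from tensorflow_serving.apis import get_model_status_pb2 as tensorflow__serving_dot_apis_dot_get__model__status__pb2
# After:
#   from tensorflow_serving.apis import (
#       get_model_status_pb2 as tensorflow__serving_dot_apis_dot_get__model__status__pb2,
#   )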
14 changes: 9 additions & 5 deletions tensorflow_serving/apis/model_service_pb2.py
@@ -18,19 +18,23 @@
# python -m grpc.tools.protoc --python_out=. --grpc_python_out=. -I. tensorflow_serving/apis/model_service.proto

import sys

_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
from google.protobuf import symbol_database as _symbol_database

# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from tensorflow_serving.apis import get_model_status_pb2 as tensorflow__serving_dot_apis_dot_get__model__status__pb2
from tensorflow_serving.apis import model_management_pb2 as tensorflow__serving_dot_apis_dot_model__management__pb2
from tensorflow_serving.apis import (
get_model_status_pb2 as tensorflow__serving_dot_apis_dot_get__model__status__pb2,
)
from tensorflow_serving.apis import (
model_management_pb2 as tensorflow__serving_dot_apis_dot_model__management__pb2,
)

DESCRIPTOR = _descriptor.FileDescriptor(
name='tensorflow_serving/apis/model_service.proto',
12 changes: 8 additions & 4 deletions tensorflow_serving/apis/model_service_pb2_grpc.py
@@ -19,11 +19,15 @@

import grpc

from tensorflow_serving.apis import get_model_status_pb2 as tensorflow__serving_dot_apis_dot_get__model__status__pb2
from tensorflow_serving.apis import model_management_pb2 as tensorflow__serving_dot_apis_dot_model__management__pb2
from tensorflow_serving.apis import (
get_model_status_pb2 as tensorflow__serving_dot_apis_dot_get__model__status__pb2,
)
from tensorflow_serving.apis import (
model_management_pb2 as tensorflow__serving_dot_apis_dot_model__management__pb2,
)


class ModelServiceStub(object):
class ModelServiceStub:
"""ModelService provides methods to query and update the state of the server,
e.g. which models/versions are being served.
"""
@@ -50,7 +54,7 @@ def __init__(self, channel):
)


class ModelServiceServicer(object):
class ModelServiceServicer:
"""ModelService provides methods to query and update the state of the server,
e.g. which models/versions are being served.
"""
27 changes: 18 additions & 9 deletions tensorflow_serving/apis/prediction_service_pb2.py
@@ -22,23 +22,32 @@
# source: tensorflow_serving/apis/prediction_service.proto

import sys

_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
from google.protobuf import symbol_database as _symbol_database

# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from tensorflow_serving.apis import classification_pb2 as tensorflow__serving_dot_apis_dot_classification__pb2
from tensorflow_serving.apis import get_model_metadata_pb2 as tensorflow__serving_dot_apis_dot_get__model__metadata__pb2
from tensorflow_serving.apis import inference_pb2 as tensorflow__serving_dot_apis_dot_inference__pb2
from tensorflow_serving.apis import predict_pb2 as tensorflow__serving_dot_apis_dot_predict__pb2
from tensorflow_serving.apis import regression_pb2 as tensorflow__serving_dot_apis_dot_regression__pb2

from tensorflow_serving.apis import (
classification_pb2 as tensorflow__serving_dot_apis_dot_classification__pb2,
)
from tensorflow_serving.apis import (
get_model_metadata_pb2 as tensorflow__serving_dot_apis_dot_get__model__metadata__pb2,
)
from tensorflow_serving.apis import (
inference_pb2 as tensorflow__serving_dot_apis_dot_inference__pb2,
)
from tensorflow_serving.apis import (
predict_pb2 as tensorflow__serving_dot_apis_dot_predict__pb2,
)
from tensorflow_serving.apis import (
regression_pb2 as tensorflow__serving_dot_apis_dot_regression__pb2,
)

DESCRIPTOR = _descriptor.FileDescriptor(
name='tensorflow_serving/apis/prediction_service.proto',
24 changes: 17 additions & 7 deletions tensorflow_serving/apis/prediction_service_pb2_grpc.py
@@ -18,14 +18,24 @@
# python -m grpc.tools.protoc --python_out=. --grpc_python_out=. -I. tensorflow_serving/apis/prediction_service.proto
import grpc

from tensorflow_serving.apis import classification_pb2 as tensorflow__serving_dot_apis_dot_classification__pb2
from tensorflow_serving.apis import get_model_metadata_pb2 as tensorflow__serving_dot_apis_dot_get__model__metadata__pb2
from tensorflow_serving.apis import inference_pb2 as tensorflow__serving_dot_apis_dot_inference__pb2
from tensorflow_serving.apis import predict_pb2 as tensorflow__serving_dot_apis_dot_predict__pb2
from tensorflow_serving.apis import regression_pb2 as tensorflow__serving_dot_apis_dot_regression__pb2
from tensorflow_serving.apis import (
classification_pb2 as tensorflow__serving_dot_apis_dot_classification__pb2,
)
from tensorflow_serving.apis import (
get_model_metadata_pb2 as tensorflow__serving_dot_apis_dot_get__model__metadata__pb2,
)
from tensorflow_serving.apis import (
inference_pb2 as tensorflow__serving_dot_apis_dot_inference__pb2,
)
from tensorflow_serving.apis import (
predict_pb2 as tensorflow__serving_dot_apis_dot_predict__pb2,
)
from tensorflow_serving.apis import (
regression_pb2 as tensorflow__serving_dot_apis_dot_regression__pb2,
)


class PredictionServiceStub(object):
class PredictionServiceStub:
"""open source marker; do not remove
PredictionService provides access to machine-learned models loaded by
model_servers.
@@ -64,7 +74,7 @@ def __init__(self, channel):
)


class PredictionServiceServicer(object):
class PredictionServiceServicer:
"""open source marker; do not remove
PredictionService provides access to machine-learned models loaded by
model_servers.
@@ -14,6 +14,7 @@
# ==============================================================================

import tensorflow.compat.v1 as tf

FLAGS = tf.app.flags.FLAGS

tf.app.flags.DEFINE_string("output_dir", "/tmp/matrix_half_plus_two/1",
@@ -22,13 +23,14 @@

def _generate_saved_model_for_matrix_half_plus_two(export_dir):
"""Creates SavedModel for half plus two model that accepts batches of
3*3 matrices.
The model divides all elements in each matrix by 2 and adds 2 to them.
So, for one input matrix [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
the result will be [[2.5, 3, 3.5], [4, 4.5, 5], [5.5, 6, 6.5]].
Args:
export_dir: The directory where to write SavedModel files.
"""
3*3 matrices.
The model divides all elements in each matrix by 2 and adds 2 to them.
So, for one input matrix [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
the result will be [[2.5, 3, 3.5], [4, 4.5, 5], [5.5, 6, 6.5]].

Args:
export_dir: The directory where to write SavedModel files.
"""
builder = tf.saved_model.builder.SavedModelBuilder(export_dir)
with tf.Session() as session:
x = tf.placeholder(tf.float32, shape=[None, 3, 3], name="x")
11 changes: 4 additions & 7 deletions tensorflow_serving/example/mnist_client.py
@@ -25,21 +25,17 @@
mnist_client.py --num_tests=100 --server=localhost:9000
"""

from __future__ import print_function

import sys
import threading

# This is a placeholder for a Google-internal import.

import grpc
import mnist_input_data
import numpy
import tensorflow as tf

from tensorflow_serving.apis import predict_pb2
from tensorflow_serving.apis import prediction_service_pb2_grpc
import mnist_input_data

from tensorflow_serving.apis import predict_pb2, prediction_service_pb2_grpc

tf.compat.v1.app.flags.DEFINE_integer(
'concurrency', 1, 'maximum number of concurrent inference requests')
@@ -50,7 +46,7 @@
FLAGS = tf.compat.v1.app.flags.FLAGS


class _ResultCounter(object):
class _ResultCounter:
"""Counter for the prediction results."""

def __init__(self, num_tests, concurrency):
@@ -94,6 +90,7 @@ def _create_rpc_callback(label, result_counter):
Args:
label: The correct label for the predicted example.
result_counter: Counter for the prediction result.

Returns:
The callback function.
"""
6 changes: 2 additions & 4 deletions tensorflow_serving/example/mnist_input_data.py
@@ -17,7 +17,6 @@

"""Functions for downloading and reading MNIST data."""

from __future__ import print_function

import gzip
import os
@@ -95,12 +94,11 @@ def extract_labels(filename, one_hot=False):
return labels


class DataSet(object):
class DataSet:
"""Class encompassing test, validation and training MNIST data set."""

def __init__(self, images, labels, fake_data=False, one_hot=False):
"""Construct a DataSet. one_hot arg is used only if fake_data is true."""

if fake_data:
self._num_examples = 10000
self.one_hot = one_hot
@@ -171,7 +169,7 @@ def next_batch(self, batch_size, fake_data=False):
def read_data_sets(train_dir, fake_data=False, one_hot=False):
"""Return training, validation and testing data sets."""

class DataSets(object):
class DataSets:
pass

data_sets = DataSets()