Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .rstcheck.cfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
[rstcheck]
ignore_directives=ifconfig,substitution-code-block,tabs,validated-code-block
ignore_roles=repo
# TODO: re-enable checking of Hyperlink messages (currently ignored)
ignore_messages=(Hyperlink)
1 change: 1 addition & 0 deletions BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ exports_files([
".clang-format",
"pytest.ini",
".coveragerc",
".rstcheck.cfg",
])

# These two definitions exist to help reduce Envoy upstream core code depending on extensions.
Expand Down
5 changes: 5 additions & 0 deletions bazel/repositories_extra.bzl
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,11 @@ def _python_deps():
requirements = "@envoy//tools/dependency:requirements.txt",
extra_pip_args = ["--require-hashes"],
)
pip_install(
name = "docs_pip3",
requirements = "@envoy//docs:requirements.txt",
extra_pip_args = ["--require-hashes"],
)
pip_install(
name = "kafka_pip3",
requirements = "@envoy//source/extensions/filters/network/kafka:requirements.txt",
Expand Down
1 change: 1 addition & 0 deletions ci/do_ci.sh
Original file line number Diff line number Diff line change
Expand Up @@ -474,6 +474,7 @@ elif [[ "$CI_TARGET" == "tooling" ]]; then
bazel run "${BAZEL_BUILD_OPTIONS[@]}" //tools/base:pytest_runner -- --cov-collect /tmp/.coverage-envoy
bazel run "${BAZEL_BUILD_OPTIONS[@]}" //tools/base:pytest_utils -- --cov-collect /tmp/.coverage-envoy
bazel run "${BAZEL_BUILD_OPTIONS[@]}" //tools/code_format:pytest_python_check -- --cov-collect /tmp/.coverage-envoy
bazel run "${BAZEL_BUILD_OPTIONS[@]}" //tools/code_format:pytest_rst_check -- --cov-collect /tmp/.coverage-envoy
bazel run "${BAZEL_BUILD_OPTIONS[@]}" //tools/testing:python_coverage -- --fail-under=95 /tmp/.coverage-envoy /source/generated/tooling
exit 0
elif [[ "$CI_TARGET" == "verify_examples" ]]; then
Expand Down
10 changes: 10 additions & 0 deletions docs/build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,17 @@ rsync -av \
"${SCRIPT_DIR}"/_ext \
"${GENERATED_RST_DIR}"

LINTFAIL=

bazel run "${BAZEL_BUILD_OPTIONS[@]}" //tools/code_format:rst_check "${GENERATED_RST_DIR}" || {
    LINTFAIL=yes
}

# To speed up validate_fragment invocations in validating_code_block
bazel build "${BAZEL_BUILD_OPTIONS[@]}" //tools/config_validation:validate_fragment

sphinx-build -W --keep-going -b html "${GENERATED_RST_DIR}" "${DOCS_OUTPUT_DIR}"

# Lint failures are deferred until after the sphinx build so that all docs
# errors are surfaced in a single pass.
# TODO: handle this better - e.g. collect and re-print the lint output here.
if [[ -n "$LINTFAIL" ]]; then
    echo "ERROR: rst_check failed for ${GENERATED_RST_DIR}" >&2
    exit 1
fi
5 changes: 5 additions & 0 deletions docs/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,11 @@ pytz==2021.1 \
requests==2.25.1 \
--hash=sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e \
--hash=sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804
rstcheck==3.3.1 \
--hash=sha256:92c4f79256a54270e0402ba16a2f92d0b3c15c8f4410cb9c57127067c215741f
setuptools==54.2.0 \
--hash=sha256:b726461910b9ba30f077880c228bea22121aec50b172edf39eb7ff026c054a11 \
--hash=sha256:aa9c24fb83a9116b8d425e53bec24c7bfdbffc313c2159f9ed036d4a6dd32d7d
six==1.15.0 \
--hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \
--hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced
Expand Down
1 change: 0 additions & 1 deletion docs/root/api/api.rst
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
.. _api:

API
===
Expand Down
5 changes: 2 additions & 3 deletions docs/root/configuration/http/http_conn_man/headers.rst
Original file line number Diff line number Diff line change
Expand Up @@ -304,12 +304,11 @@ Example 6: The internal Envoy from Example 5, receiving a request proxied by ano

A few very important notes about XFF:

.. _config_http_conn_man_headers_x-forwarded-for_internal_origin:

1. If *use_remote_address* is set to true, Envoy sets the
:ref:`config_http_conn_man_headers_x-envoy-external-address` header to the trusted
client address.

.. _config_http_conn_man_headers_x-forwarded-for_internal_origin:

2. XFF is what Envoy uses to determine whether a request is internal origin or external origin.
If *use_remote_address* is set to true, the request is internal if and only if the
request contains no XFF and the immediate downstream node's connection to Envoy has
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,15 @@ Compression Libraries
Underlying implementation
-------------------------

Currently Envoy uses `zlib <http://zlib.net>`_ and `brotli <https://brotli.org>`_ as compression
Currently Envoy uses `zlib <http://zlib.net>`__ and `brotli <https://brotli.org>`_ as compression
libraries.

.. note::

`zlib-ng <https://github.com/zlib-ng/zlib-ng>`_ is a fork that hosts several 3rd-party
`zlib-ng <https://github.com/zlib-ng/zlib-ng>`__ is a fork that hosts several 3rd-party
contributions containing new optimizations. Those optimizations are considered useful for
`improving compression performance <https://github.com/envoyproxy/envoy/issues/8448#issuecomment-667152013>`_.
Envoy can be built to use `zlib-ng <https://github.com/zlib-ng/zlib-ng>`_ instead of regular
`zlib <http://zlib.net>`_ by using ``--define zlib=ng`` Bazel option. The relevant build options
used to build `zlib-ng <https://github.com/zlib-ng/zlib-ng>`_ can be evaluated in :repo:`here
Envoy can be built to use `zlib-ng <https://github.com/zlib-ng/zlib-ng>`__ instead of regular
`zlib <http://zlib.net>`__ by using ``--define zlib=ng`` Bazel option. The relevant build options
used to build `zlib-ng <https://github.com/zlib-ng/zlib-ng>`__ can be evaluated in :repo:`here
<bazel/foreign_cc/BUILD>`. Currently, this option is only available on Linux.
2 changes: 1 addition & 1 deletion docs/root/intro/arch_overview/other_protocols/grpc.rst
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
gRPC
====

`gRPC <https://www.grpc.io/>`_ is an RPC framework from Google. It uses protocol buffers as the
`gRPC <https://www.grpc.io/>`__ is an RPC framework from Google. It uses protocol buffers as the
underlying serialization/IDL format. At the transport layer it uses HTTP/2 for request/response
multiplexing. Envoy has first class support for gRPC both at the transport layer as well as at the
application layer:
Expand Down
6 changes: 3 additions & 3 deletions docs/root/intro/arch_overview/other_protocols/redis.rst
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ Redis
Envoy can act as a Redis proxy, partitioning commands among instances in a cluster.
In this mode, the goals of Envoy are to maintain availability and partition tolerance
over consistency. This is the key point when comparing Envoy to `Redis Cluster
<https://redis.io/topics/cluster-spec>`_. Envoy is designed as a best-effort cache,
<https://redis.io/topics/cluster-spec>`__. Envoy is designed as a best-effort cache,
meaning that it will not try to reconcile inconsistent data or keep a globally consistent
view of cluster membership. It also supports routing commands from different workloads to
different upstream clusters based on their access patterns, eviction, or isolation
Expand Down Expand Up @@ -63,9 +63,9 @@ close map to 5xx. All other responses from Redis are counted as a success.
.. _arch_overview_redis_cluster_support:

Redis Cluster Support (Experimental)
----------------------------------------
------------------------------------

Envoy currently offers experimental support for `Redis Cluster <https://redis.io/topics/cluster-spec>`_.
Envoy currently offers experimental support for `Redis Cluster <https://redis.io/topics/cluster-spec>`__.

When using Envoy as a sidecar proxy for a Redis Cluster, the service can use a non-cluster Redis client
implemented in any language to connect to the proxy as if it's a single node Redis instance.
Expand Down
4 changes: 2 additions & 2 deletions docs/root/intro/arch_overview/security/ssl.rst
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ FIPS 140-2
BoringSSL can be built in a
`FIPS-compliant mode <https://boringssl.googlesource.com/boringssl/+/master/crypto/fipsmodule/FIPS.md>`_,
following the build instructions from the `Security Policy for BoringCrypto module
<https://csrc.nist.gov/CSRC/media/projects/cryptographic-module-validation-program/documents/security-policies/140sp3678.pdf>`_,
<https://csrc.nist.gov/CSRC/media/projects/cryptographic-module-validation-program/documents/security-policies/140sp3678.pdf>`__,
using ``--define boringssl=fips`` Bazel option. Currently, this option is only available on Linux-x86_64.

The correctness of the FIPS build can be verified by checking the presence of ``BoringSSL-FIPS``
Expand All @@ -54,7 +54,7 @@ it's not sufficient by itself, and depending on the context, additional steps mi
The extra requirements may include using only approved algorithms and/or using only private keys
generated by a module operating in FIPS-approved mode. For more information, please refer to the
`Security Policy for BoringCrypto module
<https://csrc.nist.gov/CSRC/media/projects/cryptographic-module-validation-program/documents/security-policies/140sp3678.pdf>`_
<https://csrc.nist.gov/CSRC/media/projects/cryptographic-module-validation-program/documents/security-policies/140sp3678.pdf>`__
and/or an `accredited CMVP laboratory <https://csrc.nist.gov/projects/testing-laboratories>`_.

Please note that the FIPS-compliant build is based on an older version of BoringSSL than
Expand Down
2 changes: 0 additions & 2 deletions docs/root/start/building.rst
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
.. _building:


Building
========
Expand Down
2 changes: 1 addition & 1 deletion tools/base/checker.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ def path(self) -> str:
raise self.parser.error(
"Incorrect path: `path` must be a directory, set either as first arg or with --path"
)
return path
return path.rstrip("/")

@property
def paths(self) -> list:
Expand Down
10 changes: 10 additions & 0 deletions tools/base/runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

import argparse
import logging
import multiprocessing
import os
import subprocess
import sys
Expand Down Expand Up @@ -73,6 +74,15 @@ def add_arguments(self, parser: argparse.ArgumentParser) -> None:
"""Override this method to add custom arguments to the arg parser"""
pass

def parallel(self, cmd, args, handle):
    """Map ``cmd`` over ``args`` in a process pool and pass the results to ``handle``.

    Args:
        cmd: A picklable (module-level) callable applied to each item of ``args``.
        args: Iterable of arguments, one per invocation of ``cmd``.
        handle: Callable invoked (in this process) with the list of results.

    Returns:
        An error count - currently always 0; errors are expected to be
        recorded by ``handle``.
    """
    errors = 0
    # Using the pool as a context manager guarantees worker cleanup even if
    # ``cmd`` or ``handle`` raises (replaces the manual try/finally + close()).
    with multiprocessing.Pool(multiprocessing.cpu_count()) as pool:
        handle(pool.map(cmd, args))
    return errors


class ForkingAdapter(object):

Expand Down
14 changes: 14 additions & 0 deletions tools/code_format/BUILD
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
load("@pylint_pip3//:requirements.bzl", "requirement")
load("@docs_pip3//:requirements.bzl", doc_requirement = "requirement")
load("//bazel:envoy_build_system.bzl", "envoy_package")
load("//tools/base:envoy_python.bzl", "envoy_py_binary")

Expand All @@ -22,3 +23,16 @@ envoy_py_binary(
requirement("yapf"),
],
)

# rst linter for the (generated) docs. Pulls rstcheck/Sphinx from the docs
# pip requirements (@docs_pip3) so the linter sees the same versions used by
# the docs build, and ships the repo-level .rstcheck.cfg as runtime data.
envoy_py_binary(
    name = "tools.code_format.rst_check",
    data = [
        "//:.rstcheck.cfg",
    ],
    deps = [
        "//tools/base:checker",
        "//tools/base:utils",
        doc_requirement("rstcheck"),
        doc_requirement("Sphinx"),
    ],
)
161 changes: 161 additions & 0 deletions tools/code_format/rst_check.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,161 @@
#!/usr/bin/python3

import argparse
import configparser
import multiprocessing
import re
import os
import shutil
import subprocess
import sys
import tempfile
from contextlib import closing, contextmanager
from functools import cached_property

import docutils
import rstcheck
import sphinx

from tools.base import checker, utils


# Patterns we don't want to see in generated docs (e.g. unresolved ` ref:`
# markers and footnote references).
# TODO(phlax): move to .rstcheck.cfg when available
RSTCHECK_GREP_FAIL = (" ref:", "\\[\\#")
# PEP 8: spaces around "=" for module-level assignment.
RSTCHECK_CONFIG = ".rstcheck.cfg"
RSTCHECK_COMMAND = ("rstcheck", "-r")


@contextmanager
def sphinx_enabled():
    """Register Sphinx directives and roles for the duration of the context.

    Instantiating a Sphinx application registers its directives/roles with
    docutils, allowing rstcheck to parse Sphinx-flavoured rst. The throwaway
    source/build directories are cleaned up on exit.
    """
    # TemporaryDirectory replaces the manual mkdtemp/try/finally/rmtree
    # pairing and guarantees cleanup.
    with tempfile.TemporaryDirectory() as srcdir:
        outdir = os.path.join(srcdir, '_build')
        sphinx.application.Sphinx(
            srcdir=srcdir,
            confdir=None,
            outdir=outdir,
            doctreedir=outdir,
            buildername='dummy',
            status=None)
        yield


def _check_rst_file(params: tuple) -> tuple:
    """Check an rst file, returning ``(filename, errors)``.

    This function *must* be module level in order to be pickled
    for multiprocessing.

    Args:
        params: ``(filename, args)`` where ``args`` is the namespace built by
            ``RstChecker.rstcheck_args``.

    Returns:
        ``(filename, errors)`` - fixed annotation: the original declared
        ``-> list`` but returns a 2-tuple; ``errors`` is the list produced
        by ``rstcheck.check``.
    """
    filename, args = params

    with closing(docutils.io.FileInput(source_path=filename)) as input_file:
        contents = input_file.read()

    # Register ignored directives/roles with docutils before checking.
    rstcheck.ignore_directives_and_roles(
        args.ignore_directives, args.ignore_roles)

    # Blank out ignored substitution references so they are not reported.
    for substitution in args.ignore_substitutions:
        contents = contents.replace(f"|{substitution}|", "None")

    ignore = {
        "languages": args.ignore_language,
        "messages": args.ignore_messages}
    return filename, list(
        rstcheck.check(
            contents,
            filename=filename,
            report_level=args.report,
            ignore=ignore))


class RstChecker(checker.ForkingChecker):
    """Lint rst files: grep for banned patterns and run `rstcheck` in parallel."""

    checks = ("greps", "rstcheck")

    @property
    def failing_regexes(self) -> tuple:
        """Grep patterns that should never appear in generated rst docs."""
        return RSTCHECK_GREP_FAIL

    @cached_property
    def rstcheck_args(self) -> argparse.Namespace:
        """Return new ``args`` with configuration loaded from file."""
        args = argparse.Namespace()
        args.report = self.rstcheck_report_level
        for ignore in ["language", "directives", "substitutions", "roles"]:
            # Config values are comma-separated lists, e.g. `ignore_roles=repo`.
            setattr(
                args,
                f"ignore_{ignore}",
                [o.strip()
                 for o in self.rstcheck_config.get(f"ignore_{ignore}", "").split(",")
                 if o.strip()])
        args.ignore_messages = self.rstcheck_config.get("ignore_messages", "")
        return args

    @cached_property
    def rstcheck_config(self) -> dict:
        """Parsed ``[rstcheck]`` section of the config file, or {} if absent."""
        parser = configparser.ConfigParser()
        parser.read(rstcheck.find_config(self.path))
        try:
            return dict(parser.items("rstcheck"))
        except configparser.NoSectionError:
            return {}

    @cached_property
    def rstcheck_report_level(self) -> int:
        """Numeric docutils report threshold (config may name it, e.g. "info")."""
        report = self.rstcheck_config.get("report", "info")
        threshold_dictionary = docutils.frontend.OptionParser.thresholds
        return int(threshold_dictionary.get(report, report))

    @cached_property
    def rst_files(self) -> list:
        """All rst files found (recursively) under the configured paths."""
        return list(
            rstcheck.find_files(
                filenames=self.paths,
                recursive=True))

    def check_greps(self) -> None:
        """Run each banned-pattern grep over the rst tree."""
        for check in self.failing_regexes:
            self.grep_unwanted(check)

    def check_rstcheck(self) -> None:
        """Run rstcheck over all rst files in a process pool."""
        with sphinx_enabled():
            try:
                self.parallel(
                    _check_rst_file,
                    [(path, self.rstcheck_args) for path in self.rst_files],
                    self._rstcheck_handle)
            except (IOError, UnicodeError) as e:
                self.error("rstcheck", [f"Rstcheck failed: {e}"])

    def grep_unwanted(self, check: str) -> None:
        """Grep rst files for a banned pattern; a grep *match* is an error."""
        response = self.fork(["grep", "-nr", "--include", "\\*.rst"] + [check])
        # grep exits 0 when it found matches - ie when there is a problem.
        if not response.returncode:
            # Fix: original referenced undefined name `resp` (NameError); also
            # skip the empty trailing entry from splitting on "\n".
            self.error(
                "grep",
                [f"grepping found bad '{check}': {path}"
                 for path in response.stdout.split("\n")
                 if path])
        else:
            self.succeed("grep", [f"grepping found no errors for '{check}'"])

    def _rstcheck_handle(self, results: list) -> None:
        """Handle multiprocessed error results of rstchecks."""
        for filename, errors in results:
            if not errors:
                # Fix: report the actual (path-stripped) filename rather than
                # an f-string with no placeholder.
                self.succeed(
                    "rstcheck",
                    [f"No errors for: {self._strip_line(filename)}"])
                continue
            for line_number, message in errors:
                # Normalize messages missing a docutils severity prefix.
                if not re.match(r'\([A-Z]+/[0-9]+\)', message):
                    message = '(ERROR/3) ' + message
                self.error(
                    "rstcheck",
                    [f"{self._strip_line(filename)}:{line_number}: {message}"])

    def _strip_line(self, line: str) -> str:
        """Strip the checker's root path prefix from a reported path."""
        return line[len(self.path) + 1:] if line.startswith(f"{self.path}/") else line


def main(*args: str) -> int:
    """Run the rst checks, returning an exit code suitable for ``sys.exit``.

    Fixed annotations: each positional arg is a command-line string (not a
    list), and the result of ``run_checks`` is returned (not ``None``).
    """
    return RstChecker(*args).run_checks()


if __name__ == "__main__":
    sys.exit(main(*sys.argv[1:]))
20 changes: 20 additions & 0 deletions tools/code_format/tests/test_rst_check.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@

from unittest.mock import patch

from tools.code_format import rst_check


def test_rst_checker_main():
    """`rst_check.main` constructs an RstChecker and returns run_checks().

    Renamed from the copy-pasted `test_python_checker_main` - this module
    tests rst_check, not python_check.
    """
    class_mock = patch("tools.code_format.rst_check.RstChecker")

    with class_mock as m_class:
        assert (
            rst_check.main("arg0", "arg1", "arg2")
            == m_class.return_value.run_checks.return_value)

    # main() should pass its args straight through to the checker...
    assert (
        list(m_class.call_args)
        == [('arg0', 'arg1', 'arg2'), {}])
    # ...and call run_checks with no arguments.
    assert (
        list(m_class.return_value.run_checks.call_args)
        == [(), {}])