Skip to content

Commit

Permalink
feat: Add scrapyd-client targets. Remove scrapyd-deploy -l (closes #96)…
Browse files Browse the repository at this point in the history
…. test: Move fixtures into conftest and helpers into tests.__init__.
  • Loading branch information
jpmckinney committed Oct 11, 2024
1 parent 2719ac5 commit 369cf29
Show file tree
Hide file tree
Showing 9 changed files with 250 additions and 209 deletions.
6 changes: 4 additions & 2 deletions CHANGES.rst
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,9 @@ History
Added
^^^^^

- Add support for Python 3.12.
- Add ``scrapyd-client targets`` subcommand.
- ``scrapyd-client`` can be called as ``python -m scrapyd_client``.
- Add support for Python 3.12.

Changed
^^^^^^^
Expand All @@ -29,7 +30,8 @@ Fixed
Removed
^^^^^^^

- **BREAKING CHANGE:** Remove the ``scrapyd-deploy --list-projects`` option, in favor of ``scrapyd-client projects``.
- **BREAKING CHANGE:** Remove the ``scrapyd-deploy --list-targets`` (``-l``) option, in favor of ``scrapyd-client targets``.
- **BREAKING CHANGE:** Remove the ``scrapyd-deploy --list-projects`` (``-L``) option, in favor of ``scrapyd-client projects``.
- **BREAKING CHANGE:** Remove the ``get_request`` and ``post_request`` functions from ``scrapyd_client.utils``.
- **BREAKING CHANGE:** Remove the ``scrapyd_client.lib`` module, in favor of ``scrapyd_client.ScrapydClient``.
- Remove ``urllib3`` and ``w3lib`` requirements.
Expand Down
15 changes: 7 additions & 8 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -207,6 +207,13 @@ deploy

This is a wrapper around `scrapyd-deploy`_.

targets
~~~~~~~

Lists all targets::

scrapyd-client targets

projects
~~~~~~~~

Expand Down Expand Up @@ -311,14 +318,6 @@ To deploy to all targets, use the ``-a`` option::

scrapyd-deploy -a -p <project>

To list all available targets, use the ``-l`` option::

scrapyd-deploy -l

To list all available projects on one target, use the ``-L`` option::

scrapyd-deploy -L example

While your target needs to be defined with its URL in ``scrapy.cfg``,
you can use `netrc <https://www.gnu.org/software/inetutils/manual/html_node/The-_002enetrc-file.html>`__ for username and password, like so::

Expand Down
16 changes: 15 additions & 1 deletion scrapyd_client/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,12 @@
from traceback import print_exc

import requests
from scrapy.utils.project import inside_project

import scrapyd_client.deploy
from scrapyd_client.exceptions import ErrorResponse, MalformedResponse
from scrapyd_client.pyclient import ScrapydClient
from scrapyd_client.utils import DEFAULT_TARGET_URL, get_config
from scrapyd_client.utils import DEFAULT_TARGET_URL, _get_targets, get_config

ISSUE_TRACKER_URL = "https://github.com/scrapy/scrapyd-client/issues"

Expand All @@ -23,6 +24,16 @@ def deploy(args): # noqa: ARG001
scrapyd_client.deploy.main()


def targets(args):  # noqa: ARG001
    """List all targets."""
    # Must run inside a Scrapy project, since targets come from scrapy.cfg.
    if not inside_project():
        print("Error: no Scrapy project found in this location", file=sys.stderr)
        sys.exit(1)

    for name, options in _get_targets().items():
        # Left-align the target name in a 20-character column before its URL.
        line = "%-20s %s" % (name, options["url"])
        print(line)


def projects(args):
"""List all projects deployed on a Scrapyd instance."""
client = _get_client(args)
Expand Down Expand Up @@ -98,6 +109,9 @@ def parse_cli_args(args):
parser = subparsers.add_parser("deploy", description=deploy.__doc__)
parser.set_defaults(action=deploy)

parser = subparsers.add_parser("targets", description=targets.__doc__)
parser.set_defaults(action=targets)

parser = subparsers.add_parser("projects", description=projects.__doc__)
parser.set_defaults(action=projects)

Expand Down
22 changes: 1 addition & 21 deletions scrapyd_client/deploy.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from scrapy.utils.conf import closest_scrapy_cfg
from scrapy.utils.project import inside_project

from scrapyd_client.utils import get_auth, get_config
from scrapyd_client.utils import _get_targets, get_auth, get_config

_SETUP_PY_TEMPLATE = """
# Automatically created by: scrapyd-deploy
Expand All @@ -35,7 +35,6 @@ def parse_args():
parser.add_argument("target", nargs="?", default="default", metavar="TARGET")
parser.add_argument("-p", "--project", help="the project name in the TARGET")
parser.add_argument("-v", "--version", help="the version to deploy. Defaults to current timestamp")
parser.add_argument("-l", "--list-targets", action="store_true", help="list available targets")
parser.add_argument("-a", "--deploy-all-targets", action="store_true", help="deploy all targets")
parser.add_argument(
"-d",
Expand All @@ -60,11 +59,6 @@ def main():
_log("Error: no Scrapy project found in this location")
sys.exit(1)

if opts.list_targets:
for name, target in _get_targets().items():
print("%-20s %s" % (name, target["url"]))
return

tmpdir = None

if opts.build_egg: # build egg only
Expand Down Expand Up @@ -162,20 +156,6 @@ def _fail(message, code=1):
sys.exit(code)


def _get_targets():
    """Return a mapping of target name to merged options from scrapy.cfg.

    Options in the base ``[deploy]`` section are inherited by every
    ``[deploy:<name>]`` section; the base section itself is exposed as the
    "default" target only when it defines a ``url``.
    """
    cfg = get_config()
    base = dict(cfg.items("deploy")) if cfg.has_section("deploy") else {}

    targets = {}
    if "url" in base:
        targets["default"] = base

    for section in cfg.sections():
        if not section.startswith("deploy:"):
            continue
        # Named sections inherit, then override, the base [deploy] options.
        merged = dict(base)
        merged.update(cfg.items(section))
        targets[section[len("deploy:"):]] = merged
    return targets


def _url(target, action):
if "url" in target:
return urljoin(target["url"], action)
Expand Down
14 changes: 14 additions & 0 deletions scrapyd_client/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,3 +45,17 @@ def get_config(use_closest=True):
cfg = ConfigParser(interpolation=EnvInterpolation())
cfg.read(conf.get_sources(use_closest))
return cfg


def _get_targets():
    """Return the deploy targets from the Scrapy config as a name-to-options dict.

    Every ``[deploy:<name>]`` section inherits the options of the base
    ``[deploy]`` section. The base section is included under the name
    "default" only if it sets a ``url``.
    """
    cfg = get_config()
    defaults = dict(cfg.items("deploy")) if cfg.has_section("deploy") else {}

    targets = {}
    if "url" in defaults:
        targets["default"] = defaults

    prefix = "deploy:"
    for section in cfg.sections():
        if section.startswith(prefix):
            # Base options first, so the named section's options win.
            targets[section[len(prefix):]] = {**defaults, **dict(cfg.items(section))}
    return targets
11 changes: 11 additions & 0 deletions tests/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
import re


def assert_lines(actual, expected):
    """Assert that *actual*'s lines equal *expected*.

    If *expected* is a string, compare line lists for equality. Otherwise,
    treat *expected* as a sequence of regular expressions, each of which must
    fully match the corresponding line of *actual*.
    """
    actual_lines = actual.splitlines()
    if isinstance(expected, str):
        assert actual_lines == expected.splitlines()
        return
    assert len(actual_lines) == len(expected)
    for pattern, line in zip(expected, actual_lines):
        # Anchor the pattern so it must match the entire line.
        assert re.search(f"^{pattern}$", line), f"{line} does not match {pattern}"
132 changes: 132 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,132 @@
import os
from textwrap import dedent

import pytest


def _write_conf_file(content):
"""
Scrapy startproject writes a file like:
.. code-block:: ini
# Automatically created by: scrapy startproject
#
# For more information about the [deploy] section see:
# https://scrapyd.readthedocs.io/en/latest/deploy.html
[settings]
default = ${project_name}.settings
[deploy]
#url = http://localhost:6800/
project = ${project_name}
See scrapy/templates/project/scrapy.cfg
"""
with open("scrapy.cfg", "w") as f:
f.write(
dedent(
"""\
[settings]
default = scrapyproj.settings
"""
)
+ dedent(content)
)


@pytest.fixture
def project(tmpdir, script_runner):
    """Create a fresh Scrapy project in a temporary directory and chdir into it.

    Restores the original working directory on teardown.
    """
    original_cwd = os.getcwd()

    home = tmpdir.mkdir("myhome")
    home.chdir()
    result = script_runner.run(["scrapy", "startproject", "scrapyproj"])

    # Fail fast if project scaffolding did not succeed.
    assert "New Scrapy project 'scrapyproj'" in result.stdout
    assert result.stderr == ""
    assert result.success

    os.chdir("scrapyproj")
    yield
    os.chdir(original_cwd)


@pytest.fixture
def project_with_dependencies(project):
    """Project fixture plus an empty requirements.txt in the project directory."""
    with open("requirements.txt", "w") as f:
        f.write("")


@pytest.fixture
def conf_empty_section_implicit_target(project):
    """A scrapy.cfg whose [deploy] section has no options at all."""
    _write_conf_file("[deploy]")


@pytest.fixture
def conf_empty_section_explicit_target(project):
    """A scrapy.cfg whose named [deploy:mytarget] section has no options."""
    _write_conf_file("[deploy:mytarget]")


@pytest.fixture
def conf_no_project(project):
    """A scrapy.cfg with a default target that sets a url but no project."""
    _write_conf_file(
        """\
        [deploy]
        url = http://localhost:6800/
        """
    )


@pytest.fixture
def conf_no_url(project):
    """A scrapy.cfg with a named target that sets a project but no url."""
    _write_conf_file(
        """\
        [deploy:mytarget]
        project = scrapydproject
        """
    )


@pytest.fixture
def conf_default_target(project):
    """A scrapy.cfg with a fully configured default [deploy] target."""
    _write_conf_file(
        """\
        [deploy]
        url = http://localhost:6800/
        project = scrapydproject
        """
    )


@pytest.fixture
def conf_named_targets(project):
    """A scrapy.cfg with two named targets and no default target."""
    # target2 is deliberately before target 1, to test ordering.
    _write_conf_file(
        """\
        [deploy:target2]
        url = http://localhost:6802/
        project = anotherproject

        [deploy:target1]
        url = http://localhost:6801/
        project = scrapydproject
        """
    )


@pytest.fixture
def conf_mixed_targets(project):
    """A scrapy.cfg with a default [deploy] target plus one named target."""
    # target2 is deliberately before target 1, to test ordering.
    _write_conf_file(
        """\
        [deploy]
        url = http://localhost:6800/
        project = anotherproject

        [deploy:target1]
        url = http://localhost:6801/
        project = scrapydproject
        """
    )
Loading

0 comments on commit 369cf29

Please sign in to comment.