
Commit 0055dd9

docs: Fix changelog entry. Prepare test in case #529 advanced.
1 parent: bf5e1bd

2 files changed (+12, -10)

docs/news.rst (+5, -5)
@@ -133,11 +133,11 @@ Removed
 - Remove the ``native_stringify_dict`` function.
 - Remove undocumented and unused internal environment variables:
 
-  - ``SCRAPY_FEED_URI`` to ``SCRAPYD_FEED_URI``
-  - ``SCRAPY_JOB`` to ``SCRAPYD_JOB``
-  - ``SCRAPY_LOG_FILE`` to ``SCRAPYD_LOG_FILE``
-  - ``SCRAPY_SLOT`` to ``SCRAPYD_SLOT``
-  - ``SCRAPY_SPIDER`` to ``SCRAPYD_SPIDER``
+  - ``SCRAPYD_FEED_URI``
+  - ``SCRAPYD_JOB``
+  - ``SCRAPYD_LOG_FILE``
+  - ``SCRAPYD_SLOT``
+  - ``SCRAPYD_SPIDER``
 
 1.4.3 (2023-09-25)
 ------------------
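The corrected entry lists the removed SCRAPYD_* variables outright instead of the old "``SCRAPY_*`` to ``SCRAPYD_*``" wording, which read like a rename. As a minimal sketch (hypothetical code, not from the repository), anything that still reads these variables now finds them unset:

import os

# Per the changelog entry above, Scrapyd no longer sets these internal
# variables, so every lookup falls through to the default.
for name in ("SCRAPYD_FEED_URI", "SCRAPYD_JOB", "SCRAPYD_LOG_FILE", "SCRAPYD_SLOT", "SCRAPYD_SPIDER"):
    print(name, os.environ.get(name, "<unset>"))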

tests/test_webservice.py (+7, -5)
@@ -16,6 +16,8 @@
 from scrapyd.webservice import spider_list
 from tests import get_egg_data, get_finished_job, get_message, has_settings, root_add_version, touch
 
+cliargs = [sys.executable, "-m", "scrapyd.runner", "crawl", "s2", "-s", "DOWNLOAD_DELAY=2", "-a", "arg1=val1"]
+
 job1 = get_finished_job(
     project="p1",
     spider="s1",
@@ -27,7 +29,7 @@
 
 @pytest.fixture()
 def scrapy_process():
-    process = ScrapyProcessProtocol(project="p1", spider="s1", job="j1", env={}, args=[])
+    process = ScrapyProcessProtocol(project="p1", spider="s1", job="j1", env={}, args=cliargs)
     process.start_time = datetime.datetime(2001, 2, 3, 4, 5, 6, 9)
     process.end_time = datetime.datetime(2001, 2, 3, 4, 5, 6, 10)
     process.transport = MagicMock()
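The new cliargs list mirrors a scrapyd.runner crawl command line with one -s setting and one -a spider argument, and the updated listjobs expectations below (arg1=val1) follow from those pairs. A rough, hypothetical helper, not Scrapyd's actual parsing code, showing how such an argv list maps onto the settings and args dictionaries the test asserts:

def parse_cli_pairs(argv):
    # Collect -s (setting) and -a (spider argument) key=value pairs from an
    # argv list shaped like cliargs above. Illustrative helper only.
    settings, spider_args = {}, {}
    tokens = iter(argv)
    for token in tokens:
        if token in ("-s", "-a"):
            key, _, value = next(tokens).partition("=")
            (settings if token == "-s" else spider_args)[key] = value
    return settings, spider_args

# parse_cli_pairs(cliargs) -> ({"DOWNLOAD_DELAY": "2"}, {"arg1": "val1"})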
@@ -374,7 +376,7 @@ def test_list_jobs(txrequest, root, scrapy_process, args, exists, chdir):
         _job="j1",
         _version="0.1",
         settings={"DOWNLOAD_DELAY=2": "TRACK=Cause = Time"},
-        other="one",
+        arg1="val1",
     )
 
     expected["pending"].append(
@@ -384,7 +386,7 @@ def test_list_jobs(txrequest, root, scrapy_process, args, exists, chdir):
             "spider": "s1",
             "version": "0.1",
             "settings": {"DOWNLOAD_DELAY=2": "TRACK=Cause = Time"},
-            "args": {"other": "one"},
+            "args": {"arg1": "val1"},
         },
     )
     assert_content(txrequest, root, "GET", "listjobs", args, expected)
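For context, the pending entry these two hunks build would surface in a listjobs.json response body roughly as below. The shape is illustrative only: the project field is assumed, and the remaining values are taken from the hunks above.

# Illustrative pending-job entry as it might appear in a listjobs.json
# response; "project" is assumed, the rest comes from the test above.
pending_entry = {
    "project": "p1",
    "spider": "s1",
    "version": "0.1",
    "settings": {"DOWNLOAD_DELAY=2": "TRACK=Cause = Time"},
    "args": {"arg1": "val1"},
}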
@@ -584,7 +586,7 @@ def test_schedule_parameters(txrequest, root_with_egg):
         b"jobid": [b"aaa"],
         b"priority": [b"5"],
         b"setting": [b"DOWNLOAD_DELAY=2", b"TRACK=Cause = Time"],
-        b"other": [b"one", b"two"],
+        b"arg1": [b"val1", b"val2"],
     }
     txrequest.method = "POST"
     content = root_with_egg.children[b"schedule.json"].render(txrequest)
@@ -604,7 +606,7 @@ def test_schedule_parameters(txrequest, root_with_egg):
             "DOWNLOAD_DELAY": "2",
             "TRACK": "Cause = Time",
         },
-        "other": "one",  # users are encouraged in api.rst to open an issue if they want multiple values
+        "arg1": "val1",  # users are encouraged in api.rst to open an issue if they want multiple values
     }
 
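The comment preserved in this hunk records the behavioral subtlety the test pins down: Twisted parses a form body into a dict mapping bytes field names to lists of bytes values, and when a field such as arg1 is submitted twice, only its first value is kept. A small self-contained illustration (assumed shapes, not Scrapyd code):

# Twisted-style form arguments: each field maps to a list of byte strings.
form = {b"arg1": [b"val1", b"val2"]}

# Keeping only the first value per field reproduces what the test asserts.
first_only = {key.decode(): values[0].decode() for key, values in form.items()}
assert first_only == {"arg1": "val1"}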
