Commit 811985a

Merge pull request #29 from Gallaecio/scrapy-iterable-single-values
Restore support for nested Scrapy items
2 parents: 5464069 + 077ac23 · commit 811985a

5 files changed: +74 -12 lines changed


.travis.yml (+8 -7)

@@ -7,20 +7,21 @@ branches:
 
 matrix:
   include:
-  - python: 3.5
-    env: TOXENV=py35
+  - python: 3.7
+    env: TOXENV=docs
+
   - python: 3.6
-    env: TOXENV=py36
+    env: TOXENV=py
   - python: 3.7
-    env: TOXENV=py37
+    env: TOXENV=py
   - python: 3.8
-    env: TOXENV=py38
+    env: TOXENV=py
 
   - python: pypy3
     env: TOXENV=pypy3
 
-  - python: 3.7
-    env: TOXENV=docs
+  - python: 3.8
+    env: TOXENV=extra-deps
 
 install:
   - pip install -U tox codecov

itemloaders/utils.py (+9 -3)

@@ -2,11 +2,13 @@
 Copy/paste from scrapy source at the moment, to ensure tests are working.
 Refactoring to come later
 """
-from functools import partial
 import inspect
+from functools import partial
+
+from itemadapter import is_item
 
 
-_ITERABLE_SINGLE_VALUES = dict, str, bytes
+_ITERABLE_SINGLE_VALUES = str, bytes
 
 
 def arg_to_iter(arg):
@@ -17,7 +19,11 @@ def arg_to_iter(arg):
     """
     if arg is None:
         return []
-    elif not isinstance(arg, _ITERABLE_SINGLE_VALUES) and hasattr(arg, '__iter__'):
+    elif (
+        hasattr(arg, '__iter__')
+        and not isinstance(arg, _ITERABLE_SINGLE_VALUES)
+        and not is_item(arg)
+    ):
         return arg
     else:
         return [arg]
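
The effect of the new is_item() guard: a Scrapy Item defines __iter__ (iterating it yields its field names, as a dict yields keys), so the old check let it pass through arg_to_iter() as an iterable and it was flattened into field-name strings downstream. With is_item() it is treated as a single value again, and dict can be dropped from _ITERABLE_SINGLE_VALUES because is_item() already covers dicts. A minimal sketch of the resulting behaviour, assuming scrapy and itemadapter are installed (the Product item below is illustrative, not part of this repository):

# Illustrative only: how arg_to_iter() treats a Scrapy item after this change.
# Product is a hypothetical item class.
from scrapy import Field, Item

from itemloaders.utils import arg_to_iter


class Product(Item):
    name = Field()


print(arg_to_iter(None))                     # []
print(arg_to_iter(['a', 'b']))               # ['a', 'b'] -- real iterables pass through
print(arg_to_iter(Product(name='example')))  # a one-element list holding the item itself,
                                             # not an iteration over its field names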

setup.py (+1 -2)

@@ -27,14 +27,13 @@
         'Operating System :: OS Independent',
         'Programming Language :: Python',
         'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.5',
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
         'Programming Language :: Python :: 3.8',
         'Programming Language :: Python :: Implementation :: CPython',
         'Programming Language :: Python :: Implementation :: PyPy',
     ],
-    python_requires='>=3.5',
+    python_requires='>=3.6',
     install_requires=[
         # before updating these versions, be sure they are not higher than
         # scrapy's requirements

tests/test_nested_items.py (+50)

@@ -0,0 +1,50 @@
+import unittest
+
+from itemloaders import ItemLoader
+
+
+class NestedItemTest(unittest.TestCase):
+    """Test that adding items as values works as expected."""
+
+    def _test_item(self, item):
+        il = ItemLoader()
+        il.add_value('item_list', item)
+        self.assertEqual(il.load_item(), {'item_list': [item]})
+
+    def test_attrs(self):
+        try:
+            import attr
+        except ImportError:
+            self.skipTest("Cannot import attr")
+
+        @attr.s
+        class TestItem:
+            foo = attr.ib()
+
+        self._test_item(TestItem(foo='bar'))
+
+    def test_dataclass(self):
+        try:
+            from dataclasses import dataclass
+        except ImportError:
+            self.skipTest("Cannot import dataclasses.dataclass")
+
+        @dataclass
+        class TestItem:
+            foo: str
+
+        self._test_item(TestItem(foo='bar'))
+
+    def test_dict(self):
+        self._test_item({'foo': 'bar'})
+
+    def test_scrapy_item(self):
+        try:
+            from scrapy import Field, Item
+        except ImportError:
+            self.skipTest("Cannot import Field or Item from scrapy")
+
+        class TestItem(Item):
+            foo = Field()
+
+        self._test_item(TestItem(foo='bar'))
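
In user-facing terms, the new test covers the pattern sketched below: a whole item passed to add_value() is stored as a single nested value instead of being iterated. A minimal sketch assuming Scrapy is installed; the Price item and the field names are illustrative, not taken from this repository:

# Illustrative usage of nested items with ItemLoader; Price and the field
# names are hypothetical.
from scrapy import Field, Item

from itemloaders import ItemLoader


class Price(Item):
    amount = Field()
    currency = Field()


loader = ItemLoader()  # defaults to collecting values into a dict
loader.add_value('name', 'Widget')
loader.add_value('price_list', Price(amount=10, currency='EUR'))

loaded = loader.load_item()
# loaded == {'name': ['Widget'],
#            'price_list': [Price(amount=10, currency='EUR')]}
# Before this change the nested Scrapy item was flattened, so 'price_list'
# ended up holding the item's field names rather than the item itself.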

tox.ini (+6)

@@ -12,6 +12,12 @@ commands =
     --doctest-modules \
     {posargs:itemloaders tests}
 
+[testenv:extra-deps]
+deps =
+    {[testenv]deps}
+    attrs
+    scrapy
+
 [testenv:pypy3]
 basepython = pypy3
 
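
The new extra-deps environment installs attrs and Scrapy on top of the base test dependencies, so the attrs and Scrapy cases in tests/test_nested_items.py run instead of being skipped; it is the environment the new Travis job (python 3.8, TOXENV=extra-deps) executes. To run it locally (assuming tox is installed):

tox -e extra-deps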
