Commit

Merge pull request #69 from OpenIsraeliSupermarkets/scraping_backend_directly

Stable testing
erlichsefi authored Oct 16, 2024
2 parents 503bf4c + 475c061 commit a5dce6d
Showing 3 changed files with 18 additions and 43 deletions.
8 changes: 6 additions & 2 deletions il_supermarket_scarper/engines/engine.py
@@ -113,9 +113,13 @@ def apply_limit(
         elif isinstance(when_date, str) and when_date == "latest":
             intreable_ = self.get_only_latest(by_function, intreable_)
         elif when_date is not None:
-            raise ValueError(f"when_date should be datetime or 'latest', got {when_date}")
+            raise ValueError(
+                f"when_date should be datetime or 'latest', got {when_date}"
+            )
 
-        Logger.info(f"Number of entry after filtering base on time is {len(intreable_)}")
+        Logger.info(
+            f"Number of entry after filtering base on time is {len(intreable_)}"
+        )
 
         # filter by limit if the 'files_types' filter is not on.
         if limit:
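For context, the touched branch of apply_limit validates the when_date argument: the string "latest" keeps only the newest entry (via self.get_only_latest), anything else that is not None or a datetime raises the ValueError shown above. A minimal standalone sketch of that contract, assuming the datetime branch (not shown in this hunk) filters entries to the requested day:

from datetime import datetime

def apply_when_date_filter(entries, when_date, by_function=lambda entry: entry):
    """Sketch of the when_date contract from the apply_limit hunk above.

    Only the 'latest' and error branches appear in the diff; the datetime
    branch here is an assumption about behaviour defined elsewhere.
    """
    if isinstance(when_date, datetime):
        # assumed: keep entries whose timestamp falls on the requested day
        entries = [e for e in entries if by_function(e).date() == when_date.date()]
    elif isinstance(when_date, str) and when_date == "latest":
        # mirrors self.get_only_latest(...): keep only the newest entry
        entries = sorted(entries, key=by_function)[-1:]
    elif when_date is not None:
        raise ValueError(
            f"when_date should be datetime or 'latest', got {when_date}"
        )
    return entries

For example, apply_when_date_filter(timestamps, "latest") keeps just the newest timestamp, while a None when_date leaves the list untouched.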
2 changes: 1 addition & 1 deletion il_supermarket_scarper/scrappers/tests/test_all.py
@@ -21,7 +21,7 @@ class CityMarketGivatayimTestCase(
 
 
 class CityMarketKirtatOnoTestCase(
-    make_test_case(ScraperFactory.CITY_MARKET_KIRYATONO, 3)
+    make_test_case(ScraperFactory.CITY_MARKET_KIRYATONO, 1)
 ):
     """Test case for CityMarketKirtatOno"""
 
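The only change in this file is the second argument to make_test_case for the Kiryat Ono scraper (3 becomes 1). The factory's signature is not part of the diff, but the test_cases.py rewrite below closes over scraper_enum and store_id, so that argument is presumably the store id exercised by test_scrape_file_from_single_store. A self-contained sketch of the class-factory pattern, with placeholder string values standing in for the real ScraperFactory members:

import unittest

def make_test_case(scraper_enum, store_id):
    """Illustrative factory returning a TestCase bound to one scraper and store."""

    class TestScapers(unittest.TestCase):
        def test_parameters_are_bound(self):
            # the generated class closes over the factory arguments
            self.assertIsNotNone(scraper_enum)
            self.assertIsNotNone(store_id)

    return TestScapers


# module-level subclassing, mirroring the style used in test_all.py
class CityMarketKirtatOnoTestCase(make_test_case("CITY_MARKET_KIRYATONO", 1)):
    """Test case for CityMarketKirtatOno"""


if __name__ == "__main__":
    unittest.main()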
51 changes: 11 additions & 40 deletions il_supermarket_scarper/scrappers/tests/test_cases.py
@@ -195,67 +195,38 @@ def _get_temp_folder(self):
 
         def test_scrape_one(self):
             """scrape one file and make sure it exists"""
-            self._clean_scarpe_delete(scraper_enum, limit=1, when_date=_testing_now())
+            self._clean_scarpe_delete(scraper_enum, limit=1)
 
-        def test_scrape_ten(self):
-            """scrape ten file and make sure they exists"""
-            self._clean_scarpe_delete(scraper_enum, limit=10, when_date=_testing_now())
+        def test_scrape_three(self):
+            """scrape three file and make sure they exists"""
+            self._clean_scarpe_delete(scraper_enum, limit=3)
 
         def test_scrape_promo(self):
             """scrape one promo file and make sure it exists"""
             self._clean_scarpe_delete(
                 scraper_enum,
                 limit=1,
-                when_date=_testing_now(),
-                file_type=[FileTypesFilters.PROMO_FILE.name],
-            )
-
-        def test_scrape_promo_full(self):
-            """scrape one promo file and make sure it exists"""
-            self._clean_scarpe_delete(
-                scraper_enum,
-                limit=1,
-                when_date=_testing_now(),
-                file_type=[FileTypesFilters.PROMO_FULL_FILE.name],
+                file_type=FileTypesFilters.only_promo(),
             )
 
         def test_scrape_store(self):
             """scrape one store file and make sure it exists"""
             self._clean_scarpe_delete(
-                scraper_enum,
-                limit=1,
-                when_date=_testing_now(),
-                file_type=[FileTypesFilters.STORE_FILE.name],
+                scraper_enum, limit=1, file_type=FileTypesFilters.only_store()
             )
 
         def test_scrape_price(self):
             """scrape one price file and make sure it exists"""
             self._clean_scarpe_delete(
-                scraper_enum,
-                limit=1,
-                when_date=_testing_now(),
-                file_type=[FileTypesFilters.PRICE_FILE.name],
-            )
-
-        def test_scrape_price_full(self):
-            """scrape one price file and make sure it exists"""
-            self._clean_scarpe_delete(
-                scraper_enum,
-                limit=1,
-                when_date=_testing_now(),
-                file_type=[FileTypesFilters.PRICE_FULL_FILE.name],
+                scraper_enum, limit=1, file_type=FileTypesFilters.only_price()
            )
 
         def test_scrape_file_from_single_store(self):
             """test fetching only files from a ceriten store"""
-            self._clean_scarpe_delete(
-                scraper_enum, store_id=store_id, when_date=_testing_now(), limit=1
-            )
+            self._clean_scarpe_delete(scraper_enum, store_id=store_id, limit=1)
 
-        def test_scrape_file_from_single_store_last(self):
-            """test fetching latest file only"""
-            self._clean_scarpe_delete(
-                scraper_enum, store_id=store_id, when_date=_testing_now(), limit=1
-            )
+        def test_scrape_file_today(self):
+            """test fetching file from today"""
+            self._clean_scarpe_delete(scraper_enum, when_date=_testing_now(), limit=1)
 
     return TestScapers
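The rewritten tests swap hand-built lists such as [FileTypesFilters.PROMO_FILE.name] for helper calls (only_promo, only_store, only_price) whose definitions are not part of this diff. A sketch of the shape those helpers presumably have, namely classmethods returning the relevant subset of file-type names; the enum values below are illustrative and the real FileTypesFilters may differ:

from enum import Enum

class FileTypesFilters(Enum):
    """Trimmed-down stand-in for the project's enum; values are illustrative."""
    PROMO_FILE = "promo"
    PROMO_FULL_FILE = "promo_full"
    STORE_FILE = "store"
    PRICE_FILE = "price"
    PRICE_FULL_FILE = "price_full"

    @classmethod
    def only_promo(cls):
        # assumed shape: both promo variants, returned as a list of names
        return [cls.PROMO_FILE.name, cls.PROMO_FULL_FILE.name]

    @classmethod
    def only_store(cls):
        return [cls.STORE_FILE.name]

    @classmethod
    def only_price(cls):
        return [cls.PRICE_FILE.name, cls.PRICE_FULL_FILE.name]


# usage mirroring the rewritten tests: file_type=FileTypesFilters.only_promo()
print(FileTypesFilters.only_promo())  # ['PROMO_FILE', 'PROMO_FULL_FILE']

A helper like this would also explain why test_scrape_promo and test_scrape_promo_full could be merged into a single test: one call now covers both promo variants.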
