added all ATK sites to custom scraper
parent 028b2dfb22
commit 50140db668
@@ -4,8 +4,12 @@ from recipe_scrapers._abstract import AbstractScraper
 class CooksIllustrated(AbstractScraper):
     @classmethod
-    def host(cls):
-        return "cooksillustrated.com"
+    def host(cls, site='cooksillustrated'):
+        return {
+            'cooksillustrated': f"{site}.com",
+            'americastestkitchen': f"{site}.com",
+            'cookscountry': f"{site}.com",
+        }.get(site)
 
     def title(self):
         return self.schema.title()
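The reworked host() keys each supported site name to its domain, so one scraper class can be registered for every America's Test Kitchen property; an unrecognized site falls through to None via dict.get(). A minimal standalone sketch of that dispatch (the free function is for illustration only, not part of the commit):

```python
# Illustrative stand-in for the classmethod above: map a site key to its domain.
def host(site='cooksillustrated'):
    return {
        'cooksillustrated': f"{site}.com",
        'americastestkitchen': f"{site}.com",
        'cookscountry': f"{site}.com",
    }.get(site)

assert host() == "cooksillustrated.com"
assert host(site="americastestkitchen") == "americastestkitchen.com"
assert host(site="nope") is None  # unknown keys return None
```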
@@ -29,10 +33,10 @@ class CooksIllustrated(AbstractScraper):
         ingredients = self.recipe['ingredientGroups'][0]['fields']['recipeIngredientItems']
         return [
             "{} {} {}{}".format(
-                i['fields']['qty'],
-                i['fields']['measurement'],
-                i['fields']['ingredient']['fields']['title'],
-                i['fields']['postText']
+                i['fields']['qty'] or '',
+                i['fields']['measurement'] or '',
+                i['fields']['ingredient']['fields']['title'] or '',
+                i['fields']['postText'] or ''
             )
             for i in ingredients
         ]
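This hunk guards the ingredient formatter against missing fields: when the recipe JSON carries None for qty, measurement, or postText, str.format() would otherwise print the literal word None. A small illustration with made-up field values:

```python
# Made-up ingredient fields; the real values come from the ATK recipe JSON.
fields = {'qty': '1', 'measurement': None, 'title': 'kosher salt', 'postText': None}

before = "{} {} {}{}".format(
    fields['qty'], fields['measurement'], fields['title'], fields['postText'])
after = "{} {} {}{}".format(
    fields['qty'] or '', fields['measurement'] or '',
    fields['title'] or '', fields['postText'] or '')

print(before)  # 1 None kosher saltNone
print(after)   # 1  kosher salt   (empty strings in place of missing values)
```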
@@ -51,6 +55,7 @@ class CooksIllustrated(AbstractScraper):
         raise NotImplementedError("This should be implemented.")
 
     def get_recipe(self):
+        # TODO add missing data to schema.data
         j = json.loads(self.soup.find(type='application/json').string)
         name = list(j['props']['initialState']['content']['documents'])[0]
         self.recipe = j['props']['initialState']['content']['documents'][name]
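For reference, get_recipe() reads the recipe out of the JSON state blob embedded in the page rather than schema.org markup. A rough standalone sketch of the same extraction; the fetch_recipe helper and the requests/bs4 plumbing are illustrative, and the nested key path assumes the page structure the scraper already relies on:

```python
import json

import requests
from bs4 import BeautifulSoup


def fetch_recipe(url):
    # Grab the <script type="application/json"> state blob the site ships with the page.
    soup = BeautifulSoup(requests.get(url).text, 'html.parser')
    state = json.loads(soup.find(type='application/json').string)
    documents = state['props']['initialState']['content']['documents']
    name = list(documents)[0]  # the single document key for this recipe page
    return documents[name]     # raw recipe dict, as stored in self.recipe above
```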
@@ -7,7 +7,9 @@ from recipe_scrapers._schemaorg import SchemaOrg
 from .cooksillustrated import CooksIllustrated
 
 CUSTOM_SCRAPERS = {
-    CooksIllustrated.host(): CooksIllustrated,
+    CooksIllustrated.host(site="cooksillustrated"): CooksIllustrated,
+    CooksIllustrated.host(site="americastestkitchen"): CooksIllustrated,
+    CooksIllustrated.host(site="cookscountry"): CooksIllustrated,
 }
 SCRAPERS.update(CUSTOM_SCRAPERS)
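The registry is keyed by the exact domain string host() returns, so the previous single entry only matched cooksillustrated.com; after this hunk all three ATK domains resolve to the same class. A quick, illustrative sanity check, run in the context of the updated module:

```python
# Insertion-order keys produced by the three host() calls above.
print(list(CUSTOM_SCRAPERS))
# ['cooksillustrated.com', 'americastestkitchen.com', 'cookscountry.com']

# Every domain maps back to the shared CooksIllustrated scraper.
assert all(cls is CooksIllustrated for cls in CUSTOM_SCRAPERS.values())
```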