Revert "bug fix url import"

This reverts commit 4ab8ca51e812d02911f4da801767612e52953a23.
This commit is contained in:
smilerz 2021-04-14 09:18:14 -05:00
parent 19e8e5cb5b
commit 0953af05fc
5 changed files with 21 additions and 25 deletions

View File

@@ -76,9 +76,6 @@ def get_recipe_from_source(text, url, space):
# text = normalize_string(text) # text = normalize_string(text)
try: try:
parse_list.append(remove_graph(json.loads(text))) parse_list.append(remove_graph(json.loads(text)))
if not url and 'url' in parse_list[0]:
url = parse_list[0]['url']
scrape = text_scraper("<script type='application/ld+json'>" + text + "</script>", url=url)
except JSONDecodeError: except JSONDecodeError:
soup = BeautifulSoup(text, "html.parser") soup = BeautifulSoup(text, "html.parser")
@@ -86,8 +83,6 @@ def get_recipe_from_source(text, url, space):
images += get_images_from_source(soup, url) images += get_images_from_source(soup, url)
for el in soup.find_all('script', type='application/ld+json'): for el in soup.find_all('script', type='application/ld+json'):
el = remove_graph(el) el = remove_graph(el)
if not url and 'url' in el:
url = el['url']
if type(el) == list: if type(el) == list:
for le in el: for le in el:
parse_list.append(le) parse_list.append(le)
@@ -100,6 +95,15 @@ def get_recipe_from_source(text, url, space):
parse_list.append(le) parse_list.append(le)
elif type(el) == dict: elif type(el) == dict:
parse_list.append(el) parse_list.append(el)
# if a url was not provided, try to find one in the first document
if not url and len(parse_list) > 0:
if 'url' in parse_list[0]:
url = parse_list[0]['url']
if type(text) == dict:
scrape = text_scraper("<script type='application/ld+json'>" + text + "</script>", url=url)
elif type(text) == str:
scrape = text_scraper(text, url=url) scrape = text_scraper(text, url=url)
recipe_json = helper.get_from_scraper(scrape, space) recipe_json = helper.get_from_scraper(scrape, space)

View File

@@ -39,9 +39,9 @@ def get_from_scraper(scrape, space):
pass pass
try: try:
recipe_json['image'] = parse_image(scrape.image()) or '' recipe_json['image'] = scrape.image()
except (AttributeError, TypeError): except AttributeError:
recipe_json['image'] = '' pass
keywords = [] keywords = []
try: try:
@@ -282,11 +282,11 @@ def parse_keywords(keyword_json, space):
# keywords as list # keywords as list
for kw in keyword_json: for kw in keyword_json:
kw = normalize_string(kw) kw = normalize_string(kw)
if len(kw) != 0: if k := Keyword.objects.filter(name=kw, space=space).first():
if k := Keyword.objects.filter(name=kw, space=space).first(): if len(k) != 0:
keywords.append({'id': str(k.id), 'text': str(k)}) keywords.append({'id': str(k.id), 'text': str(k)})
else: else:
keywords.append({'id': random.randrange(1111111, 9999999, 1), 'text': kw}) keywords.append({'id': random.randrange(1111111, 9999999, 1), 'text': kw})
return keywords return keywords

View File

@@ -30,7 +30,7 @@ def text_scraper(text, url=None):
url=None url=None
): ):
self.wild_mode = False self.wild_mode = False
self.exception_handling = None self.exception_handling = _exception_handling
self.meta_http_equiv = False self.meta_http_equiv = False
self.soup = BeautifulSoup(page_data, "html.parser") self.soup = BeautifulSoup(page_data, "html.parser")
self.url = url self.url = url

View File

@@ -1705,7 +1705,7 @@ MADAME_DESSERT = {
"servings": 6, "servings": 6,
"prepTime": 0, "prepTime": 0,
"cookTime": 20, "cookTime": 20,
"image": "https://assets.madamedessert.de/wp-content/uploads/2020/02/25163328/Madame-Dessert_Schokopudding-Schokoladenpudding-mit-echter-Schokolade-0238-scaled.jpg", "image": "https://madamedessert.de/wp-content/uploads/2020/02/Madame-Dessert_Schokopudding-Schokoladenpudding-mit-echter-Schokolade-0238-scaled.jpg",
"keywords": [ "keywords": [
{ {
"id": 7588432, "id": 7588432,
@@ -1795,7 +1795,7 @@ MADAME_DESSERT = {
"original": "1 TL Vanilleextrakt" "original": "1 TL Vanilleextrakt"
}, },
{ {
"amount": 150, "amount": 125,
"unit": { "unit": {
"text": "g", "text": "g",
"id": 24254 "id": 24254
@@ -1805,7 +1805,7 @@ MADAME_DESSERT = {
"id": 42645 "id": 42645
}, },
"note": "", "note": "",
"original": "150 g Zucker" "original": "125 g Zucker"
}, },
{ {
"amount": 30, "amount": 30,

File diff suppressed because one or more lines are too long