Removed old JSON importer

This commit is contained in:
smilerz 2021-04-01 16:21:28 -05:00
parent a54f4e1367
commit 0a62225797

View File

@@ -15,73 +15,70 @@ from django.utils.translation import gettext as _
from recipe_scrapers._utils import get_minutes, normalize_string
def find_recipe_json(ld_json, url, space):
ld_json['name'] = parse_name(ld_json['name'])
# def find_recipe_json(ld_json, url, space):
# ld_json['name'] = parse_name(ld_json['name'])
# some sites use ingredients instead of recipeIngredients
if 'recipeIngredient' not in ld_json and 'ingredients' in ld_json:
ld_json['recipeIngredient'] = ld_json['ingredients']
# # some sites use ingredients instead of recipeIngredients
# if 'recipeIngredient' not in ld_json and 'ingredients' in ld_json:
# ld_json['recipeIngredient'] = ld_json['ingredients']
if 'recipeIngredient' in ld_json:
ld_json['recipeIngredient'] = parse_ingredients(ld_json['recipeIngredient'])
else:
ld_json['recipeIngredient'] = ""
# if 'recipeIngredient' in ld_json:
# ld_json['recipeIngredient'] = parse_ingredients(ld_json['recipeIngredient'])
# else:
# ld_json['recipeIngredient'] = ""
keywords = []
if 'keywords' in ld_json:
keywords += listify_keywords(ld_json['keywords'])
if 'recipeCategory' in ld_json:
keywords += listify_keywords(ld_json['recipeCategory'])
if 'recipeCuisine' in ld_json:
keywords += listify_keywords(ld_json['recipeCuisine'])
try:
ld_json['keywords'] = parse_keywords(list(set(map(str.casefold, keywords))), space)
except TypeError:
pass
# keywords = []
# if 'keywords' in ld_json:
# keywords += listify_keywords(ld_json['keywords'])
# if 'recipeCategory' in ld_json:
# keywords += listify_keywords(ld_json['recipeCategory'])
# if 'recipeCuisine' in ld_json:
# keywords += listify_keywords(ld_json['recipeCuisine'])
# try:
# ld_json['keywords'] = parse_keywords(list(set(map(str.casefold, keywords))), space)
# except TypeError:
# pass
if 'recipeInstructions' in ld_json:
ld_json['recipeInstructions'] = parse_instructions(ld_json['recipeInstructions'])
else:
ld_json['recipeInstructions'] = ""
# if 'recipeInstructions' in ld_json:
# ld_json['recipeInstructions'] = parse_instructions(ld_json['recipeInstructions'])
# else:
# ld_json['recipeInstructions'] = ""
if url:
ld_json['recipeInstructions'] += "\nImported from " + url
# if 'image' in ld_json:
# ld_json['image'] = parse_image(ld_json['image'])
# else:
# ld_json['image'] = ""
if 'image' in ld_json:
ld_json['image'] = parse_image(ld_json['image'])
else:
ld_json['image'] = ""
# if 'description' in ld_json:
# ld_json['description'] = normalize_string(ld_json['description'])
# else:
# ld_json['description'] = ""
if 'description' in ld_json:
ld_json['description'] = normalize_string(ld_json['description'])
else:
ld_json['description'] = ""
# if 'cookTime' in ld_json:
# ld_json['cookTime'] = parse_cooktime(ld_json['cookTime'])
# else:
# ld_json['cookTime'] = 0
if 'cookTime' in ld_json:
ld_json['cookTime'] = parse_cooktime(ld_json['cookTime'])
else:
ld_json['cookTime'] = 0
# if 'prepTime' in ld_json:
# ld_json['prepTime'] = parse_cooktime(ld_json['prepTime'])
# else:
# ld_json['prepTime'] = 0
if 'prepTime' in ld_json:
ld_json['prepTime'] = parse_cooktime(ld_json['prepTime'])
else:
ld_json['prepTime'] = 0
# if 'servings' in ld_json:
# ld_json['servings'] = parse_servings(ld_json['servings'])
# elif 'recipeYield' in ld_json:
# ld_json['servings'] = parse_servings(ld_json['recipeYield'])
# else:
# ld_json['servings'] = 1
if 'servings' in ld_json:
ld_json['servings'] = parse_servings(ld_json['servings'])
elif 'recipeYield' in ld_json:
ld_json['servings'] = parse_servings(ld_json['recipeYield'])
else:
ld_json['servings'] = 1
# for key in list(ld_json):
# if key not in [
# 'prepTime', 'cookTime', 'image', 'recipeInstructions',
# 'keywords', 'name', 'recipeIngredient', 'servings', 'description'
# ]:
# ld_json.pop(key, None)
for key in list(ld_json):
if key not in [
'prepTime', 'cookTime', 'image', 'recipeInstructions',
'keywords', 'name', 'recipeIngredient', 'servings', 'description'
]:
ld_json.pop(key, None)
return ld_json
# return ld_json
def get_from_scraper(scrape, space):