Restored missing manual URL scrape

This commit is contained in:
smilerz 2021-04-16 11:26:26 -05:00
parent 987be4b04d
commit b2c1c6e301

View File

@ -555,7 +555,6 @@ def recipe_from_source(request):
)
if mode == 'url' and auto == 'true':
if auto == 'true':
try:
scrape = scrape_me(url)
except WebsiteNotImplementedError:
@ -585,7 +584,27 @@ def recipe_from_source(request):
status=400)
else:
return JsonResponse({"recipe_json": get_from_scraper(scrape, request.space)})
else:
try:
response = requests.get(url, headers=HEADERS)
except requests.exceptions.ConnectionError:
return JsonResponse(
{
'error': True,
'msg': _('The requested page could not be found.')
},
status=400
)
if response.status_code == 403:
return JsonResponse(
{
'error': True,
'msg': _('The requested page refused to provide any information (Status Code 403).')
},
status=400
)
data = response.text
if (mode == 'source') or (mode == 'url' and auto == 'false'):
if not data or data == 'undefined':
data = requests.get(url, headers=HEADERS).content