restructured integration do_export
@@ -1,5 +1,5 @@
 import json
-from io import BytesIO
+from io import BytesIO, StringIO
 from re import match
 from zipfile import ZipFile
@@ -35,3 +35,27 @@ class Default(Integration):
         export = RecipeExportSerializer(recipe).data

         return 'recipe.json', JSONRenderer().render(export).decode("utf-8")
+
+    def get_files_from_recipes(self, recipes, cookie):
+
+        files = []
+        for r in recipes:
+            if r.internal and r.space == self.request.space:
+                recipe_zip_stream = BytesIO()
+                recipe_zip_obj = ZipFile(recipe_zip_stream, 'w')
+
+                recipe_stream = StringIO()
+                filename, data = self.get_file_from_recipe(r)
+                recipe_stream.write(data)
+                recipe_zip_obj.writestr(filename, recipe_stream.getvalue())
+                recipe_stream.close()
+                try:
+                    recipe_zip_obj.writestr(f'image{get_filetype(r.image.file.name)}', r.image.file.read())
+                except ValueError:
+                    pass
+
+                recipe_zip_obj.close()
+
+                files.append([ str(r.pk) + '.zip', recipe_zip_stream.getvalue() ])
+
+        return files
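For orientation, the contract introduced here is that every exporter's get_files_from_recipes returns a list of [filename, data] pairs. A minimal sketch of consuming the Default exporter's result (the integration and recipes objects and the request are assumed to come from the surrounding export view; they are not part of this diff):

    from io import BytesIO
    from zipfile import ZipFile

    # Each entry is a [filename, bytes] pair; Default produces one zip per internal recipe.
    files = integration.get_files_from_recipes(recipes, request.COOKIES)
    for filename, data in files:
        with ZipFile(BytesIO(data)) as zf:
            # Expected contents: 'recipe.json' plus an optional image file.
            print(filename, zf.namelist())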
@@ -65,60 +65,33 @@ class Integration:
         """
         Perform the export based on a list of recipes
         :param recipes: list of recipe objects
-        :return: HttpResponse with a ZIP file that is directly downloaded
+        :return: HttpResponse with the file of the requested export format that is directly downloaded (when that format involves multiple files, they are zipped together)
         """
-        # TODO this is temporary, find a better solution for different export formats when doing other exporters
-        if self.export_type == ImportExportBase.PDF:
-
-            export_zip_stream = BytesIO()
-            export_zip_obj = ZipFile(export_zip_stream, 'w')
-
-            files = self.get_files_from_recipes(recipes, self.request.COOKIES)
-            for filename, data in files:
-                export_zip_obj.writestr(filename, data)
-
-            export_zip_obj.close()
-
-            response = HttpResponse(export_zip_stream.getvalue(), content_type='application/force-download')
-            response['Content-Disposition'] = 'attachment; filename="export.zip"'
-            return response
-
-        elif self.export_type != ImportExportBase.RECIPESAGE:
-            export_zip_stream = BytesIO()
-            export_zip_obj = ZipFile(export_zip_stream, 'w')
-
-            for r in recipes:
-                if r.internal and r.space == self.request.space:
-                    recipe_zip_stream = BytesIO()
-                    recipe_zip_obj = ZipFile(recipe_zip_stream, 'w')
-
-                    recipe_stream = StringIO()
-                    filename, data = self.get_file_from_recipe(r)
-                    recipe_stream.write(data)
-                    recipe_zip_obj.writestr(filename, recipe_stream.getvalue())
-                    recipe_stream.close()
-                    try:
-                        recipe_zip_obj.writestr(f'image{get_filetype(r.image.file.name)}', r.image.file.read())
-                    except ValueError:
-                        pass
-
-                    recipe_zip_obj.close()
-                    export_zip_obj.writestr(str(r.pk) + '.zip', recipe_zip_stream.getvalue())
-
-            export_zip_obj.close()
-
-            response = HttpResponse(export_zip_stream.getvalue(), content_type='application/force-download')
-            response['Content-Disposition'] = 'attachment; filename="export.zip"'
-            return response
-        else:
-            json_list = []
-            for r in recipes:
-                json_list.append(self.get_file_from_recipe(r))
-
-            response = HttpResponse(json.dumps(json_list), content_type='application/force-download')
-            response['Content-Disposition'] = 'attachment; filename="recipes.json"'
-            return response
+
+        files = self.get_files_from_recipes(recipes, self.request.COOKIES)
+
+        if len(files) == 1:
+            filename, file = files[0]
+            export_filename = filename
+            export_file = file
+
+        else:
+            export_filename = "export.zip"
+
+            export_stream = BytesIO()
+            export_obj = ZipFile(export_stream, 'w')
+
+            for filename, file in files:
+                export_obj.writestr(filename, file)
+
+            export_obj.close()
+            export_file = export_stream.getvalue()
+
+        response = HttpResponse(export_file, content_type='application/force-download')
+        response['Content-Disposition'] = 'attachment; filename="'+export_filename+'"'
+        return response


     def import_file_name_filter(self, zip_info_object):
         """
         Since zipfile.namelist() returns all files in all subdirectories this function allows filtering of files
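The restructured do_export now treats every exporter the same: it asks get_files_from_recipes for a list of [filename, data] pairs and only wraps them in a zip when there is more than one. A standalone sketch of that decision, assuming files follows the contract above (bundle_export is an illustrative name, not part of the codebase):

    from io import BytesIO
    from zipfile import ZipFile

    def bundle_export(files):
        # files: list of [filename, data] pairs as returned by get_files_from_recipes
        if len(files) == 1:
            # A single file is served unchanged under its own name.
            return files[0]
        # Multiple files are wrapped into one export.zip archive.
        stream = BytesIO()
        with ZipFile(stream, 'w') as archive:
            for filename, data in files:
                archive.writestr(filename, data)
        return ['export.zip', stream.getvalue()]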
@@ -23,7 +23,7 @@ class PDFexport(Integration):


-    async def gen_pdf(self, recipes, sessionid):
+    async def get_files_from_recipes_async(self, recipes, cookie):
         cmd = runserver.Command()

         browser = await launch(
@@ -33,7 +33,7 @@ class PDFexport(Integration):
             ignoreHTTPSErrors=True
         )

-        cookies = {'domain': cmd.default_addr, 'name': 'sessionid', 'value': sessionid,}
+        cookies = {'domain': cmd.default_addr, 'name': 'sessionid', 'value': cookie['sessionid'],}
         options = { 'format': 'letter',
                     'margin': {
                         'top': '0.75in',
@@ -59,4 +59,4 @@ class PDFexport(Integration):


     def get_files_from_recipes(self, recipes, cookie):
-        return asyncio.run(self.gen_pdf(recipes, cookie['sessionid']))
+        return asyncio.run(self.get_files_from_recipes_async(recipes, cookie))
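With this rename, PDFexport exposes the same synchronous get_files_from_recipes(recipes, cookie) entry point as the other exporters and only bridges into asyncio internally. Schematically (a sketch of the call shape only, not the real rendering code):

    import asyncio

    class PDFExportSketch:
        async def get_files_from_recipes_async(self, recipes, cookie):
            # The session id is now read from the cookie dict inside the async helper,
            # instead of being extracted by the caller.
            sessionid = cookie['sessionid']
            files = []
            # ... render each recipe to PDF in the headless browser and
            #     append a [filename, pdf_bytes] pair to files ...
            return files

        def get_files_from_recipes(self, recipes, cookie):
            # Same synchronous entry point as every other exporter.
            return asyncio.run(self.get_files_from_recipes_async(recipes, cookie))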
@@ -88,5 +88,12 @@ class RecipeSage(Integration):

         return data

+    def get_files_from_recipes(self, recipes, cookie):
+        json_list = []
+        for r in recipes:
+            json_list.append(self.get_file_from_recipe(r))
+
+        return [['export.json', json.dumps(json_list)]]
+
     def split_recipe_file(self, file):
         return json.loads(file.read().decode("utf-8"))
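Because RecipeSage bundles all recipes into a single JSON document, the list returned here always has exactly one entry, so the restructured do_export serves it directly instead of wrapping it in a zip. Roughly (the recipe payload below is illustrative, not taken from the diff):

    import json

    json_list = [{'title': 'Example recipe'}]          # one dict per recipe (illustrative)
    files = [['export.json', json.dumps(json_list)]]   # single [filename, data] entry

    assert len(files) == 1   # do_export returns export.json as-is, no export.zip wrapper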