Merge branch 'develop' into ldap
This commit is contained in:
commit
072ca30c8a
@ -1,6 +1,7 @@
|
||||
# only set this to true when testing/debugging
|
||||
# when unset: 1 (true) - dont unset this, just for development
|
||||
DEBUG=0
|
||||
SQL_DEBUG=0
|
||||
|
||||
# hosts the application can run under e.g. recipes.mydomain.com,cooking.mydomain.com,...
|
||||
ALLOWED_HOSTS=*
|
||||
@ -78,6 +79,8 @@ GUNICORN_MEDIA=0
|
||||
# when unset: 0 (false)
|
||||
REVERSE_PROXY_AUTH=0
|
||||
|
||||
# If base URL is something other than just / (you are serving a subfolder in your proxy for instance http://recipe_app/recipes/)
|
||||
# SCRIPT_NAME=/recipes
|
||||
# Default settings for spaces, apply per space and can be changed in the admin view
|
||||
# SPACE_DEFAULT_MAX_RECIPES=0 # 0=unlimited recipes
|
||||
# SPACE_DEFAULT_MAX_USERS=0 # 0=unlimited users per space
|
||||
@ -117,8 +120,14 @@ REVERSE_PROXY_AUTH=0
|
||||
# Django session cookie settings. Can be changed to allow a single django application to authenticate several applications
|
||||
# when running under the same database
|
||||
# SESSION_COOKIE_DOMAIN=.example.com
|
||||
# SESSION_COOKIE_NAME=sessionid # use this only to not interfere with non unified django applications under the same top level domain
|
||||
|
||||
# by default SORT_TREE_BY_NAME is disabled this will store all Keywords and Food in the order they are created
|
||||
# enabling this setting makes saving new keywords and foods very slow, which doesn't matter in most usecases.
|
||||
# however, when doing large imports of recipes that will create new objects, can increase total run time by 10-15x
|
||||
# Keywords and Food can be manually sorted by name in Admin
|
||||
# This value can also be temporarily changed in Admin, it will revert the next time the application is started
|
||||
# This will be fixed/changed in the future by changing the implementation or finding a better workaround for sorting
|
||||
# SORT_TREE_BY_NAME=0
|
||||
# LDAP authentication
|
||||
# default 0 (false), when 1 (true) list of allowed users will be fetched from LDAP server
|
||||
#LDAP_AUTH=
|
||||
|
11
.github/dependabot.yml
vendored
11
.github/dependabot.yml
vendored
@ -5,7 +5,12 @@
|
||||
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "pip" # See documentation for possible values
|
||||
directory: "/" # Location of package manifests
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
interval: "monthly"
|
||||
|
||||
- package-ecosystem: "npm"
|
||||
directory: "/vue/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
|
12
.github/workflows/ci.yml
vendored
12
.github/workflows/ci.yml
vendored
@ -4,7 +4,7 @@ on: [push]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
if: github.repository_owner == 'vabene1111'
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
max-parallel: 4
|
||||
@ -17,6 +17,16 @@ jobs:
|
||||
uses: actions/setup-python@v1
|
||||
with:
|
||||
python-version: 3.9
|
||||
# Build Vue frontend
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '14'
|
||||
- name: Install dependencies
|
||||
working-directory: ./vue
|
||||
run: yarn install
|
||||
- name: Build dependencies
|
||||
working-directory: ./vue
|
||||
run: yarn build
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
|
3
.github/workflows/codeql-analysis.yml
vendored
3
.github/workflows/codeql-analysis.yml
vendored
@ -8,9 +8,8 @@ on:
|
||||
|
||||
jobs:
|
||||
CodeQL-Build:
|
||||
|
||||
if: github.repository_owner == 'vabene1111'
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
20
.github/workflows/docker-publish-beta.yml
vendored
20
.github/workflows/docker-publish-beta.yml
vendored
@ -5,9 +5,11 @@ on:
|
||||
- 'beta'
|
||||
jobs:
|
||||
build:
|
||||
if: github.repository_owner == 'vabene1111'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@master
|
||||
# Update Version number
|
||||
- name: Update version file
|
||||
uses: DamianReeves/write-file-action@v1.0
|
||||
with:
|
||||
@ -16,6 +18,17 @@ jobs:
|
||||
VERSION_NUMBER = 'beta'
|
||||
BUILD_REF = '${{ github.sha }}'
|
||||
write-mode: overwrite
|
||||
# Build Vue frontend
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '14'
|
||||
- name: Install dependencies
|
||||
working-directory: ./vue
|
||||
run: yarn install
|
||||
- name: Build dependencies
|
||||
working-directory: ./vue
|
||||
run: yarn build
|
||||
# Build container
|
||||
- name: Build and publish image
|
||||
uses: ilteoood/docker_buildx@master
|
||||
with:
|
||||
@ -24,3 +37,10 @@ jobs:
|
||||
tag: beta
|
||||
dockerHubUser: ${{ secrets.DOCKER_USERNAME }}
|
||||
dockerHubPassword: ${{ secrets.DOCKER_PASSWORD }}
|
||||
# Send discord notification
|
||||
- name: Discord notification
|
||||
env:
|
||||
DISCORD_WEBHOOK: ${{ secrets.DISCORD_BETA_WEBHOOK }}
|
||||
uses: Ilshidur/action-discord@0.3.2
|
||||
with:
|
||||
args: '🚀 The BETA Image has been updated! 🥳'
|
13
.github/workflows/docker-publish-dev.yml
vendored
13
.github/workflows/docker-publish-dev.yml
vendored
@ -7,9 +7,11 @@ on:
|
||||
- '!master'
|
||||
jobs:
|
||||
build:
|
||||
if: github.repository_owner == 'vabene1111'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@master
|
||||
# Update Version number
|
||||
- name: Update version file
|
||||
uses: DamianReeves/write-file-action@v1.0
|
||||
with:
|
||||
@ -18,6 +20,17 @@ jobs:
|
||||
VERSION_NUMBER = 'develop'
|
||||
BUILD_REF = '${{ github.sha }}'
|
||||
write-mode: overwrite
|
||||
# Build Vue frontend
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '14'
|
||||
- name: Install dependencies
|
||||
working-directory: ./vue
|
||||
run: yarn install
|
||||
- name: Build dependencies
|
||||
working-directory: ./vue
|
||||
run: yarn build
|
||||
# Build container
|
||||
- name: Publish to Registry
|
||||
uses: elgohr/Publish-Docker-Github-Action@2.13
|
||||
with:
|
||||
|
13
.github/workflows/docker-publish-latest.yml
vendored
13
.github/workflows/docker-publish-latest.yml
vendored
@ -6,12 +6,14 @@ on:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
if: github.repository_owner == 'vabene1111'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@master
|
||||
- name: Get version number
|
||||
id: get_version
|
||||
run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//}
|
||||
# Update Version number
|
||||
- name: Update version file
|
||||
uses: DamianReeves/write-file-action@v1.0
|
||||
with:
|
||||
@ -20,6 +22,17 @@ jobs:
|
||||
VERSION_NUMBER = '${{ steps.get_version.outputs.VERSION }}'
|
||||
BUILD_REF = '${{ github.sha }}'
|
||||
write-mode: overwrite
|
||||
# Build Vue frontend
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '14'
|
||||
- name: Install dependencies
|
||||
working-directory: ./vue
|
||||
run: yarn install
|
||||
- name: Build dependencies
|
||||
working-directory: ./vue
|
||||
run: yarn build
|
||||
# Build container
|
||||
- name: Build and publish image
|
||||
uses: ilteoood/docker_buildx@master
|
||||
with:
|
||||
|
20
.github/workflows/docker-publish-release.yml
vendored
20
.github/workflows/docker-publish-release.yml
vendored
@ -7,6 +7,7 @@ on:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
if: github.repository_owner == 'vabene1111'
|
||||
runs-on: ubuntu-latest
|
||||
name: Build image job
|
||||
steps:
|
||||
@ -15,6 +16,7 @@ jobs:
|
||||
- name: Get version number
|
||||
id: get_version
|
||||
run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//}
|
||||
# Update Version number
|
||||
- name: Update version file
|
||||
uses: DamianReeves/write-file-action@v1.0
|
||||
with:
|
||||
@ -23,6 +25,17 @@ jobs:
|
||||
VERSION_NUMBER = '${{ steps.get_version.outputs.VERSION }}'
|
||||
BUILD_REF = '${{ github.sha }}'
|
||||
write-mode: overwrite
|
||||
# Build Vue frontend
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '14'
|
||||
- name: Install dependencies
|
||||
working-directory: ./vue
|
||||
run: yarn install
|
||||
- name: Build dependencies
|
||||
working-directory: ./vue
|
||||
run: yarn build
|
||||
# Build container
|
||||
- name: Build and publish image
|
||||
uses: ilteoood/docker_buildx@master
|
||||
with:
|
||||
@ -31,3 +44,10 @@ jobs:
|
||||
tag: ${{ steps.get_version.outputs.VERSION }}
|
||||
dockerHubUser: ${{ secrets.DOCKER_USERNAME }}
|
||||
dockerHubPassword: ${{ secrets.DOCKER_PASSWORD }}
|
||||
# Send discord notification
|
||||
- name: Discord notification
|
||||
env:
|
||||
DISCORD_WEBHOOK: ${{ secrets.DISCORD_RELEASE_WEBHOOK }}
|
||||
uses: Ilshidur/action-discord@0.3.2
|
||||
with:
|
||||
args: '🚀 A new Version of tandoor has been released 🥳 \n https://github.com/vabene1111/recipes/releases/tag/{{ steps.get_version.outputs.VERSION }}'
|
1
.github/workflows/docs.yml
vendored
1
.github/workflows/docs.yml
vendored
@ -7,6 +7,7 @@ on:
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
if: github.repository_owner == 'vabene1111'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
5
.gitignore
vendored
5
.gitignore
vendored
@ -79,3 +79,8 @@ postgresql/
|
||||
/docker-compose.override.yml
|
||||
vue/node_modules
|
||||
.vscode/
|
||||
vue/yarn.lock
|
||||
vetur.config.js
|
||||
cookbook/static/vue
|
||||
vue/webpack-stats.json
|
||||
cookbook/templates/sw.js
|
||||
|
@ -1,31 +0,0 @@
|
||||
# See https://pre-commit.com for more information
|
||||
# See https://pre-commit.com/hooks.html for more hooks
|
||||
repos:
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: pre-commit-yarn-build
|
||||
name: Build javascript files
|
||||
entry: yarn --cwd ./vue build
|
||||
always_run: true
|
||||
language: system
|
||||
types: [ python ]
|
||||
pass_filenames: false
|
||||
|
||||
#- id: pre-commit-django-migrations
|
||||
# name: Check django migrations
|
||||
# entry: bash -c './venv/bin/activate && ./manage.py makemigrations --check'
|
||||
# language: system
|
||||
# types: [ python ]
|
||||
# pass_filenames: false
|
||||
# - id: pre-commit-django-make-messages
|
||||
# name: Make messages if necessary
|
||||
# entry: ./manage.py makemessages -i venv -a
|
||||
# language: system
|
||||
# types: [ python ]
|
||||
# pass_filenames: false
|
||||
# - id: pre-commit-django-compile-messages
|
||||
# name: Compile messages if necessary
|
||||
# entry: ./manage.py compilemessages -i venv
|
||||
# language: system
|
||||
# types: [ python ]
|
||||
# pass_filenames: false
|
@ -12,14 +12,16 @@
|
||||
<a href="https://github.com/vabene1111/recipes/actions" target="_blank" rel="noopener noreferrer"><img src="https://github.com/vabene1111/recipes/workflows/Continous%20Integration/badge.svg?branch=master" ></a>
|
||||
<a href="https://github.com/vabene1111/recipes/stargazers" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/github/stars/vabene1111/recipes" ></a>
|
||||
<a href="https://github.com/vabene1111/recipes/network/members" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/github/forks/vabene1111/recipes" ></a>
|
||||
<a href="https://discord.gg/RhzBrfWgtp" target="_blank" rel="noopener noreferrer"><img src="https://badgen.net/badge/icon/discord?icon=discord&label" ></a>
|
||||
<a href="https://hub.docker.com/r/vabene1111/recipes" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/docker/pulls/vabene1111/recipes" ></a>
|
||||
<a href="https://github.com/vabene1111/recipes/releases/latest" rel="noopener noreferrer"><img src="https://img.shields.io/github/v/release/vabene1111/recipes" ></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://docs.tandoor.dev/install/docker.html" target="_blank" rel="noopener noreferrer">Installation</a> •
|
||||
<a href="https://docs.tandoor.dev/install/docker/" target="_blank" rel="noopener noreferrer">Installation</a> •
|
||||
<a href="https://docs.tandoor.dev/" target="_blank" rel="noopener noreferrer">Documentation</a> •
|
||||
<a href="https://app.tandoor.dev/accounts/login/?demo" target="_blank" rel="noopener noreferrer">Demo</a>
|
||||
<a href="https://app.tandoor.dev/accounts/login/?demo" target="_blank" rel="noopener noreferrer">Demo</a> •
|
||||
<a href="https://discord.gg/RhzBrfWgtp" target="_blank" rel="noopener noreferrer">Discord server</a>
|
||||
</p>
|
||||
|
||||

|
||||
|
@ -1,6 +1,12 @@
|
||||
from django.conf import settings
|
||||
from django.contrib import admin
|
||||
from django.contrib.postgres.search import SearchVector
|
||||
from treebeard.admin import TreeAdmin
|
||||
from treebeard.forms import movenodeform_factory
|
||||
from django.contrib.auth.admin import UserAdmin
|
||||
from django.contrib.auth.models import User, Group
|
||||
from django_scopes import scopes_disabled
|
||||
from django.utils import translation
|
||||
|
||||
from .models import (Comment, CookLog, Food, Ingredient, InviteLink, Keyword,
|
||||
MealPlan, MealType, NutritionInformation, Recipe,
|
||||
@ -8,7 +14,9 @@ from .models import (Comment, CookLog, Food, Ingredient, InviteLink, Keyword,
|
||||
ShoppingList, ShoppingListEntry, ShoppingListRecipe,
|
||||
Space, Step, Storage, Sync, SyncLog, Unit, UserPreference,
|
||||
ViewLog, Supermarket, SupermarketCategory, SupermarketCategoryRelation,
|
||||
ImportLog, TelegramBot, BookmarkletImport, UserFile)
|
||||
ImportLog, TelegramBot, BookmarkletImport, UserFile, SearchPreference)
|
||||
|
||||
from cookbook.managers import DICTIONARY
|
||||
|
||||
|
||||
class CustomUserAdmin(UserAdmin):
|
||||
@ -46,6 +54,19 @@ class UserPreferenceAdmin(admin.ModelAdmin):
|
||||
admin.site.register(UserPreference, UserPreferenceAdmin)
|
||||
|
||||
|
||||
class SearchPreferenceAdmin(admin.ModelAdmin):
|
||||
list_display = ('name', 'search', 'trigram_threshold',)
|
||||
search_fields = ('user__username',)
|
||||
list_filter = ('search',)
|
||||
|
||||
@staticmethod
|
||||
def name(obj):
|
||||
return obj.user.get_user_name()
|
||||
|
||||
|
||||
admin.site.register(SearchPreference, SearchPreferenceAdmin)
|
||||
|
||||
|
||||
class StorageAdmin(admin.ModelAdmin):
|
||||
list_display = ('name', 'method')
|
||||
search_fields = ('name',)
|
||||
@ -80,7 +101,38 @@ class SyncLogAdmin(admin.ModelAdmin):
|
||||
|
||||
admin.site.register(SyncLog, SyncLogAdmin)
|
||||
|
||||
admin.site.register(Keyword)
|
||||
|
||||
@admin.action(description='Temporarily ENABLE sorting on Foods and Keywords.')
|
||||
def enable_tree_sorting(modeladmin, request, queryset):
|
||||
Food.node_order_by = ['name']
|
||||
Keyword.node_order_by = ['name']
|
||||
with scopes_disabled():
|
||||
Food.fix_tree(fix_paths=True)
|
||||
Keyword.fix_tree(fix_paths=True)
|
||||
|
||||
|
||||
@admin.action(description='Temporarily DISABLE sorting on Foods and Keywords.')
|
||||
def disable_tree_sorting(modeladmin, request, queryset):
|
||||
Food.node_order_by = []
|
||||
Keyword.node_order_by = []
|
||||
|
||||
|
||||
@admin.action(description='Fix problems and sort tree by name')
|
||||
def sort_tree(modeladmin, request, queryset):
|
||||
orginal_value = modeladmin.model.node_order_by[:]
|
||||
modeladmin.model.node_order_by = ['name']
|
||||
with scopes_disabled():
|
||||
modeladmin.model.fix_tree(fix_paths=True)
|
||||
modeladmin.model.node_order_by = orginal_value
|
||||
|
||||
|
||||
class KeywordAdmin(TreeAdmin):
|
||||
form = movenodeform_factory(Keyword)
|
||||
ordering = ('space', 'path',)
|
||||
actions = [sort_tree, enable_tree_sorting, disable_tree_sorting]
|
||||
|
||||
|
||||
admin.site.register(Keyword, KeywordAdmin)
|
||||
|
||||
|
||||
class StepAdmin(admin.ModelAdmin):
|
||||
@ -91,6 +143,17 @@ class StepAdmin(admin.ModelAdmin):
|
||||
admin.site.register(Step, StepAdmin)
|
||||
|
||||
|
||||
@admin.action(description='Rebuild index for selected recipes')
|
||||
def rebuild_index(modeladmin, request, queryset):
|
||||
language = DICTIONARY.get(translation.get_language(), 'simple')
|
||||
with scopes_disabled():
|
||||
Recipe.objects.all().update(
|
||||
name_search_vector=SearchVector('name__unaccent', weight='A', config=language),
|
||||
desc_search_vector=SearchVector('description__unaccent', weight='B', config=language)
|
||||
)
|
||||
Step.objects.all().update(search_vector=SearchVector('instruction__unaccent', weight='B', config=language))
|
||||
|
||||
|
||||
class RecipeAdmin(admin.ModelAdmin):
|
||||
list_display = ('name', 'internal', 'created_by', 'storage')
|
||||
search_fields = ('name', 'created_by__username')
|
||||
@ -101,11 +164,22 @@ class RecipeAdmin(admin.ModelAdmin):
|
||||
def created_by(obj):
|
||||
return obj.created_by.get_user_name()
|
||||
|
||||
if settings.DATABASES['default']['ENGINE'] in ['django.db.backends.postgresql_psycopg2', 'django.db.backends.postgresql']:
|
||||
actions = [rebuild_index]
|
||||
|
||||
|
||||
admin.site.register(Recipe, RecipeAdmin)
|
||||
|
||||
admin.site.register(Unit)
|
||||
admin.site.register(Food)
|
||||
|
||||
|
||||
class FoodAdmin(TreeAdmin):
|
||||
form = movenodeform_factory(Keyword)
|
||||
ordering = ('space', 'path',)
|
||||
actions = [sort_tree, enable_tree_sorting, disable_tree_sorting]
|
||||
|
||||
|
||||
admin.site.register(Food, FoodAdmin)
|
||||
|
||||
|
||||
class IngredientAdmin(admin.ModelAdmin):
|
||||
|
@ -1,5 +1,26 @@
|
||||
from django.apps import AppConfig
|
||||
from django.conf import settings
|
||||
from django.db import OperationalError, ProgrammingError
|
||||
from django_scopes import scopes_disabled
|
||||
|
||||
|
||||
class CookbookConfig(AppConfig):
|
||||
name = 'cookbook'
|
||||
|
||||
def ready(self):
|
||||
# post_save signal is only necessary if using full-text search on postgres
|
||||
if settings.DATABASES['default']['ENGINE'] in ['django.db.backends.postgresql_psycopg2', 'django.db.backends.postgresql']:
|
||||
import cookbook.signals # noqa
|
||||
|
||||
# when starting up run fix_tree to:
|
||||
# a) make sure that nodes are sorted when switching between sort modes
|
||||
# b) fix problems, if any, with tree consistency
|
||||
with scopes_disabled():
|
||||
try:
|
||||
from cookbook.models import Keyword, Food
|
||||
Keyword.fix_tree(fix_paths=True)
|
||||
Food.fix_tree(fix_paths=True)
|
||||
except OperationalError:
|
||||
pass # if model does not exist there is no need to fix it
|
||||
except ProgrammingError:
|
||||
pass # if migration has not been run database cannot be fixed yet
|
||||
|
@ -61,14 +61,12 @@ with scopes_disabled():
|
||||
model = Recipe
|
||||
fields = ['name', 'keywords', 'foods', 'internal']
|
||||
|
||||
# class FoodFilter(django_filters.FilterSet):
|
||||
# name = django_filters.CharFilter(lookup_expr='icontains')
|
||||
|
||||
class FoodFilter(django_filters.FilterSet):
|
||||
name = django_filters.CharFilter(lookup_expr='icontains')
|
||||
|
||||
class Meta:
|
||||
model = Food
|
||||
fields = ['name']
|
||||
|
||||
# class Meta:
|
||||
# model = Food
|
||||
# fields = ['name']
|
||||
|
||||
class ShoppingListFilter(django_filters.FilterSet):
|
||||
|
||||
|
@ -1,16 +1,16 @@
|
||||
from django import forms
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.forms import widgets
|
||||
from django.forms import widgets, NumberInput
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django_scopes import scopes_disabled
|
||||
from django_scopes.forms import SafeModelChoiceField, SafeModelMultipleChoiceField
|
||||
from emoji_picker.widgets import EmojiPickerTextInput
|
||||
from hcaptcha.fields import hCaptchaField
|
||||
|
||||
from .models import (Comment, Food, InviteLink, Keyword, MealPlan, Recipe,
|
||||
RecipeBook, RecipeBookEntry, Storage, Sync, Unit, User,
|
||||
UserPreference, SupermarketCategory, MealType, Space)
|
||||
from .models import (Comment, InviteLink, Keyword, MealPlan, Recipe,
|
||||
RecipeBook, RecipeBookEntry, Storage, Sync, User,
|
||||
UserPreference, MealType, Space,
|
||||
SearchPreference)
|
||||
|
||||
|
||||
class SelectWidget(widgets.Select):
|
||||
@ -128,13 +128,15 @@ class ImportExportBase(forms.Form):
|
||||
MEALMASTER = 'MEALMASTER'
|
||||
REZKONV = 'REZKONV'
|
||||
OPENEATS = 'OPENEATS'
|
||||
PLANTOEAT = 'PLANTOEAT'
|
||||
COOKBOOKAPP = 'COOKBOOKAPP'
|
||||
|
||||
type = forms.ChoiceField(choices=(
|
||||
(DEFAULT, _('Default')), (PAPRIKA, 'Paprika'), (NEXTCLOUD, 'Nextcloud Cookbook'),
|
||||
(MEALIE, 'Mealie'), (CHOWDOWN, 'Chowdown'), (SAFRON, 'Safron'), (CHEFTAP, 'ChefTap'),
|
||||
(PEPPERPLATE, 'Pepperplate'), (RECETTETEK, 'RecetteTek'), (RECIPESAGE, 'Recipe Sage'), (DOMESTICA, 'Domestica'),
|
||||
(MEALMASTER, 'MealMaster'), (REZKONV, 'RezKonv'), (OPENEATS, 'Openeats'), (RECIPEKEEPER, 'Recipe Keeper'),
|
||||
|
||||
(PLANTOEAT, 'Plantoeat'), (COOKBOOKAPP, 'CookBookApp'),
|
||||
))
|
||||
|
||||
|
||||
@ -155,52 +157,6 @@ class ExportForm(ImportExportBase):
|
||||
self.fields['recipes'].queryset = Recipe.objects.filter(space=space).all()
|
||||
|
||||
|
||||
class UnitMergeForm(forms.Form):
|
||||
prefix = 'unit'
|
||||
|
||||
new_unit = SafeModelChoiceField(
|
||||
queryset=Unit.objects.none(),
|
||||
widget=SelectWidget,
|
||||
label=_('New Unit'),
|
||||
help_text=_('New unit that other gets replaced by.'),
|
||||
)
|
||||
old_unit = SafeModelChoiceField(
|
||||
queryset=Unit.objects.none(),
|
||||
widget=SelectWidget,
|
||||
label=_('Old Unit'),
|
||||
help_text=_('Unit that should be replaced.'),
|
||||
)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
space = kwargs.pop('space')
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields['new_unit'].queryset = Unit.objects.filter(space=space).all()
|
||||
self.fields['old_unit'].queryset = Unit.objects.filter(space=space).all()
|
||||
|
||||
|
||||
class FoodMergeForm(forms.Form):
|
||||
prefix = 'food'
|
||||
|
||||
new_food = SafeModelChoiceField(
|
||||
queryset=Food.objects.none(),
|
||||
widget=SelectWidget,
|
||||
label=_('New Food'),
|
||||
help_text=_('New food that other gets replaced by.'),
|
||||
)
|
||||
old_food = SafeModelChoiceField(
|
||||
queryset=Food.objects.none(),
|
||||
widget=SelectWidget,
|
||||
label=_('Old Food'),
|
||||
help_text=_('Food that should be replaced.'),
|
||||
)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
space = kwargs.pop('space')
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields['new_food'].queryset = Food.objects.filter(space=space).all()
|
||||
self.fields['old_food'].queryset = Food.objects.filter(space=space).all()
|
||||
|
||||
|
||||
class CommentForm(forms.ModelForm):
|
||||
prefix = 'comment'
|
||||
|
||||
@ -216,32 +172,6 @@ class CommentForm(forms.ModelForm):
|
||||
}
|
||||
|
||||
|
||||
class KeywordForm(forms.ModelForm):
|
||||
class Meta:
|
||||
model = Keyword
|
||||
fields = ('name', 'icon', 'description')
|
||||
widgets = {'icon': EmojiPickerTextInput}
|
||||
|
||||
|
||||
class FoodForm(forms.ModelForm):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
space = kwargs.pop('space')
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields['recipe'].queryset = Recipe.objects.filter(space=space).all()
|
||||
self.fields['supermarket_category'].queryset = SupermarketCategory.objects.filter(space=space).all()
|
||||
|
||||
class Meta:
|
||||
model = Food
|
||||
fields = ('name', 'description', 'ignore_shopping', 'recipe', 'supermarket_category')
|
||||
widgets = {'recipe': SelectWidget}
|
||||
|
||||
field_classes = {
|
||||
'recipe': SafeModelChoiceField,
|
||||
'supermarket_category': SafeModelChoiceField,
|
||||
}
|
||||
|
||||
|
||||
class StorageForm(forms.ModelForm):
|
||||
username = forms.CharField(
|
||||
widget=forms.TextInput(attrs={'autocomplete': 'new-password'}),
|
||||
@ -339,21 +269,6 @@ class ImportRecipeForm(forms.ModelForm):
|
||||
}
|
||||
|
||||
|
||||
class RecipeBookForm(forms.ModelForm):
|
||||
def __init__(self, *args, **kwargs):
|
||||
space = kwargs.pop('space')
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields['shared'].queryset = User.objects.filter(userpreference__space=space).all()
|
||||
|
||||
class Meta:
|
||||
model = RecipeBook
|
||||
fields = ('name', 'icon', 'description', 'shared')
|
||||
widgets = {'icon': EmojiPickerTextInput, 'shared': MultiSelectWidget}
|
||||
field_classes = {
|
||||
'shared': SafeModelMultipleChoiceField,
|
||||
}
|
||||
|
||||
|
||||
class MealPlanForm(forms.ModelForm):
|
||||
def __init__(self, *args, **kwargs):
|
||||
space = kwargs.pop('space')
|
||||
@ -471,3 +386,43 @@ class UserCreateForm(forms.Form):
|
||||
attrs={'autocomplete': 'new-password', 'type': 'password'}
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class SearchPreferenceForm(forms.ModelForm):
|
||||
prefix = 'search'
|
||||
trigram_threshold = forms.DecimalField(min_value=0.01, max_value=1, decimal_places=2, widget=NumberInput(attrs={'class': "form-control-range", 'type': 'range'}),
|
||||
help_text=_('Determines how fuzzy a search is if it uses trigram similarity matching (e.g. low values mean more typos are ignored).'))
|
||||
preset = forms.CharField(widget=forms.HiddenInput(),required=False)
|
||||
|
||||
class Meta:
|
||||
model = SearchPreference
|
||||
fields = ('search', 'lookup', 'unaccent', 'icontains', 'istartswith', 'trigram', 'fulltext', 'trigram_threshold')
|
||||
|
||||
help_texts = {
|
||||
'search': _('Select type method of search. Click <a href="/docs/search/">here</a> for full desciption of choices.'),
|
||||
'lookup': _('Use fuzzy matching on units, keywords and ingredients when editing and importing recipes.'),
|
||||
'unaccent': _('Fields to search ignoring accents. Selecting this option can improve or degrade search quality depending on language'),
|
||||
'icontains': _("Fields to search for partial matches. (e.g. searching for 'Pie' will return 'pie' and 'piece' and 'soapie')"),
|
||||
'istartswith': _("Fields to search for beginning of word matches. (e.g. searching for 'sa' will return 'salad' and 'sandwich')"),
|
||||
'trigram': _("Fields to 'fuzzy' search. (e.g. searching for 'recpie' will find 'recipe'.) Note: this option will conflict with 'web' and 'raw' methods of search."),
|
||||
'fulltext': _("Fields to full text search. Note: 'web', 'phrase', and 'raw' search methods only function with fulltext fields."),
|
||||
}
|
||||
|
||||
labels = {
|
||||
'search': _('Search Method'),
|
||||
'lookup': _('Fuzzy Lookups'),
|
||||
'unaccent': _('Ignore Accent'),
|
||||
'icontains': _("Partial Match"),
|
||||
'istartswith': _("Starts Wtih"),
|
||||
'trigram': _("Fuzzy Search"),
|
||||
'fulltext': _("Full Text")
|
||||
}
|
||||
|
||||
widgets = {
|
||||
'search': SelectWidget,
|
||||
'unaccent': MultiSelectWidget,
|
||||
'icontains': MultiSelectWidget,
|
||||
'istartswith': MultiSelectWidget,
|
||||
'trigram': MultiSelectWidget,
|
||||
'fulltext': MultiSelectWidget,
|
||||
}
|
||||
|
@ -32,7 +32,7 @@ def rescale_image_png(image_object, base_width=720):
|
||||
def get_filetype(name):
|
||||
try:
|
||||
return os.path.splitext(name)[1]
|
||||
except:
|
||||
except Exception:
|
||||
return '.jpeg'
|
||||
|
||||
|
||||
|
@ -2,10 +2,113 @@ import re
|
||||
import string
|
||||
import unicodedata
|
||||
|
||||
from cookbook.models import Unit, Food
|
||||
from django.core.cache import caches
|
||||
|
||||
from cookbook.models import Unit, Food, Automation
|
||||
|
||||
|
||||
def parse_fraction(x):
|
||||
class IngredientParser:
|
||||
request = None
|
||||
ignore_rules = False
|
||||
food_aliases = {}
|
||||
unit_aliases = {}
|
||||
|
||||
def __init__(self, request, cache_mode, ignore_automations=False):
|
||||
"""
|
||||
Initialize ingredient parser
|
||||
:param request: request context (to control caching, rule ownership, etc.)
|
||||
:param cache_mode: defines if all rules should be loaded on initialization (good when parser is used many times) or if they should be retrieved every time (good when parser is not used many times in a row)
|
||||
:param ignore_automations: ignore automation rules, allows to use ingredient parser without database access/request (request can be None)
|
||||
"""
|
||||
self.request = request
|
||||
self.ignore_rules = ignore_automations
|
||||
if cache_mode:
|
||||
FOOD_CACHE_KEY = f'automation_food_alias_{self.request.space.pk}'
|
||||
if c := caches['default'].get(FOOD_CACHE_KEY, None):
|
||||
self.food_aliases = c
|
||||
caches['default'].touch(FOOD_CACHE_KEY, 30)
|
||||
else:
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.FOOD_ALIAS).only('param_1', 'param_2').all():
|
||||
self.food_aliases[a.param_1] = a.param_2
|
||||
caches['default'].set(FOOD_CACHE_KEY, self.food_aliases, 30)
|
||||
|
||||
UNIT_CACHE_KEY = f'automation_unit_alias_{self.request.space.pk}'
|
||||
if c := caches['default'].get(UNIT_CACHE_KEY, None):
|
||||
self.unit_aliases = c
|
||||
caches['default'].touch(UNIT_CACHE_KEY, 30)
|
||||
else:
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.UNIT_ALIAS).only('param_1', 'param_2').all():
|
||||
self.unit_aliases[a.param_1] = a.param_2
|
||||
caches['default'].set(UNIT_CACHE_KEY, self.unit_aliases, 30)
|
||||
else:
|
||||
self.food_aliases = {}
|
||||
self.unit_aliases = {}
|
||||
|
||||
def apply_food_automation(self, food):
|
||||
"""
|
||||
Apply food alias automations to passed foood
|
||||
:param food: unit as string
|
||||
:return: food as string (possibly changed by automation)
|
||||
"""
|
||||
if self.ignore_rules:
|
||||
return food
|
||||
else:
|
||||
if self.food_aliases:
|
||||
try:
|
||||
return self.food_aliases[food]
|
||||
except KeyError:
|
||||
return food
|
||||
else:
|
||||
if automation := Automation.objects.filter(space=self.request.space, type=Automation.FOOD_ALIAS, param_1=food, disabled=False).first():
|
||||
return automation.param_2
|
||||
return food
|
||||
|
||||
def apply_unit_automation(self, unit):
|
||||
"""
|
||||
Apply unit alias automations to passed unit
|
||||
:param unit: unit as string
|
||||
:return: unit as string (possibly changed by automation)
|
||||
"""
|
||||
if self.ignore_rules:
|
||||
return unit
|
||||
else:
|
||||
if self.unit_aliases:
|
||||
try:
|
||||
return self.unit_aliases[unit]
|
||||
except KeyError:
|
||||
return unit
|
||||
else:
|
||||
if automation := Automation.objects.filter(space=self.request.space, type=Automation.UNIT_ALIAS, param_1=unit, disabled=False).first():
|
||||
return automation.param_2
|
||||
return unit
|
||||
|
||||
def get_unit(self, unit):
|
||||
"""
|
||||
Get or create a unit for given space respecting possible automations
|
||||
:param unit: string unit
|
||||
:return: None if unit passed is invalid, Unit object otherwise
|
||||
"""
|
||||
if not unit:
|
||||
return None
|
||||
if len(unit) > 0:
|
||||
u, created = Unit.objects.get_or_create(name=self.apply_unit_automation(unit), space=self.request.space)
|
||||
return u
|
||||
return None
|
||||
|
||||
def get_food(self, food):
|
||||
"""
|
||||
Get or create a food for given space respecting possible automations
|
||||
:param food: string food
|
||||
:return: None if food passed is invalid, Food object otherwise
|
||||
"""
|
||||
if not food:
|
||||
return None
|
||||
if len(food) > 0:
|
||||
f, created = Food.objects.get_or_create(name=self.apply_food_automation(food), space=self.request.space)
|
||||
return f
|
||||
return None
|
||||
|
||||
def parse_fraction(self, x):
|
||||
if len(x) == 1 and 'fraction' in unicodedata.decomposition(x):
|
||||
frac_split = unicodedata.decomposition(x[-1:]).split()
|
||||
return (float((frac_split[1]).replace('003', ''))
|
||||
@ -19,8 +122,7 @@ def parse_fraction(x):
|
||||
except ZeroDivisionError:
|
||||
raise ValueError
|
||||
|
||||
|
||||
def parse_amount(x):
|
||||
def parse_amount(self, x):
|
||||
amount = 0
|
||||
unit = ''
|
||||
note = ''
|
||||
@ -36,11 +138,11 @@ def parse_amount(x):
|
||||
end += 1
|
||||
if end > 0:
|
||||
if "/" in x[:end]:
|
||||
amount = parse_fraction(x[:end])
|
||||
amount = self.parse_fraction(x[:end])
|
||||
else:
|
||||
amount = float(x[:end].replace(',', '.'))
|
||||
else:
|
||||
amount = parse_fraction(x[0])
|
||||
amount = self.parse_fraction(x[0])
|
||||
end += 1
|
||||
did_check_frac = True
|
||||
if end < len(x):
|
||||
@ -48,7 +150,7 @@ def parse_amount(x):
|
||||
unit = x[end:]
|
||||
else:
|
||||
try:
|
||||
amount += parse_fraction(x[end])
|
||||
amount += self.parse_fraction(x[end])
|
||||
unit = x[end + 1:]
|
||||
except ValueError:
|
||||
unit = x[end:]
|
||||
@ -58,8 +160,7 @@ def parse_amount(x):
|
||||
note = x
|
||||
return amount, unit, note
|
||||
|
||||
|
||||
def parse_ingredient_with_comma(tokens):
|
||||
def parse_ingredient_with_comma(self, tokens):
|
||||
ingredient = ''
|
||||
note = ''
|
||||
start = 0
|
||||
@ -74,14 +175,13 @@ def parse_ingredient_with_comma(tokens):
|
||||
note = ' '.join(tokens[start + 1:])
|
||||
return ingredient, note
|
||||
|
||||
|
||||
def parse_ingredient(tokens):
|
||||
def parse_ingredient(self, tokens):
|
||||
ingredient = ''
|
||||
note = ''
|
||||
if tokens[-1].endswith(')'):
|
||||
# Check if the matching opening bracket is in the same token
|
||||
if (not tokens[-1].startswith('(')) and ('(' in tokens[-1]):
|
||||
return parse_ingredient_with_comma(tokens)
|
||||
return self.parse_ingredient_with_comma(tokens)
|
||||
# last argument ends with closing bracket -> look for opening bracket
|
||||
start = len(tokens) - 1
|
||||
while not tokens[start].startswith('(') and not start == 0:
|
||||
@ -91,17 +191,16 @@ def parse_ingredient(tokens):
|
||||
raise ValueError
|
||||
elif start < 0:
|
||||
# no opening bracket anywhere -> just ignore the last bracket
|
||||
ingredient, note = parse_ingredient_with_comma(tokens)
|
||||
ingredient, note = self.parse_ingredient_with_comma(tokens)
|
||||
else:
|
||||
# opening bracket found -> split in ingredient and note, remove brackets from note # noqa: E501
|
||||
note = ' '.join(tokens[start:])[1:-1]
|
||||
ingredient = ' '.join(tokens[:start])
|
||||
else:
|
||||
ingredient, note = parse_ingredient_with_comma(tokens)
|
||||
ingredient, note = self.parse_ingredient_with_comma(tokens)
|
||||
return ingredient, note
|
||||
|
||||
|
||||
def parse(x):
|
||||
def parse(self, x):
|
||||
# initialize default values
|
||||
amount = 0
|
||||
unit = ''
|
||||
@ -122,7 +221,7 @@ def parse(x):
|
||||
else:
|
||||
try:
|
||||
# try to parse first argument as amount
|
||||
amount, unit, unit_note = parse_amount(tokens[0])
|
||||
amount, unit, unit_note = self.parse_amount(tokens[0])
|
||||
# only try to parse second argument as amount if there are at least
|
||||
# three arguments if it already has a unit there can't be
|
||||
# a fraction for the amount
|
||||
@ -133,31 +232,31 @@ def parse(x):
|
||||
# probably not the best method to do it, but I didn't want to make an if check and paste the exact same thing in the else as already is in the except # noqa: E501
|
||||
raise ValueError
|
||||
# try to parse second argument as amount and add that, in case of '2 1/2' or '2 ½'
|
||||
amount += parse_fraction(tokens[1])
|
||||
amount += self.parse_fraction(tokens[1])
|
||||
# assume that units can't end with a comma
|
||||
if len(tokens) > 3 and not tokens[2].endswith(','):
|
||||
# try to use third argument as unit and everything else as ingredient, use everything as ingredient if it fails # noqa: E501
|
||||
try:
|
||||
ingredient, note = parse_ingredient(tokens[3:])
|
||||
ingredient, note = self.parse_ingredient(tokens[3:])
|
||||
unit = tokens[2]
|
||||
except ValueError:
|
||||
ingredient, note = parse_ingredient(tokens[2:])
|
||||
ingredient, note = self.parse_ingredient(tokens[2:])
|
||||
else:
|
||||
ingredient, note = parse_ingredient(tokens[2:])
|
||||
ingredient, note = self.parse_ingredient(tokens[2:])
|
||||
except ValueError:
|
||||
# assume that units can't end with a comma
|
||||
if not tokens[1].endswith(','):
|
||||
# try to use second argument as unit and everything else as ingredient, use everything as ingredient if it fails # noqa: E501
|
||||
try:
|
||||
ingredient, note = parse_ingredient(tokens[2:])
|
||||
ingredient, note = self.parse_ingredient(tokens[2:])
|
||||
if unit == '':
|
||||
unit = tokens[1]
|
||||
else:
|
||||
note = tokens[1]
|
||||
except ValueError:
|
||||
ingredient, note = parse_ingredient(tokens[1:])
|
||||
ingredient, note = self.parse_ingredient(tokens[1:])
|
||||
else:
|
||||
ingredient, note = parse_ingredient(tokens[1:])
|
||||
ingredient, note = self.parse_ingredient(tokens[1:])
|
||||
else:
|
||||
# only two arguments, first one is the amount
|
||||
# which means this is the ingredient
|
||||
@ -166,29 +265,10 @@ def parse(x):
|
||||
try:
|
||||
# can't parse first argument as amount
|
||||
# -> no unit -> parse everything as ingredient
|
||||
ingredient, note = parse_ingredient(tokens)
|
||||
ingredient, note = self.parse_ingredient(tokens)
|
||||
except ValueError:
|
||||
ingredient = ' '.join(tokens[1:])
|
||||
|
||||
if unit_note not in note:
|
||||
note += ' ' + unit_note
|
||||
return amount, unit.strip(), ingredient.strip(), note.strip()
|
||||
|
||||
|
||||
# small utility functions to prevent emtpy unit/food creation
|
||||
def get_unit(unit, space):
|
||||
if not unit:
|
||||
return None
|
||||
if len(unit) > 0:
|
||||
u, created = Unit.objects.get_or_create(name=unit, space=space)
|
||||
return u
|
||||
return None
|
||||
|
||||
|
||||
def get_food(food, space):
|
||||
if not food:
|
||||
return None
|
||||
if len(food) > 0:
|
||||
f, created = Food.objects.get_or_create(name=food, space=space)
|
||||
return f
|
||||
return None
|
||||
return amount, self.apply_unit_automation(unit.strip()), self.apply_food_automation(ingredient.strip()), note.strip()
|
||||
|
@ -3,8 +3,6 @@ Source: https://djangosnippets.org/snippets/1703/
|
||||
"""
|
||||
from django.conf import settings
|
||||
from django.core.cache import caches
|
||||
from django.views.generic.detail import SingleObjectTemplateResponseMixin
|
||||
from django.views.generic.edit import ModelFormMixin
|
||||
|
||||
from cookbook.models import ShareLink
|
||||
from django.contrib import messages
|
||||
@ -64,7 +62,7 @@ def is_object_owner(user, obj):
|
||||
return False
|
||||
try:
|
||||
return obj.get_owner() == user
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
|
@ -10,7 +10,7 @@ from recipe_scrapers._utils import get_host_name, normalize_string
|
||||
from urllib.parse import unquote
|
||||
|
||||
|
||||
def get_recipe_from_source(text, url, space):
|
||||
def get_recipe_from_source(text, url, request):
|
||||
def build_node(k, v):
|
||||
if isinstance(v, dict):
|
||||
node = {
|
||||
@ -103,7 +103,7 @@ def get_recipe_from_source(text, url, space):
|
||||
parse_list.append(el)
|
||||
scrape = text_scraper(text, url=url)
|
||||
|
||||
recipe_json = helper.get_from_scraper(scrape, space)
|
||||
recipe_json = helper.get_from_scraper(scrape, request)
|
||||
|
||||
for el in parse_list:
|
||||
temp_tree = []
|
||||
|
@ -1,76 +1,397 @@
|
||||
from datetime import datetime, timedelta
|
||||
from functools import reduce
|
||||
from collections import Counter
|
||||
from datetime import timedelta
|
||||
|
||||
from django.contrib.postgres.search import TrigramSimilarity
|
||||
from django.db.models import Q, Case, When, Value
|
||||
from django.forms import IntegerField
|
||||
|
||||
from cookbook.models import ViewLog
|
||||
from recipes import settings
|
||||
from django.contrib.postgres.search import (
|
||||
SearchQuery, SearchRank, TrigramSimilarity
|
||||
)
|
||||
from django.core.cache import caches
|
||||
from django.db.models import Avg, Case, Count, Func, Max, Q, Subquery, Value, When
|
||||
from django.db.models.functions import Coalesce
|
||||
from django.utils import timezone, translation
|
||||
|
||||
from cookbook.managers import DICTIONARY
|
||||
from cookbook.models import Food, Keyword, ViewLog, SearchPreference
|
||||
|
||||
|
||||
class Round(Func):
|
||||
function = 'ROUND'
|
||||
template = '%(function)s(%(expressions)s, 0)'
|
||||
|
||||
|
||||
def str2bool(v):
|
||||
if type(v) == bool:
|
||||
return v
|
||||
else:
|
||||
return v.lower() in ("yes", "true", "1")
|
||||
|
||||
|
||||
# TODO create extensive tests to make sure ORs ANDs and various filters, sorting, etc work as expected
|
||||
# TODO consider creating a simpleListRecipe API that only includes minimum of recipe info and minimal filtering
|
||||
def search_recipes(request, queryset, params):
|
||||
search_string = params.get('query', '')
|
||||
if request.user.is_authenticated:
|
||||
search_prefs = request.user.searchpreference
|
||||
else:
|
||||
search_prefs = SearchPreference()
|
||||
search_string = params.get('query', '').strip()
|
||||
search_rating = int(params.get('rating', 0))
|
||||
search_keywords = params.getlist('keywords', [])
|
||||
search_foods = params.getlist('foods', [])
|
||||
search_books = params.getlist('books', [])
|
||||
search_units = params.get('units', None)
|
||||
|
||||
search_keywords_or = params.get('keywords_or', True)
|
||||
search_foods_or = params.get('foods_or', True)
|
||||
search_books_or = params.get('books_or', True)
|
||||
# TODO I think default behavior should be 'AND' which is how most sites operate with facet/filters based on results
|
||||
search_keywords_or = str2bool(params.get('keywords_or', True))
|
||||
search_foods_or = str2bool(params.get('foods_or', True))
|
||||
search_books_or = str2bool(params.get('books_or', True))
|
||||
|
||||
search_internal = params.get('internal', None)
|
||||
search_random = params.get('random', False)
|
||||
search_new = params.get('new', False)
|
||||
search_internal = str2bool(params.get('internal', False))
|
||||
search_random = str2bool(params.get('random', False))
|
||||
search_new = str2bool(params.get('new', False))
|
||||
search_last_viewed = int(params.get('last_viewed', 0))
|
||||
orderby = []
|
||||
|
||||
# only sort by recent not otherwise filtering/sorting
|
||||
if search_last_viewed > 0:
|
||||
last_viewed_recipes = ViewLog.objects.filter(created_by=request.user, space=request.space,
|
||||
created_at__gte=datetime.now() - timedelta(days=14)).order_by('pk').values_list('recipe__pk', flat=True).distinct()
|
||||
last_viewed_recipes = ViewLog.objects.filter(
|
||||
created_by=request.user, space=request.space,
|
||||
created_at__gte=timezone.now() - timedelta(days=14) # TODO make recent days a setting
|
||||
).order_by('-pk').values_list('recipe__pk', flat=True)
|
||||
last_viewed_recipes = list(dict.fromkeys(last_viewed_recipes))[:search_last_viewed] # removes duplicates from list prior to slicing
|
||||
|
||||
return queryset.filter(pk__in=last_viewed_recipes[len(last_viewed_recipes) - min(len(last_viewed_recipes), search_last_viewed):])
|
||||
# return queryset.annotate(last_view=Max('viewlog__pk')).annotate(new=Case(When(pk__in=last_viewed_recipes, then=('last_view')), default=Value(0))).filter(new__gt=0).order_by('-new')
|
||||
# queryset that only annotates most recent view (higher pk = lastest view)
|
||||
queryset = queryset.annotate(recent=Coalesce(Max('viewlog__pk'), Value(0)))
|
||||
orderby += ['-recent']
|
||||
|
||||
if search_new == 'true':
|
||||
queryset = queryset.annotate(
|
||||
new_recipe=Case(When(created_at__gte=(datetime.now() - timedelta(days=7)), then=Value(100)),
|
||||
default=Value(0), )).order_by('-new_recipe', 'name')
|
||||
# TODO create setting for default ordering - most cooked, rating,
|
||||
# TODO create options for live sorting
|
||||
# TODO make days of new recipe a setting
|
||||
if search_new:
|
||||
queryset = (
|
||||
queryset.annotate(new_recipe=Case(
|
||||
When(created_at__gte=(timezone.now() - timedelta(days=7)), then=('pk')), default=Value(0), ))
|
||||
)
|
||||
# only sort by new recipes if not otherwise filtering/sorting
|
||||
orderby += ['-new_recipe']
|
||||
|
||||
search_type = search_prefs.search or 'plain'
|
||||
if len(search_string) > 0:
|
||||
unaccent_include = search_prefs.unaccent.values_list('field', flat=True)
|
||||
|
||||
icontains_include = [x + '__unaccent' if x in unaccent_include else x for x in search_prefs.icontains.values_list('field', flat=True)]
|
||||
istartswith_include = [x + '__unaccent' if x in unaccent_include else x for x in search_prefs.istartswith.values_list('field', flat=True)]
|
||||
trigram_include = [x + '__unaccent' if x in unaccent_include else x for x in search_prefs.trigram.values_list('field', flat=True)]
|
||||
fulltext_include = search_prefs.fulltext.values_list('field', flat=True) # fulltext doesn't use field name directly
|
||||
|
||||
# if no filters are configured use name__icontains as default
|
||||
if len(icontains_include) + len(istartswith_include) + len(trigram_include) + len(fulltext_include) == 0:
|
||||
filters = [Q(**{"name__icontains": search_string})]
|
||||
else:
|
||||
queryset = queryset.order_by('name')
|
||||
filters = []
|
||||
|
||||
if settings.DATABASES['default']['ENGINE'] in ['django.db.backends.postgresql_psycopg2',
|
||||
'django.db.backends.postgresql']:
|
||||
queryset = queryset.annotate(similarity=TrigramSimilarity('name', search_string), ).filter(
|
||||
Q(similarity__gt=0.1) | Q(name__unaccent__icontains=search_string)).order_by('-similarity')
|
||||
# dynamically build array of filters that will be applied
|
||||
for f in icontains_include:
|
||||
filters += [Q(**{"%s__icontains" % f: search_string})]
|
||||
|
||||
for f in istartswith_include:
|
||||
filters += [Q(**{"%s__istartswith" % f: search_string})]
|
||||
|
||||
if settings.DATABASES['default']['ENGINE'] in ['django.db.backends.postgresql_psycopg2', 'django.db.backends.postgresql']:
|
||||
language = DICTIONARY.get(translation.get_language(), 'simple')
|
||||
# django full text search https://docs.djangoproject.com/en/3.2/ref/contrib/postgres/search/#searchquery
|
||||
# TODO can options install this extension to further enhance search query language https://github.com/caub/pg-tsquery
|
||||
# trigram breaks full text search 'websearch' and 'raw' capabilities and will be ignored if those methods are chosen
|
||||
if search_type in ['websearch', 'raw']:
|
||||
search_trigram = False
|
||||
else:
|
||||
search_trigram = True
|
||||
search_query = SearchQuery(
|
||||
search_string,
|
||||
search_type=search_type,
|
||||
config=language,
|
||||
)
|
||||
|
||||
# iterate through fields to use in trigrams generating a single trigram
|
||||
if search_trigram and len(trigram_include) > 0:
|
||||
trigram = None
|
||||
for f in trigram_include:
|
||||
if trigram:
|
||||
trigram += TrigramSimilarity(f, search_string)
|
||||
else:
|
||||
trigram = TrigramSimilarity(f, search_string)
|
||||
queryset = queryset.annotate(similarity=trigram)
|
||||
filters += [Q(similarity__gt=search_prefs.trigram_threshold)]
|
||||
|
||||
if 'name' in fulltext_include:
|
||||
filters += [Q(name_search_vector=search_query)]
|
||||
if 'description' in fulltext_include:
|
||||
filters += [Q(desc_search_vector=search_query)]
|
||||
if 'instructions' in fulltext_include:
|
||||
filters += [Q(steps__search_vector=search_query)]
|
||||
if 'keywords' in fulltext_include:
|
||||
filters += [Q(keywords__in=Subquery(Keyword.objects.filter(name__search=search_query).values_list('id', flat=True)))]
|
||||
if 'foods' in fulltext_include:
|
||||
filters += [Q(steps__ingredients__food__in=Subquery(Food.objects.filter(name__search=search_query).values_list('id', flat=True)))]
|
||||
query_filter = None
|
||||
for f in filters:
|
||||
if query_filter:
|
||||
query_filter |= f
|
||||
else:
|
||||
query_filter = f
|
||||
|
||||
# TODO add order by user settings - only do search rank and annotation if rank order is configured
|
||||
search_rank = (
|
||||
SearchRank('name_search_vector', search_query, cover_density=True)
|
||||
+ SearchRank('desc_search_vector', search_query, cover_density=True)
|
||||
+ SearchRank('steps__search_vector', search_query, cover_density=True)
|
||||
)
|
||||
queryset = queryset.filter(query_filter).annotate(rank=search_rank)
|
||||
orderby += ['-rank']
|
||||
else:
|
||||
queryset = queryset.filter(name__icontains=search_string)
|
||||
|
||||
if len(search_keywords) > 0:
|
||||
if search_keywords_or == 'true':
|
||||
if search_keywords_or:
|
||||
# TODO creating setting to include descendants of keywords a setting
|
||||
# for kw in Keyword.objects.filter(pk__in=search_keywords):
|
||||
# search_keywords += list(kw.get_descendants().values_list('pk', flat=True))
|
||||
queryset = queryset.filter(keywords__id__in=search_keywords)
|
||||
else:
|
||||
for k in search_keywords:
|
||||
queryset = queryset.filter(keywords__id=k)
|
||||
# when performing an 'and' search returned recipes should include a parent OR any of its descedants
|
||||
# AND other keywords selected so filters are appended using keyword__id__in the list of keywords and descendants
|
||||
for kw in Keyword.objects.filter(pk__in=search_keywords):
|
||||
queryset = queryset.filter(keywords__id__in=list(kw.get_descendants_and_self().values_list('pk', flat=True)))
|
||||
|
||||
if len(search_foods) > 0:
|
||||
if search_foods_or == 'true':
|
||||
if search_foods_or:
|
||||
# TODO creating setting to include descendants of food a setting
|
||||
queryset = queryset.filter(steps__ingredients__food__id__in=search_foods)
|
||||
else:
|
||||
for k in search_foods:
|
||||
queryset = queryset.filter(steps__ingredients__food__id=k)
|
||||
# when performing an 'and' search returned recipes should include a parent OR any of its descedants
|
||||
# AND other foods selected so filters are appended using steps__ingredients__food__id__in the list of foods and descendants
|
||||
for fd in Food.objects.filter(pk__in=search_foods):
|
||||
queryset = queryset.filter(steps__ingredients__food__id__in=list(fd.get_descendants_and_self().values_list('pk', flat=True)))
|
||||
|
||||
if len(search_books) > 0:
|
||||
if search_books_or == 'true':
|
||||
if search_books_or:
|
||||
queryset = queryset.filter(recipebookentry__book__id__in=search_books)
|
||||
else:
|
||||
for k in search_books:
|
||||
queryset = queryset.filter(recipebookentry__book__id=k)
|
||||
|
||||
queryset = queryset.distinct()
|
||||
if search_rating:
|
||||
queryset = queryset.annotate(rating=Round(Avg(Case(When(cooklog__created_by=request.user, then='cooklog__rating'), default=Value(0)))))
|
||||
if search_rating == -1:
|
||||
queryset = queryset.filter(rating=0)
|
||||
else:
|
||||
queryset = queryset.filter(rating__gte=search_rating)
|
||||
|
||||
if search_internal == 'true':
|
||||
# probably only useful in Unit list view, so keeping it simple
|
||||
if search_units:
|
||||
queryset = queryset.filter(steps__ingredients__unit__id=search_units)
|
||||
|
||||
if search_internal:
|
||||
queryset = queryset.filter(internal=True)
|
||||
|
||||
if search_random == 'true':
|
||||
queryset = queryset.order_by("?")
|
||||
queryset = queryset.distinct()
|
||||
|
||||
if search_random:
|
||||
queryset = queryset.order_by("?")
|
||||
else:
|
||||
queryset = queryset.order_by(*orderby)
|
||||
return queryset
|
||||
|
||||
|
||||
def get_facet(qs=None, request=None, use_cache=True, hash_key=None):
|
||||
"""
|
||||
Gets an annotated list from a queryset.
|
||||
:param qs:
|
||||
|
||||
recipe queryset to build facets from
|
||||
|
||||
:param request:
|
||||
|
||||
the web request that contains the necessary query parameters
|
||||
|
||||
:param use_cache:
|
||||
|
||||
will find results in cache, if any, and return them or empty list.
|
||||
will save the list of recipes IDs in the cache for future processing
|
||||
|
||||
:param hash_key:
|
||||
|
||||
the cache key of the recipe list to process
|
||||
only evaluated if the use_cache parameter is false
|
||||
"""
|
||||
facets = {}
|
||||
recipe_list = []
|
||||
cache_timeout = 600
|
||||
|
||||
if use_cache:
|
||||
qs_hash = hash(frozenset(qs.values_list('pk')))
|
||||
facets['cache_key'] = str(qs_hash)
|
||||
SEARCH_CACHE_KEY = f"recipes_filter_{qs_hash}"
|
||||
if c := caches['default'].get(SEARCH_CACHE_KEY, None):
|
||||
facets['Keywords'] = c['Keywords'] or []
|
||||
facets['Foods'] = c['Foods'] or []
|
||||
facets['Books'] = c['Books'] or []
|
||||
facets['Ratings'] = c['Ratings'] or []
|
||||
facets['Recent'] = c['Recent'] or []
|
||||
else:
|
||||
facets['Keywords'] = []
|
||||
facets['Foods'] = []
|
||||
facets['Books'] = []
|
||||
rating_qs = qs.annotate(rating=Round(Avg(Case(When(cooklog__created_by=request.user, then='cooklog__rating'), default=Value(0)))))
|
||||
facets['Ratings'] = dict(Counter(r.rating for r in rating_qs))
|
||||
facets['Recent'] = ViewLog.objects.filter(
|
||||
created_by=request.user, space=request.space,
|
||||
created_at__gte=timezone.now() - timedelta(days=14) # TODO make days of recent recipe a setting
|
||||
).values_list('recipe__pk', flat=True)
|
||||
|
||||
cached_search = {
|
||||
'recipe_list': list(qs.values_list('id', flat=True)),
|
||||
'keyword_list': request.query_params.getlist('keywords', []),
|
||||
'food_list': request.query_params.getlist('foods', []),
|
||||
'book_list': request.query_params.getlist('book', []),
|
||||
'search_keywords_or': str2bool(request.query_params.get('keywords_or', True)),
|
||||
'search_foods_or': str2bool(request.query_params.get('foods_or', True)),
|
||||
        'search_books_or': str2bool(request.query_params.get('books_or', True)),
        'space': request.space,
        'Ratings': facets['Ratings'],
        'Recent': facets['Recent'],
        'Keywords': facets['Keywords'],
        'Foods': facets['Foods'],
        'Books': facets['Books']
    }
    caches['default'].set(SEARCH_CACHE_KEY, cached_search, cache_timeout)
    return facets

    SEARCH_CACHE_KEY = f'recipes_filter_{hash_key}'
    if c := caches['default'].get(SEARCH_CACHE_KEY, None):
        recipe_list = c['recipe_list']
        keyword_list = c['keyword_list']
        food_list = c['food_list']
        book_list = c['book_list']
        search_keywords_or = c['search_keywords_or']
        search_foods_or = c['search_foods_or']
        search_books_or = c['search_books_or']
    else:
        return {}

    # if using an OR search, will annotate all keywords, otherwise, just those that appear in results
    if search_keywords_or:
        keywords = Keyword.objects.filter(space=request.space).annotate(recipe_count=Count('recipe'))
    else:
        keywords = Keyword.objects.filter(recipe__in=recipe_list, space=request.space).annotate(recipe_count=Count('recipe'))
    # custom django-tree function annotates a queryset to make building a tree easier.
    # see https://django-treebeard.readthedocs.io/en/latest/api.html#treebeard.models.Node.get_annotated_list_qs for details
    kw_a = annotated_qs(keywords, root=True, fill=True)

    # if using an OR search, will annotate all keywords, otherwise, just those that appear in results
    if search_foods_or:
        foods = Food.objects.filter(space=request.space).annotate(recipe_count=Count('ingredient'))
    else:
        foods = Food.objects.filter(ingredient__step__recipe__in=recipe_list, space=request.space).annotate(recipe_count=Count('ingredient'))
    food_a = annotated_qs(foods, root=True, fill=True)

    # TODO add rating facet
    facets['Keywords'] = fill_annotated_parents(kw_a, keyword_list)
    facets['Foods'] = fill_annotated_parents(food_a, food_list)
    # TODO add book facet
    facets['Books'] = []
    c['Keywords'] = facets['Keywords']
    c['Foods'] = facets['Foods']
    c['Books'] = facets['Books']
    caches['default'].set(SEARCH_CACHE_KEY, c, cache_timeout)
    return facets


def fill_annotated_parents(annotation, filters):
    tree_list = []
    parent = []
    i = 0
    level = -1
    for r in annotation:
        expand = False

        annotation[i][1]['id'] = r[0].id
        annotation[i][1]['name'] = r[0].name
        annotation[i][1]['count'] = getattr(r[0], 'recipe_count', 0)
        annotation[i][1]['isDefaultExpanded'] = False

        if str(r[0].id) in filters:
            expand = True
        if r[1]['level'] < level:
            parent = parent[:r[1]['level'] - level]
            parent[-1] = i
            level = r[1]['level']
        elif r[1]['level'] > level:
            parent.extend([i])
            level = r[1]['level']
        else:
            parent[-1] = i
        j = 0

        while j < level:
            # this causes some double counting when a recipe has both a child and an ancestor
            annotation[parent[j]][1]['count'] += getattr(r[0], 'recipe_count', 0)
            if expand:
                annotation[parent[j]][1]['isDefaultExpanded'] = True
            j += 1
        if level == 0:
            tree_list.append(annotation[i][1])
        elif level > 0:
            annotation[parent[level - 1]][1].setdefault('children', []).append(annotation[i][1])
        i += 1
    return tree_list


def annotated_qs(qs, root=False, fill=False):
    """
    Gets an annotated list from a queryset.

    :param root:
        Will backfill in annotation to include all parents to root node.

    :param fill:
        Will fill in gaps in annotation where nodes between children
        and ancestors are not included in the queryset.
    """

    result, info = [], {}
    start_depth, prev_depth = (None, None)
    nodes_list = list(qs.values_list('pk', flat=True))
    for node in qs.order_by('path'):
        node_queue = [node]
        while len(node_queue) > 0:
            dirty = False
            current_node = node_queue[-1]
            depth = current_node.get_depth()
            # TODO if node is at the wrong depth for some reason this fails
            # either create a 'fix node' page, or automatically move the node to the root
            parent_id = current_node.parent
            if root and depth > 1 and parent_id not in nodes_list:
                parent_id = current_node.parent
                nodes_list.append(parent_id)
                node_queue.append(current_node.__class__.objects.get(pk=parent_id))
                dirty = True

            if fill and depth > 1 and prev_depth and depth > prev_depth and parent_id not in nodes_list:
                nodes_list.append(parent_id)
                node_queue.append(current_node.__class__.objects.get(pk=parent_id))
                dirty = True

            if not dirty:
                working_node = node_queue.pop()
                if start_depth is None:
                    start_depth = depth
                open = (depth and (prev_depth is None or depth > prev_depth))
                if prev_depth is not None and depth < prev_depth:
                    info['close'] = list(range(0, prev_depth - depth))
                info = {'open': open, 'close': [], 'level': depth - start_depth}
                result.append((working_node, info,))
                prev_depth = depth
    if start_depth and start_depth > 0:
        info['close'] = list(range(0, prev_depth - start_depth + 1))
    return result

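For orientation: annotated_qs follows the same (node, info) contract as treebeard's get_annotated_list_qs, and fill_annotated_parents only needs the node plus info['level'] to fold that flat list into the nested dicts used by the search facets. A minimal sketch, assuming a hypothetical two-keyword tree (a root `dinner` with one child `pasta`, both annotated with recipe_count as above):

    annotation = [
        (dinner, {'open': True, 'close': [], 'level': 0}),   # root keyword
        (pasta, {'open': True, 'close': [0], 'level': 1}),   # child of dinner
    ]
    tree = fill_annotated_parents(annotation, filters=[str(pasta.id)])
    # tree -> [{'id': dinner.id, 'name': dinner.name,
    #           'count': dinner.recipe_count + pasta.recipe_count,  # parent accumulates child counts
    #           'isDefaultExpanded': True,                          # because pasta was in filters
    #           'children': [{'id': pasta.id, 'name': pasta.name, ...}]}]
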
@ -2,17 +2,15 @@ import random
|
||||
import re
|
||||
from isodate import parse_duration as iso_parse_duration
|
||||
from isodate.isoerror import ISO8601Error
|
||||
from recipe_scrapers._exceptions import ElementNotFoundInHtml
|
||||
|
||||
from cookbook.helper.ingredient_parser import parse as parse_single_ingredient
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.models import Keyword
|
||||
from django.utils.dateparse import parse_duration
|
||||
from html import unescape
|
||||
from recipe_scrapers._schemaorg import SchemaOrgException
|
||||
from recipe_scrapers._utils import get_minutes
|
||||
|
||||
|
||||
def get_from_scraper(scrape, space):
|
||||
def get_from_scraper(scrape, request):
|
||||
# converting the scrape_me object to the existing json format based on ld+json
|
||||
recipe_json = {}
|
||||
try:
|
||||
@ -56,6 +54,7 @@ def get_from_scraper(scrape, space):
|
||||
recipe_json['cookTime'] = get_minutes(scrape.schema.data.get("cookTime")) or 0
|
||||
except Exception:
|
||||
recipe_json['cookTime'] = 0
|
||||
|
||||
if recipe_json['cookTime'] + recipe_json['prepTime'] == 0:
|
||||
try:
|
||||
recipe_json['prepTime'] = get_minutes(scrape.total_time()) or 0
|
||||
@ -92,15 +91,16 @@ def get_from_scraper(scrape, space):
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
recipe_json['keywords'] = parse_keywords(list(set(map(str.casefold, keywords))), space)
|
||||
recipe_json['keywords'] = parse_keywords(list(set(map(str.casefold, keywords))), request.space)
|
||||
except AttributeError:
|
||||
recipe_json['keywords'] = keywords
|
||||
|
||||
ingredient_parser = IngredientParser(request, True)
|
||||
try:
|
||||
ingredients = []
|
||||
for x in scrape.ingredients():
|
||||
try:
|
||||
amount, unit, ingredient, note = parse_single_ingredient(x)
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(x)
|
||||
ingredients.append(
|
||||
{
|
||||
'amount': amount,
|
||||
|
@ -1,4 +1,3 @@
|
||||
from django.shortcuts import redirect
|
||||
from django.urls import reverse
|
||||
from django_scopes import scope, scopes_disabled
|
||||
|
||||
|
@ -30,7 +30,6 @@ def text_scraper(text, url=None):
|
||||
url=None
|
||||
):
|
||||
self.wild_mode = False
|
||||
# self.exception_handling = None # TODO add new method here, old one was deprecated
|
||||
self.meta_http_equiv = False
|
||||
self.soup = BeautifulSoup(page_data, "html.parser")
|
||||
self.url = url
|
||||
|
@ -6,6 +6,7 @@ from cookbook.helper.mdx_urlize import UrlizeExtension
|
||||
from jinja2 import Template, TemplateSyntaxError, UndefinedError
|
||||
from gettext import gettext as _
|
||||
|
||||
|
||||
class IngredientObject(object):
|
||||
amount = ""
|
||||
unit = ""
|
||||
|
@ -1,10 +1,8 @@
|
||||
import re
|
||||
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from cookbook.helper.ingredient_parser import parse, get_food, get_unit
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Food, Unit, Ingredient
|
||||
from cookbook.models import Recipe, Step, Ingredient
|
||||
|
||||
|
||||
class ChefTap(Integration):
|
||||
@ -44,11 +42,12 @@ class ChefTap(Integration):
|
||||
step.instruction += '\n' + source_url
|
||||
step.save()
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in ingredients:
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, ingredient, note = parse(ingredient)
|
||||
f = get_food(ingredient, self.request.space)
|
||||
u = get_unit(unit, self.request.space)
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
))
|
||||
|
@ -1,12 +1,11 @@
|
||||
import json
|
||||
import re
|
||||
from io import BytesIO
|
||||
from zipfile import ZipFile
|
||||
|
||||
from cookbook.helper.image_processing import get_filetype
|
||||
from cookbook.helper.ingredient_parser import parse, get_food, get_unit
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Food, Unit, Ingredient, Keyword
|
||||
from cookbook.models import Recipe, Step, Ingredient, Keyword
|
||||
|
||||
|
||||
class Chowdown(Integration):
|
||||
@ -51,6 +50,7 @@ class Chowdown(Integration):
|
||||
recipe = Recipe.objects.create(name=title, created_by=self.request.user, internal=True, space=self.request.space)
|
||||
|
||||
for k in tags.split(','):
|
||||
print(f'adding keyword {k.strip()}')
|
||||
keyword, created = Keyword.objects.get_or_create(name=k.strip(), space=self.request.space)
|
||||
recipe.keywords.add(keyword)
|
||||
|
||||
@ -58,10 +58,11 @@ class Chowdown(Integration):
|
||||
instruction='\n'.join(directions) + '\n\n' + '\n'.join(descriptions), space=self.request.space,
|
||||
)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in ingredients:
|
||||
amount, unit, ingredient, note = parse(ingredient)
|
||||
f = get_food(ingredient, self.request.space)
|
||||
u = get_unit(unit, self.request.space)
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
))
|
||||
|
cookbook/integration/cookbookapp.py (new file, 67 lines)
@ -0,0 +1,67 @@
import base64
import gzip
import json
import re
from io import BytesIO

import yaml

from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.integration.integration import Integration
from cookbook.models import Recipe, Step, Ingredient, Keyword
from gettext import gettext as _


class CookBookApp(Integration):

    def import_file_name_filter(self, zip_info_object):
        return zip_info_object.filename.endswith('.yml')

    def get_recipe_from_file(self, file):
        recipe_yml = yaml.safe_load(file.getvalue().decode("utf-8"))

        recipe = Recipe.objects.create(
            name=recipe_yml['name'].strip(),
            created_by=self.request.user, internal=True,
            space=self.request.space)

        try:
            recipe.servings = re.findall('([0-9])+', recipe_yml['recipeYield'])[0]
        except Exception as e:
            pass

        try:
            recipe.working_time = recipe_yml['prep_time'].replace(' minutes', '')
            recipe.waiting_time = recipe_yml['cook_time'].replace(' minutes', '')
        except Exception:
            pass

        if recipe_yml['on_favorites']:
            recipe.keywords.add(Keyword.objects.get_or_create(name=_('Favorites'), space=self.request.space))

        step = Step.objects.create(instruction=recipe_yml['directions'], space=self.request.space, )

        if 'notes' in recipe_yml and recipe_yml['notes'].strip() != '':
            step.instruction = step.instruction + '\n\n' + recipe_yml['notes']

        if 'nutritional_info' in recipe_yml:
            step.instruction = step.instruction + '\n\n' + recipe_yml['nutritional_info']

        if 'source' in recipe_yml and recipe_yml['source'].strip() != '':
            step.instruction = step.instruction + '\n\n' + recipe_yml['source']

        step.save()
        recipe.steps.add(step)

        ingredient_parser = IngredientParser(self.request, True)
        for ingredient in recipe_yml['ingredients'].split('\n'):
            if ingredient.strip() != '':
                amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
                f = ingredient_parser.get_food(ingredient)
                u = ingredient_parser.get_unit(unit)
                step.ingredients.add(Ingredient.objects.create(
                    food=f, unit=u, amount=amount, note=note, space=self.request.space,
                ))

        recipe.save()
        return recipe

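The same request-scoped parsing flow recurs in every integration touched by this branch: build one IngredientParser per import and resolve each line through it, instead of calling the old module-level parse/get_food/get_unit helpers with an explicit space argument. A condensed sketch of that shared pattern (the ingredient string and `step` are placeholders; the meaning of the boolean passed to IngredientParser is not shown in this diff):

    ingredient_parser = IngredientParser(self.request, True)
    amount, unit, food_token, note = ingredient_parser.parse('2 tbsp olive oil, extra virgin')
    f = ingredient_parser.get_food(food_token)   # replaces get_food(ingredient, self.request.space)
    u = ingredient_parser.get_unit(unit)         # replaces get_unit(unit, self.request.space)
    step.ingredients.add(Ingredient.objects.create(
        food=f, unit=u, amount=amount, note=note, space=self.request.space,
    ))
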
@ -2,7 +2,7 @@ import base64
|
||||
import json
|
||||
from io import BytesIO
|
||||
|
||||
from cookbook.helper.ingredient_parser import parse, get_food, get_unit
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient
|
||||
|
||||
@ -34,11 +34,12 @@ class Domestica(Integration):
|
||||
if file['source'] != '':
|
||||
step.instruction += '\n' + file['source']
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in file['ingredients'].split('\n'):
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, ingredient, note = parse(ingredient)
|
||||
f = get_food(ingredient, self.request.space)
|
||||
u = get_unit(unit, self.request.space)
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
))
|
||||
|
@ -1,12 +1,13 @@
|
||||
import datetime
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import traceback
|
||||
import uuid
|
||||
from io import BytesIO, StringIO
|
||||
from zipfile import ZipFile, BadZipFile
|
||||
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.core.files import File
|
||||
from django.db import IntegrityError
|
||||
from django.http import HttpResponse
|
||||
from django.utils.formats import date_format
|
||||
from django.utils.translation import gettext as _
|
||||
@ -15,6 +16,7 @@ from django_scopes import scope
|
||||
from cookbook.forms import ImportExportBase
|
||||
from cookbook.helper.image_processing import get_filetype
|
||||
from cookbook.models import Keyword, Recipe
|
||||
from recipes.settings import DATABASES, DEBUG
|
||||
|
||||
|
||||
class Integration:
|
||||
@ -31,10 +33,30 @@ class Integration:
|
||||
"""
|
||||
self.request = request
|
||||
self.export_type = export_type
|
||||
self.keyword = Keyword.objects.create(
|
||||
name=f'Import {export_type} {date_format(datetime.datetime.now(), "DATETIME_FORMAT")}.{datetime.datetime.now().strftime("%S")}',
|
||||
description=f'Imported by {request.user.get_user_name()} at {date_format(datetime.datetime.now(), "DATETIME_FORMAT")}. Type: {export_type}',
|
||||
icon='📥',
|
||||
self.ignored_recipes = []
|
||||
|
||||
description = f'Imported by {request.user.get_user_name()} at {date_format(datetime.datetime.now(), "DATETIME_FORMAT")}. Type: {export_type}'
|
||||
icon = '📥'
|
||||
|
||||
try:
|
||||
last_kw = Keyword.objects.filter(name__regex=r'^(Import [0-9]+)', space=request.space).latest('created_at')
|
||||
name = f'Import {int(last_kw.name.replace("Import ", "")) + 1}'
|
||||
except ObjectDoesNotExist:
|
||||
name = 'Import 1'
|
||||
|
||||
parent, created = Keyword.objects.get_or_create(name='Import', space=request.space)
|
||||
try:
|
||||
self.keyword = parent.add_child(
|
||||
name=name,
|
||||
description=description,
|
||||
icon=icon,
|
||||
space=request.space
|
||||
)
|
||||
except IntegrityError: # in case, for whatever reason, the name does exist append UUID to it. Not nice but works for now.
|
||||
self.keyword = parent.add_child(
|
||||
name=f'{name} {str(uuid.uuid4())[0:8]}',
|
||||
description=description,
|
||||
icon=icon,
|
||||
space=request.space
|
||||
)
|
||||
|
||||
@ -142,9 +164,10 @@ class Integration:
|
||||
il.imported_recipes += 1
|
||||
il.save()
|
||||
except Exception as e:
|
||||
il.msg += f'-------------------- \n ERROR \n{e}\n--------------------\n'
|
||||
traceback.print_exc()
|
||||
self.handle_exception(e, log=il, message=f'-------------------- \nERROR \n{e}\n--------------------\n')
|
||||
import_zip.close()
|
||||
elif '.json' in f['name'] or '.txt' in f['name']:
|
||||
elif '.json' in f['name'] or '.txt' in f['name'] or '.mmf' in f['name']:
|
||||
data_list = self.split_recipe_file(f['file'])
|
||||
il.total_recipes += len(data_list)
|
||||
for d in data_list:
|
||||
@ -156,7 +179,7 @@ class Integration:
|
||||
il.imported_recipes += 1
|
||||
il.save()
|
||||
except Exception as e:
|
||||
il.msg += f'-------------------- \n ERROR \n{e}\n--------------------\n'
|
||||
self.handle_exception(e, log=il, message=f'-------------------- \nERROR \n{e}\n--------------------\n')
|
||||
elif '.rtk' in f['name']:
|
||||
import_zip = ZipFile(f['file'])
|
||||
for z in import_zip.filelist:
|
||||
@ -173,7 +196,7 @@ class Integration:
|
||||
il.imported_recipes += 1
|
||||
il.save()
|
||||
except Exception as e:
|
||||
il.msg += f'-------------------- \n ERROR \n{e}\n--------------------\n'
|
||||
self.handle_exception(e, log=il, message=f'-------------------- \nERROR \n{e}\n--------------------\n')
|
||||
import_zip.close()
|
||||
else:
|
||||
recipe = self.get_recipe_from_file(f['file'])
|
||||
@ -183,9 +206,10 @@ class Integration:
|
||||
except BadZipFile:
|
||||
il.msg += 'ERROR ' + _(
|
||||
'Importer expected a .zip file. Did you choose the correct importer type for your data ?') + '\n'
|
||||
except:
|
||||
il.msg += 'ERROR ' + _(
|
||||
except Exception as e:
|
||||
msg = 'ERROR ' + _(
|
||||
'An unexpected error occurred during the import. Please make sure you have uploaded a valid file.') + '\n'
|
||||
self.handle_exception(e, log=il, message=msg)
|
||||
|
||||
if len(self.ignored_recipes) > 0:
|
||||
il.msg += '\n' + _(
|
||||
@ -204,8 +228,8 @@ class Integration:
|
||||
:param import_duplicates: if duplicates should be imported
|
||||
"""
|
||||
if Recipe.objects.filter(space=self.request.space, name=recipe.name).count() > 1 and not import_duplicates:
|
||||
recipe.delete()
|
||||
self.ignored_recipes.append(recipe.name)
|
||||
recipe.delete()
|
||||
|
||||
@staticmethod
|
||||
def import_recipe_image(recipe, image_file, filetype='.jpeg'):
|
||||
@ -244,3 +268,12 @@ class Integration:
|
||||
- data - string content for file to get created in export zip
|
||||
"""
|
||||
raise NotImplementedError('Method not implemented in integration')
|
||||
|
||||
def handle_exception(self, exception, log=None, message=''):
|
||||
if log:
|
||||
if message:
|
||||
log.msg += message
|
||||
else:
|
||||
log.msg += exception.msg
|
||||
if DEBUG:
|
||||
traceback.print_exc()
|
||||
|
@ -4,9 +4,9 @@ from io import BytesIO
|
||||
from zipfile import ZipFile
|
||||
|
||||
from cookbook.helper.image_processing import get_filetype
|
||||
from cookbook.helper.ingredient_parser import parse, get_food, get_unit
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Food, Unit, Ingredient
|
||||
from cookbook.models import Recipe, Step, Ingredient
|
||||
|
||||
|
||||
class Mealie(Integration):
|
||||
@ -24,6 +24,7 @@ class Mealie(Integration):
|
||||
created_by=self.request.user, internal=True, space=self.request.space)
|
||||
|
||||
# TODO parse times (given in PT2H3M )
|
||||
# @vabene check recipe_url_import.iso_duration_to_minutes I think it does what you are looking for
|
||||
|
||||
ingredients_added = False
|
||||
for s in recipe_json['recipe_instructions']:
|
||||
@ -36,21 +37,22 @@ class Mealie(Integration):
|
||||
if len(recipe_json['description'].strip()) > 500:
|
||||
step.instruction = recipe_json['description'].strip() + '\n\n' + step.instruction
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in recipe_json['recipe_ingredient']:
|
||||
try:
|
||||
if ingredient['food']:
|
||||
f = get_food(ingredient['food'], self.request.space)
|
||||
u = get_unit(ingredient['unit'], self.request.space)
|
||||
f = ingredient_parser.get_food(ingredient['food'])
|
||||
u = ingredient_parser.get_unit(ingredient['unit'])
|
||||
amount = ingredient['quantity']
|
||||
note = ingredient['note']
|
||||
else:
|
||||
amount, unit, ingredient, note = parse(ingredient['note'])
|
||||
f = get_food(ingredient, self.request.space)
|
||||
u = get_unit(unit, self.request.space)
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient['note'])
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
))
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
recipe.steps.add(step)
|
||||
|
||||
@ -59,7 +61,7 @@ class Mealie(Integration):
|
||||
import_zip = ZipFile(f['file'])
|
||||
try:
|
||||
self.import_recipe_image(recipe, BytesIO(import_zip.read(f'recipes/{recipe_json["slug"]}/images/min-original.webp')), filetype=get_filetype(f'recipes/{recipe_json["slug"]}/images/original'))
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return recipe
|
||||
|
@ -1,22 +1,17 @@
|
||||
import json
|
||||
import re
|
||||
from io import BytesIO
|
||||
from zipfile import ZipFile
|
||||
|
||||
from cookbook.helper.ingredient_parser import parse, get_food, get_unit
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Food, Unit, Ingredient, Keyword
|
||||
from cookbook.models import Recipe, Step, Ingredient, Keyword
|
||||
|
||||
|
||||
class MealMaster(Integration):
|
||||
|
||||
def get_recipe_from_file(self, file):
|
||||
print('------------ getting recipe')
|
||||
servings = 1
|
||||
ingredients = []
|
||||
directions = []
|
||||
for line in file.replace('\r', '').split('\n'):
|
||||
print('testing line')
|
||||
if not line.startswith('MMMMM') and line.strip != '':
|
||||
if 'Title:' in line:
|
||||
title = line.replace('Title:', '').strip()
|
||||
@ -47,11 +42,12 @@ class MealMaster(Integration):
|
||||
instruction='\n'.join(directions) + '\n\n', space=self.request.space,
|
||||
)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in ingredients:
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, ingredient, note = parse(ingredient)
|
||||
f = get_food(ingredient, self.request.space)
|
||||
u = get_unit(unit, self.request.space)
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
))
|
||||
|
@ -4,9 +4,9 @@ from io import BytesIO
|
||||
from zipfile import ZipFile
|
||||
|
||||
from cookbook.helper.image_processing import get_filetype
|
||||
from cookbook.helper.ingredient_parser import parse, get_food, get_unit
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Food, Unit, Ingredient
|
||||
from cookbook.models import Recipe, Step, Ingredient
|
||||
|
||||
|
||||
class NextcloudCookbook(Integration):
|
||||
@ -25,6 +25,7 @@ class NextcloudCookbook(Integration):
|
||||
servings=recipe_json['recipeYield'], space=self.request.space)
|
||||
|
||||
# TODO parse times (given in PT2H3M )
|
||||
# @vabene check recipe_url_import.iso_duration_to_minutes I think it does what you are looking for
|
||||
# TODO parse keywords
|
||||
|
||||
ingredients_added = False
|
||||
@ -38,6 +39,7 @@ class NextcloudCookbook(Integration):
|
||||
|
||||
ingredients_added = True
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in recipe_json['recipeIngredient']:
|
||||
amount, unit, ingredient, note = parse(ingredient)
|
||||
f = get_food(ingredient, self.request.space)
|
||||
|
@ -1,11 +1,8 @@
|
||||
import json
|
||||
import re
|
||||
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from cookbook.helper.ingredient_parser import parse, get_food, get_unit
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Food, Unit, Ingredient
|
||||
from cookbook.models import Recipe, Step, Ingredient
|
||||
|
||||
|
||||
class OpenEats(Integration):
|
||||
@ -26,9 +23,10 @@ class OpenEats(Integration):
|
||||
|
||||
step = Step.objects.create(instruction=instructions, space=self.request.space,)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in file['ingredients']:
|
||||
f = get_food(ingredient['food'], self.request.space)
|
||||
u = get_unit(ingredient['unit'], self.request.space)
|
||||
f = ingredient_parser.get_food(ingredient['food'])
|
||||
u = ingredient_parser.get_unit(ingredient['unit'])
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=ingredient['amount'], space=self.request.space,
|
||||
))
|
||||
|
@ -4,7 +4,7 @@ import json
|
||||
import re
|
||||
from io import BytesIO
|
||||
|
||||
from cookbook.helper.ingredient_parser import parse, get_food, get_unit
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient, Keyword
|
||||
from gettext import gettext as _
|
||||
@ -58,7 +58,7 @@ class Paprika(Integration):
|
||||
instruction=instructions, space=self.request.space,
|
||||
)
|
||||
|
||||
if len(recipe_json['description'].strip()) > 500:
|
||||
if 'description' in recipe_json and len(recipe_json['description'].strip()) > 500:
|
||||
step.instruction = recipe_json['description'].strip() + '\n\n' + step.instruction
|
||||
|
||||
if 'categories' in recipe_json:
|
||||
@ -66,12 +66,13 @@ class Paprika(Integration):
|
||||
keyword, created = Keyword.objects.get_or_create(name=c.strip(), space=self.request.space)
|
||||
recipe.keywords.add(keyword)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
try:
|
||||
for ingredient in recipe_json['ingredients'].split('\n'):
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, ingredient, note = parse(ingredient)
|
||||
f = get_food(ingredient, self.request.space)
|
||||
u = get_unit(unit, self.request.space)
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
))
|
||||
|
@ -1,11 +1,6 @@
|
||||
import json
|
||||
import re
|
||||
from io import BytesIO
|
||||
from zipfile import ZipFile
|
||||
|
||||
from cookbook.helper.ingredient_parser import parse, get_food, get_unit
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Food, Unit, Ingredient, Keyword
|
||||
from cookbook.models import Recipe, Step, Ingredient
|
||||
|
||||
|
||||
class Pepperplate(Integration):
|
||||
@ -43,11 +38,12 @@ class Pepperplate(Integration):
|
||||
instruction='\n'.join(directions) + '\n\n', space=self.request.space,
|
||||
)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in ingredients:
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, ingredient, note = parse(ingredient)
|
||||
f = get_food(ingredient, self.request.space)
|
||||
u = get_unit(unit, self.request.space)
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
))
|
cookbook/integration/plantoeat.py (new file, 94 lines)
@ -0,0 +1,94 @@
|
||||
from io import BytesIO
|
||||
|
||||
import requests
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient, Keyword
|
||||
|
||||
|
||||
class Plantoeat(Integration):
|
||||
|
||||
def get_recipe_from_file(self, file):
|
||||
ingredient_mode = False
|
||||
direction_mode = False
|
||||
|
||||
image_url = None
|
||||
tags = None
|
||||
ingredients = []
|
||||
directions = []
|
||||
description = ''
|
||||
for line in file.replace('\r', '').split('\n'):
|
||||
if line.strip() != '':
|
||||
if 'Title:' in line:
|
||||
title = line.replace('Title:', '').replace('"', '').strip()
|
||||
if 'Description:' in line:
|
||||
description = line.replace('Description:', '').strip()
|
||||
if 'Source:' in line or 'Serves:' in line or 'Prep Time:' in line or 'Cook Time:' in line:
|
||||
directions.append(line.strip() + '\n')
|
||||
if 'Photo Url:' in line:
|
||||
image_url = line.replace('Photo Url:', '').strip()
|
||||
if 'Tags:' in line:
|
||||
tags = line.replace('Tags:', '').strip()
|
||||
if ingredient_mode:
|
||||
if len(line) > 2 and 'Instructions:' not in line:
|
||||
ingredients.append(line.strip())
|
||||
if direction_mode:
|
||||
if len(line) > 2:
|
||||
directions.append(line.strip() + '\n')
|
||||
if 'Ingredients:' in line:
|
||||
ingredient_mode = True
|
||||
if 'Directions:' in line:
|
||||
ingredient_mode = False
|
||||
direction_mode = True
|
||||
|
||||
recipe = Recipe.objects.create(name=title, description=description, created_by=self.request.user, internal=True, space=self.request.space)
|
||||
|
||||
step = Step.objects.create(
|
||||
instruction='\n'.join(directions) + '\n\n', space=self.request.space,
|
||||
)
|
||||
|
||||
if tags:
|
||||
for k in tags.split(','):
|
||||
keyword, created = Keyword.objects.get_or_create(name=k.strip(), space=self.request.space)
|
||||
recipe.keywords.add(keyword)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in ingredients:
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
))
|
||||
recipe.steps.add(step)
|
||||
|
||||
if image_url:
|
||||
try:
|
||||
response = requests.get(image_url)
|
||||
self.import_recipe_image(recipe, BytesIO(response.content))
|
||||
except Exception as e:
|
||||
print('failed to import image ', str(e))
|
||||
|
||||
return recipe
|
||||
|
||||
def split_recipe_file(self, file):
|
||||
recipe_list = []
|
||||
current_recipe = ''
|
||||
|
||||
for fl in file.readlines():
|
||||
line = fl.decode("ANSI")
|
||||
if line.startswith('--------------'):
|
||||
if current_recipe != '':
|
||||
recipe_list.append(current_recipe)
|
||||
current_recipe = ''
|
||||
else:
|
||||
current_recipe = ''
|
||||
else:
|
||||
current_recipe += line + '\n'
|
||||
|
||||
if current_recipe != '':
|
||||
recipe_list.append(current_recipe)
|
||||
|
||||
return recipe_list
|
@ -1,16 +1,14 @@
|
||||
import re
|
||||
import json
|
||||
import base64
|
||||
import requests
|
||||
from io import BytesIO
|
||||
from zipfile import ZipFile
|
||||
import imghdr
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from cookbook.helper.image_processing import get_filetype
|
||||
from cookbook.helper.ingredient_parser import parse, get_food, get_unit
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Food, Unit, Ingredient, Keyword
|
||||
from cookbook.models import Recipe, Step, Ingredient, Keyword
|
||||
|
||||
|
||||
class RecetteTek(Integration):
|
||||
@ -57,11 +55,12 @@ class RecetteTek(Integration):
|
||||
|
||||
try:
|
||||
# Process the ingredients. Assumes 1 ingredient per line.
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in file['ingredients'].split('\n'):
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, ingredient, note = parse(ingredient)
|
||||
f = get_food(ingredient, self.request.space)
|
||||
u = get_unit(unit, self.request.space)
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
))
|
||||
@ -108,7 +107,7 @@ class RecetteTek(Integration):
|
||||
recipe.keywords.add(k)
|
||||
recipe.save()
|
||||
except Exception as e:
|
||||
pass
|
||||
print(recipe.name, ': failed to parse keywords ', str(e))
|
||||
|
||||
# TODO: Parse Nutritional Information
|
||||
|
||||
@ -123,7 +122,7 @@ class RecetteTek(Integration):
|
||||
else:
|
||||
if file['originalPicture'] != '':
|
||||
response = requests.get(file['originalPicture'])
|
||||
if imghdr.what(BytesIO(response.content)) != None:
|
||||
if imghdr.what(BytesIO(response.content)) is not None:
|
||||
self.import_recipe_image(recipe, BytesIO(response.content), filetype=get_filetype(file['originalPicture']))
|
||||
else:
|
||||
raise Exception("Original image failed to download.")
|
||||
|
@ -3,12 +3,10 @@ from bs4 import BeautifulSoup
|
||||
from io import BytesIO
|
||||
from zipfile import ZipFile
|
||||
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from cookbook.helper.ingredient_parser import parse, get_food, get_unit
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_url_import import parse_servings, iso_duration_to_minutes
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Food, Unit, Ingredient, Keyword
|
||||
from cookbook.models import Recipe, Step, Ingredient, Keyword
|
||||
|
||||
|
||||
class RecipeKeeper(Integration):
|
||||
@ -43,12 +41,13 @@ class RecipeKeeper(Integration):
|
||||
|
||||
step = Step.objects.create(instruction='', space=self.request.space,)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in file.find("div", {"itemprop": "recipeIngredients"}).findChildren("p"):
|
||||
if ingredient.text == "":
|
||||
continue
|
||||
amount, unit, ingredient, note = parse(ingredient.text.strip())
|
||||
f = get_food(ingredient, self.request.space)
|
||||
u = get_unit(unit, self.request.space)
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient.text.strip())
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
))
|
||||
@ -61,7 +60,6 @@ class RecipeKeeper(Integration):
|
||||
if file.find("span", {"itemprop": "recipeSource"}).text != '':
|
||||
step.instruction += "\n\nImported from: " + file.find("span", {"itemprop": "recipeSource"}).text
|
||||
step.save()
|
||||
source_url_added = True
|
||||
|
||||
recipe.steps.add(step)
|
||||
|
||||
@ -72,7 +70,7 @@ class RecipeKeeper(Integration):
|
||||
import_zip = ZipFile(f['file'])
|
||||
self.import_recipe_image(recipe, BytesIO(import_zip.read(file.find("img", class_="recipe-photo").get("src"))), filetype='.jpeg')
|
||||
except Exception as e:
|
||||
pass
|
||||
print(recipe.name, ': failed to import image ', str(e))
|
||||
|
||||
return recipe
|
||||
|
||||
|
@ -1,11 +1,9 @@
|
||||
import base64
|
||||
import json
|
||||
from io import BytesIO
|
||||
|
||||
import requests
|
||||
from rest_framework.renderers import JSONRenderer
|
||||
|
||||
from cookbook.helper.ingredient_parser import parse, get_food, get_unit
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient
|
||||
|
||||
@ -33,6 +31,7 @@ class RecipeSage(Integration):
|
||||
except Exception as e:
|
||||
print('failed to parse yield or time ', str(e))
|
||||
|
||||
ingredient_parser = IngredientParser(self.request,True)
|
||||
ingredients_added = False
|
||||
for s in file['recipeInstructions']:
|
||||
step = Step.objects.create(
|
||||
@ -42,9 +41,9 @@ class RecipeSage(Integration):
|
||||
ingredients_added = True
|
||||
|
||||
for ingredient in file['recipeIngredient']:
|
||||
amount, unit, ingredient, note = parse(ingredient)
|
||||
f = get_food(ingredient, self.request.space)
|
||||
u = get_unit(unit, self.request.space)
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
))
|
||||
|
@ -1,11 +1,6 @@
|
||||
import json
|
||||
import re
|
||||
from io import BytesIO
|
||||
from zipfile import ZipFile
|
||||
|
||||
from cookbook.helper.ingredient_parser import parse, get_food, get_unit
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Food, Unit, Ingredient, Keyword
|
||||
from cookbook.models import Recipe, Step, Ingredient, Keyword
|
||||
|
||||
|
||||
class RezKonv(Integration):
|
||||
@ -46,11 +41,12 @@ class RezKonv(Integration):
|
||||
instruction='\n'.join(directions) + '\n\n', space=self.request.space,
|
||||
)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in ingredients:
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, ingredient, note = parse(ingredient)
|
||||
f = get_food(ingredient, self.request.space)
|
||||
u = get_unit(unit, self.request.space)
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
))
|
||||
|
@ -1,8 +1,8 @@
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from cookbook.helper.ingredient_parser import parse, get_food, get_unit
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Food, Unit, Ingredient
|
||||
from cookbook.models import Recipe, Step, Ingredient
|
||||
|
||||
|
||||
class Safron(Integration):
|
||||
@ -43,12 +43,13 @@ class Safron(Integration):
|
||||
|
||||
recipe = Recipe.objects.create(name=title, description=description, created_by=self.request.user, internal=True, space=self.request.space, )
|
||||
|
||||
step = Step.objects.create(instruction='\n'.join(directions), space=self.request.space,)
|
||||
step = Step.objects.create(instruction='\n'.join(directions), space=self.request.space, )
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in ingredients:
|
||||
amount, unit, ingredient, note = parse(ingredient)
|
||||
f = get_food(ingredient, self.request.space)
|
||||
u = get_unit(unit, self.request.space)
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
))
|
||||
|
[Several locale files changed: compiled .mo binaries not shown; oversized .po translation diffs suppressed by the viewer]
@ -11,7 +11,7 @@ msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2021-02-09 18:01+0100\n"
"PO-Revision-Date: 2021-04-12 20:22+0000\n"
"PO-Revision-Date: 2021-10-13 12:50+0000\n"
"Last-Translator: Hrachya Kocharyan <hkocharyan@ctemplar.com>\n"
"Language-Team: Armenian <http://translate.tandoor.dev/projects/tandoor/"
"recipes-backend/hy/>\n"
@ -20,7 +20,7 @@ msgstr ""
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
"X-Generator: Weblate 4.5.3\n"
"X-Generator: Weblate 4.8\n"

#: .\cookbook\filters.py:22 .\cookbook\templates\base.html:87
#: .\cookbook\templates\forms\edit_internal_recipe.html:219
@ -79,7 +79,7 @@ msgid ""
"mobile data. If lower than instance limit it is reset when saving."
msgstr ""
"0-ն կանջատի ավտոմատ սինքրոնացումը։ Գնումների ցուցակը թարմացվում է "
"յուրաքանչյուր սահմանված վարկյանը մեկ, մեկ ուրիշի կատարած փոփոխությունները "
"յուրաքանչյուր սահմանված վարկյանը մեկ, ուրիշի կատարած փոփոխությունները "
"սինքրոնացնելու համար։ Հարմար է, երբ մեկից ավել մարդ է կատարում գնումները, "
"բայց կարող է օգտագործել բջջային ինտերնետ։"

cookbook/management/commands/rebuildindex.py (new file, 31 lines)
@ -0,0 +1,31 @@
from django.conf import settings
from django.contrib.postgres.search import SearchVector
from django.core.management.base import BaseCommand
from django_scopes import scopes_disabled
from django.utils import translation
from django.utils.translation import gettext_lazy as _

from cookbook.managers import DICTIONARY
from cookbook.models import Recipe, Step


# can be executed at the command line with 'python manage.py rebuildindex'
class Command(BaseCommand):
    help = _('Rebuilds full text search index on Recipe')

    def handle(self, *args, **options):
        if settings.DATABASES['default']['ENGINE'] not in ['django.db.backends.postgresql_psycopg2', 'django.db.backends.postgresql']:
            self.stdout.write(self.style.WARNING(_('Only Postgress databases use full text search, no index to rebuild')))

        try:
            language = DICTIONARY.get(translation.get_language(), 'simple')
            with scopes_disabled():
                Recipe.objects.all().update(
                    name_search_vector=SearchVector('name__unaccent', weight='A', config=language),
                    desc_search_vector=SearchVector('description__unaccent', weight='B', config=language)
                )
                Step.objects.all().update(search_vector=SearchVector('instruction__unaccent', weight='B', config=language))

            self.stdout.write(self.style.SUCCESS(_('Recipe index rebuild complete.')))
        except Exception:
            self.stdout.write(self.style.ERROR(_('Recipe index rebuild failed.')))

cookbook/managers.py (new file, 69 lines)
@ -0,0 +1,69 @@
from django.contrib.postgres.aggregates import StringAgg
from django.contrib.postgres.search import (
    SearchQuery, SearchRank, SearchVector,
)
from django.db import models
from django.db.models import Q
from django.utils import translation

DICTIONARY = {
    # TODO find custom dictionaries - maybe from here https://www.postgresql.org/message-id/CAF4Au4x6X_wSXFwsQYE8q5o0aQZANrvYjZJ8uOnsiHDnOVPPEg%40mail.gmail.com
    # 'hy': 'Armenian',
    # 'ca': 'Catalan',
    # 'cs': 'Czech',
    'nl': 'dutch',
    'en': 'english',
    'fr': 'french',
    'de': 'german',
    'it': 'italian',
    # 'lv': 'Latvian',
    'es': 'spanish',
}


# TODO add schedule index rebuild
class RecipeSearchManager(models.Manager):
    def search(self, search_text, space):
        language = DICTIONARY.get(translation.get_language(), 'simple')
        search_query = SearchQuery(
            search_text,
            config=language,
            search_type="websearch"
        )
        search_vectors = (
            SearchVector('search_vector')
            + SearchVector(StringAgg('steps__ingredients__food__name__unaccent', delimiter=' '), weight='B', config=language)
            + SearchVector(StringAgg('keywords__name__unaccent', delimiter=' '), weight='B', config=language))
        search_rank = SearchRank(search_vectors, search_query)
        # USING TRIGRAM BREAKS WEB SEARCH
        # ADDING MULTIPLE TRIGRAMS CREATES DUPLICATE RESULTS
        # DISTINCT NOT COMPAITBLE WITH ANNOTATE
        # trigram_name = (TrigramSimilarity('name', search_text))
        # trigram_description = (TrigramSimilarity('description', search_text))
        # trigram_food = (TrigramSimilarity('steps__ingredients__food__name', search_text))
        # trigram_keyword = (TrigramSimilarity('keywords__name', search_text))
        # adding additional trigrams created duplicates
        # + TrigramSimilarity('description', search_text)
        # + TrigramSimilarity('steps__ingredients__food__name', search_text)
        # + TrigramSimilarity('keywords__name', search_text)
        return (
            self.get_queryset()
            .annotate(
                search=search_vectors,
                rank=search_rank,
                # trigram=trigram_name+trigram_description+trigram_food+trigram_keyword
                # trigram_name=trigram_name,
                # trigram_description=trigram_description,
                # trigram_food=trigram_food,
                # trigram_keyword=trigram_keyword
            )
            .filter(
                Q(search=search_query)
                # | Q(trigram_name__gt=0.1)
                # | Q(name__icontains=search_text)
                # | Q(trigram_name__gt=0.2)
                # | Q(trigram_description__gt=0.2)
                # | Q(trigram_food__gt=0.2)
                # | Q(trigram_keyword__gt=0.2)
            )
            .order_by('-rank'))

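Because SearchQuery is built with search_type="websearch", the query string accepts the Postgres web-search syntax (quoted phrases, OR, and a leading minus for exclusion). A hedged usage sketch, assuming the manager is attached to Recipe (e.g. objects = RecipeSearchManager() on the model, which is not shown in this diff):

    results = Recipe.objects.search('tomato -cream "slow cooker"', request.space)
    for recipe in results[:10]:
        print(recipe.name, recipe.rank)   # rank comes from the SearchRank annotation above
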
@ -18,6 +18,6 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='userpreference',
|
||||
name='use_fractions',
|
||||
field=models.BooleanField(default=False),
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
||||
|
cookbook/migrations/0143_build_full_text_index.py (new file, 110 lines)
@ -0,0 +1,110 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-07 20:00
|
||||
import annoying.fields
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.indexes import GinIndex
|
||||
from django.contrib.postgres.search import SearchVectorField, SearchVector
|
||||
from django.db import migrations, models
|
||||
from django.db.models import deletion
|
||||
from django_scopes import scopes_disabled
|
||||
from django.utils import translation
|
||||
from cookbook.managers import DICTIONARY
|
||||
from cookbook.models import Recipe, Step, Index, PermissionModelMixin, nameSearchField, allSearchFields
|
||||
|
||||
|
||||
def set_default_search_vector(apps, schema_editor):
|
||||
if settings.DATABASES['default']['ENGINE'] not in ['django.db.backends.postgresql_psycopg2', 'django.db.backends.postgresql']:
|
||||
return
|
||||
language = DICTIONARY.get(translation.get_language(), 'simple')
|
||||
with scopes_disabled():
|
||||
# TODO this approach doesn't work terribly well if multiple languages are in use
|
||||
# I'm also uncertain about forcing unaccent here
|
||||
Recipe.objects.all().update(
|
||||
name_search_vector=SearchVector('name__unaccent', weight='A', config=language),
|
||||
desc_search_vector=SearchVector('description__unaccent', weight='B', config=language)
|
||||
)
|
||||
Step.objects.all().update(search_vector=SearchVector('instruction__unaccent', weight='B'))
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('cookbook', '0142_alter_userpreference_search_style'),
|
||||
]
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='recipe',
|
||||
name='desc_search_vector',
|
||||
field=SearchVectorField(null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='recipe',
|
||||
name='name_search_vector',
|
||||
field=SearchVectorField(null=True),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='recipe',
|
||||
index=GinIndex(fields=['name_search_vector', 'desc_search_vector'], name='cookbook_re_name_se_bdf3ca_gin'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='step',
|
||||
name='search_vector',
|
||||
field=SearchVectorField(null=True),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='step',
|
||||
index=GinIndex(fields=['search_vector'], name='cookbook_st_search__2ef7fa_gin'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='cooklog',
|
||||
index=Index(fields=['id', 'recipe', '-created_at', 'rating'], name='cookbook_co_id_37485a_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='food',
|
||||
index=Index(fields=['id', 'name'], name='cookbook_fo_id_22b733_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='ingredient',
|
||||
index=Index(fields=['id', 'food', 'unit'], name='cookbook_in_id_3368be_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='keyword',
|
||||
index=Index(fields=['id', 'name'], name='cookbook_ke_id_ebc03f_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='recipe',
|
||||
index=Index(fields=['id', 'name', 'description'], name='cookbook_re_id_e4c2d4_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='recipebook',
|
||||
index=Index(fields=['name', 'description'], name='cookbook_re_name_bbe446_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='viewlog',
|
||||
index=Index(fields=['recipe', '-created_at'], name='cookbook_vi_recipe__5cd178_idx'),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='SearchFields',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', models.CharField(max_length=32, unique=True)),
|
||||
('field', models.CharField(max_length=64, unique=True)),
|
||||
],
|
||||
bases=(models.Model, PermissionModelMixin),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='SearchPreference',
|
||||
fields=[
|
||||
('user', annoying.fields.AutoOneToOneField(on_delete=deletion.CASCADE, primary_key=True, serialize=False, to='auth.user')),
|
||||
('search', models.CharField(choices=[('plain', 'Simple'), ('phrase', 'Phrase'), ('websearch', 'Web'), ('raw', 'Raw')], default='plain', max_length=32)),
|
||||
('lookup', models.BooleanField(default=False)),
|
||||
('fulltext', models.ManyToManyField(blank=True, related_name='fulltext_fields', to='cookbook.SearchFields')),
|
||||
('icontains', models.ManyToManyField(blank=True, default=nameSearchField, related_name='icontains_fields', to='cookbook.SearchFields')),
|
||||
('istartswith', models.ManyToManyField(blank=True, related_name='istartswith_fields', to='cookbook.SearchFields')),
|
||||
('trigram', models.ManyToManyField(blank=True, related_name='trigram_fields', to='cookbook.SearchFields')),
|
||||
('unaccent', models.ManyToManyField(blank=True, default=allSearchFields, related_name='unaccent_fields', to='cookbook.SearchFields')),
|
||||
],
|
||||
bases=(models.Model, PermissionModelMixin),
|
||||
),
|
||||
migrations.RunPython(
|
||||
set_default_search_vector
|
||||
),
|
||||
]
|
cookbook/migrations/0144_create_searchfields.py (new file, 23 lines)
@ -0,0 +1,23 @@
|
||||
from cookbook.models import SearchFields
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
def create_searchfields(apps, schema_editor):
|
||||
SearchFields.objects.create(name='Name', field='name')
|
||||
SearchFields.objects.create(name='Description', field='description')
|
||||
SearchFields.objects.create(name='Instructions', field='steps__instruction')
|
||||
SearchFields.objects.create(name='Ingredients', field='steps__ingredients__food__name')
|
||||
SearchFields.objects.create(name='Keywords', field='keywords__name')
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0143_build_full_text_index'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
create_searchfields
|
||||
),
|
||||
]
|
@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2 on 2021-04-22 21:33
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0144_create_searchfields'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='userpreference',
|
||||
name='search_style',
|
||||
field=models.CharField(choices=[('SMALL', 'Small'), ('LARGE', 'Large'), ('NEW', 'New')], default='LARGE', max_length=64),
|
||||
),
|
||||
]
|
@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.4 on 2021-07-03 08:32
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0145_alter_userpreference_search_style'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='userpreference',
|
||||
name='use_fractions',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
cookbook/migrations/0147_keyword_to_tree.py (new file, 70 lines)
@ -0,0 +1,70 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-30 19:42
|
||||
|
||||
from treebeard.mp_tree import MP_Node
|
||||
from django.db import migrations, models
|
||||
from django_scopes import scopes_disabled
|
||||
# update if needed
|
||||
steplen = MP_Node.steplen
|
||||
alphabet = MP_Node.alphabet
|
||||
node_order_by = ["name"]
|
||||
|
||||
|
||||
def update_paths(apps, schema_editor):
|
||||
with scopes_disabled():
|
||||
Node = apps.get_model("cookbook", "Keyword")
|
||||
nodes = Node.objects.all().order_by(*node_order_by)
|
||||
for i, node in enumerate(nodes, 1):
|
||||
# for default values, this resolves to: "{:04d}".format(i)
|
||||
node.path = f"{{:{alphabet[0]}{steplen}d}}".format(i)
|
||||
if nodes:
|
||||
Node.objects.bulk_update(nodes, ["path"])
|
||||
|
||||
|
||||
def backwards(apps, schema_editor):
|
||||
"""nothing to do"""
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0146_alter_userpreference_use_fractions'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='keyword',
|
||||
name='depth',
|
||||
field=models.PositiveIntegerField(default=1),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='keyword',
|
||||
name='numchild',
|
||||
field=models.PositiveIntegerField(default=0),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='keyword',
|
||||
name='path',
|
||||
field=models.CharField(default="", max_length=255, unique=False),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='userpreference',
|
||||
name='use_fractions',
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
migrations.RunPython(update_paths, backwards),
|
||||
migrations.AlterField(
|
||||
model_name="keyword",
|
||||
name="path",
|
||||
field=models.CharField(max_length=255, unique=True),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='keyword',
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name='keyword',
|
||||
constraint=models.UniqueConstraint(fields=('space', 'name'), name='unique_name_per_space'),
|
||||
),
|
||||
]
|
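A quick worked example of the path backfill in update_paths above: with treebeard's default steplen of 4 and an alphabet whose first character is '0', the f-string reduces to "{:04d}" (as the migration's own comment notes), so pre-existing keywords simply become sequentially numbered root nodes in name order:

    >>> steplen, alphabet = 4, '0123456789'   # stand-ins for the MP_Node defaults
    >>> f"{{:{alphabet[0]}{steplen}d}}".format(3)
    '0003'
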
cookbook/migrations/0148_auto_20210813_1829.py (new file, 66 lines)
@ -0,0 +1,66 @@
|
||||
# Generated by Django 3.2.5 on 2021-08-13 16:29
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0147_keyword_to_tree'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveConstraint(
|
||||
model_name='keyword',
|
||||
name='unique_name_per_space',
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='userpreference',
|
||||
name='use_fractions',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='food',
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='recipebookentry',
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='supermarket',
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='supermarketcategory',
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='unit',
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name='food',
|
||||
constraint=models.UniqueConstraint(fields=('space', 'name'), name='f_unique_name_per_space'),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name='keyword',
|
||||
constraint=models.UniqueConstraint(fields=('space', 'name'), name='kw_unique_name_per_space'),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name='recipebookentry',
|
||||
constraint=models.UniqueConstraint(fields=('recipe', 'book'), name='rbe_unique_name_per_space'),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name='supermarket',
|
||||
constraint=models.UniqueConstraint(fields=('space', 'name'), name='sm_unique_name_per_space'),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name='supermarketcategory',
|
||||
constraint=models.UniqueConstraint(fields=('space', 'name'), name='smc_unique_name_per_space'),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name='unit',
|
||||
constraint=models.UniqueConstraint(fields=('space', 'name'), name='u_unique_name_per_space'),
|
||||
),
|
||||
]
|
31
cookbook/migrations/0149_fix_leading_trailing_spaces.py
Normal file
31
cookbook/migrations/0149_fix_leading_trailing_spaces.py
Normal file
@ -0,0 +1,31 @@
|
||||
from django.db import migrations, models
|
||||
from django_scopes import scopes_disabled
|
||||
models = ["Keyword", "Food", "Unit"]
|
||||
|
||||
def update_paths(apps, schema_editor):
|
||||
with scopes_disabled():
|
||||
for model in models:
|
||||
Node = apps.get_model("cookbook", model)
|
||||
nodes = Node.objects.all().filter(name__startswith=" ")
|
||||
for i in nodes:
|
||||
i.name = "_" + i.name
|
||||
i.save()
|
||||
nodes = Node.objects.all().filter(name__endswith=" ")
|
||||
for i in nodes:
|
||||
i.name = i.name + "_"
|
||||
i.save()
|
||||
|
||||
|
||||
def backwards(apps, schema_editor):
|
||||
"""nothing to do"""
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0148_auto_20210813_1829'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(update_paths, backwards),
|
||||
]
|
57
cookbook/migrations/0150_food_to_tree.py
Normal file
57
cookbook/migrations/0150_food_to_tree.py
Normal file
@ -0,0 +1,57 @@
|
||||
# Generated by Django 3.2.5 on 2021-08-14 15:40
|
||||
|
||||
from treebeard.mp_tree import MP_Node
|
||||
from django.db import migrations, models
|
||||
from django_scopes import scopes_disabled
|
||||
# update if needed
|
||||
steplen = MP_Node.steplen
|
||||
alphabet = MP_Node.alphabet
|
||||
node_order_by = ["name"]
|
||||
|
||||
|
||||
def update_paths(apps, schema_editor):
|
||||
with scopes_disabled():
|
||||
Node = apps.get_model("cookbook", "Food")
|
||||
nodes = Node.objects.all().order_by(*node_order_by)
|
||||
for i, node in enumerate(nodes, 1):
|
||||
# for default values, this resolves to: "{:04d}".format(i)
|
||||
node.path = f"{{:{alphabet[0]}{steplen}d}}".format(i)
|
||||
if nodes:
|
||||
Node.objects.bulk_update(nodes, ["path"])
|
||||
|
||||
|
||||
def backwards(apps, schema_editor):
|
||||
"""nothing to do"""
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0149_fix_leading_trailing_spaces'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='food',
|
||||
name='depth',
|
||||
field=models.PositiveIntegerField(default=1),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='food',
|
||||
name='numchild',
|
||||
field=models.PositiveIntegerField(default=0),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='food',
|
||||
name='path',
|
||||
field=models.CharField(default=0, max_length=255, unique=False),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.RunPython(update_paths, backwards),
|
||||
migrations.AlterField(
|
||||
model_name="food",
|
||||
name="path",
|
||||
field=models.CharField(max_length=255, unique=True),
|
||||
),
|
||||
]
|
40
cookbook/migrations/0151_auto_20210915_1037.py
Normal file
40
cookbook/migrations/0151_auto_20210915_1037.py
Normal file
@ -0,0 +1,40 @@
|
||||
# Generated by Django 3.2.7 on 2021-09-15 08:37
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0150_food_to_tree'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveIndex(
|
||||
model_name='cooklog',
|
||||
name='cookbook_co_id_37485a_idx',
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name='viewlog',
|
||||
name='cookbook_vi_recipe__5cd178_idx',
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='ingredient',
|
||||
name='food',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='cookbook.food'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='userpreference',
|
||||
name='search_style',
|
||||
field=models.CharField(choices=[('SMALL', 'Small'), ('LARGE', 'Large'), ('NEW', 'New')], default='NEW', max_length=64),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='cooklog',
|
||||
index=models.Index(fields=['id', 'recipe', '-created_at', 'rating', 'created_by'], name='cookbook_co_id_93d841_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='viewlog',
|
||||
index=models.Index(fields=['recipe', '-created_at', 'created_by'], name='cookbook_vi_recipe__1b051f_idx'),
|
||||
),
|
||||
]
|
35
cookbook/migrations/0152_automation.py
Normal file
35
cookbook/migrations/0152_automation.py
Normal file
@ -0,0 +1,35 @@
|
||||
# Generated by Django 3.2.7 on 2021-09-15 10:12
|
||||
|
||||
import cookbook.models
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
('cookbook', '0151_auto_20210915_1037'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Automation',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('type', models.CharField(choices=[('FOOD_ALIAS', 'Food Alias'), ('UNIT_ALIAS', 'Unit Alias'), ('KEYWORD_ALIAS', 'Keyword Alias')], max_length=128)),
|
||||
('name', models.CharField(default='', max_length=128)),
|
||||
('description', models.TextField(blank=True, null=True)),
|
||||
('param_1', models.CharField(blank=True, max_length=128, null=True)),
|
||||
('param_2', models.CharField(blank=True, max_length=128, null=True)),
|
||||
('param_3', models.CharField(blank=True, max_length=128, null=True)),
|
||||
('disabled', models.BooleanField(default=False)),
|
||||
('updated_at', models.DateTimeField(auto_now=True)),
|
||||
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||
('created_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
|
||||
('space', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cookbook.space')),
|
||||
],
|
||||
bases=(models.Model, cookbook.models.PermissionModelMixin),
|
||||
),
|
||||
]
|
106
cookbook/migrations/0153_auto_20210915_2327.py
Normal file
106
cookbook/migrations/0153_auto_20210915_2327.py
Normal file
@ -0,0 +1,106 @@
|
||||
# Generated by Django 3.2.7 on 2021-09-15 21:27
|
||||
|
||||
import django.contrib.postgres.indexes
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0152_automation'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveIndex(
|
||||
model_name='cooklog',
|
||||
name='cookbook_co_id_93d841_idx',
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name='food',
|
||||
name='cookbook_fo_id_22b733_idx',
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name='ingredient',
|
||||
name='cookbook_in_id_3368be_idx',
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name='recipe',
|
||||
name='cookbook_re_name_se_bdf3ca_gin',
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name='recipe',
|
||||
name='cookbook_re_id_e4c2d4_idx',
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name='recipebook',
|
||||
name='cookbook_re_name_bbe446_idx',
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='cooklog',
|
||||
index=models.Index(fields=['id'], name='cookbook_co_id_553a6d_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='cooklog',
|
||||
index=models.Index(fields=['recipe'], name='cookbook_co_recipe__8ec719_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='cooklog',
|
||||
index=models.Index(fields=['-created_at'], name='cookbook_co_created_f6e244_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='cooklog',
|
||||
index=models.Index(fields=['rating'], name='cookbook_co_rating_aa7662_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='cooklog',
|
||||
index=models.Index(fields=['created_by'], name='cookbook_co_created_7ea086_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='cooklog',
|
||||
index=models.Index(fields=['created_by', 'rating'], name='cookbook_co_created_f5ccd7_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='food',
|
||||
index=models.Index(fields=['id'], name='cookbook_fo_id_3c379b_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='food',
|
||||
index=models.Index(fields=['name'], name='cookbook_fo_name_c848b6_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='ingredient',
|
||||
index=models.Index(fields=['id'], name='cookbook_in_id_2c1f57_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='recipe',
|
||||
index=django.contrib.postgres.indexes.GinIndex(fields=['name_search_vector'], name='cookbook_re_name_se_5dbbd5_gin'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='recipe',
|
||||
index=django.contrib.postgres.indexes.GinIndex(fields=['desc_search_vector'], name='cookbook_re_desc_se_fdee30_gin'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='recipe',
|
||||
index=models.Index(fields=['id'], name='cookbook_re_id_b2bdcf_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='recipe',
|
||||
index=models.Index(fields=['name'], name='cookbook_re_name_b8a027_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='recipebook',
|
||||
index=models.Index(fields=['name'], name='cookbook_re_name_94cc63_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='viewlog',
|
||||
index=models.Index(fields=['recipe'], name='cookbook_vi_recipe__ce995d_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='viewlog',
|
||||
index=models.Index(fields=['-created_at'], name='cookbook_vi_created_bd2b5f_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='viewlog',
|
||||
index=models.Index(fields=['created_by'], name='cookbook_vi_created_f9385c_idx'),
|
||||
),
|
||||
]
|
23
cookbook/migrations/0154_auto_20210922_1705.py
Normal file
23
cookbook/migrations/0154_auto_20210922_1705.py
Normal file
@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.2.7 on 2021-09-22 15:05
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0153_auto_20210915_2327'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='mealtype',
|
||||
name='color',
|
||||
field=models.CharField(blank=True, max_length=7, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='mealtype',
|
||||
name='icon',
|
||||
field=models.CharField(blank=True, max_length=16, null=True),
|
||||
),
|
||||
]
|
18
cookbook/migrations/0155_mealtype_default.py
Normal file
18
cookbook/migrations/0155_mealtype_default.py
Normal file
@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.7 on 2021-09-23 11:38
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0154_auto_20210922_1705'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='mealtype',
|
||||
name='default',
|
||||
field=models.BooleanField(blank=True, default=False),
|
||||
),
|
||||
]
|
@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.7 on 2021-09-28 16:45
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0155_mealtype_default'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='searchpreference',
|
||||
name='trigram_threshold',
|
||||
field=models.DecimalField(decimal_places=2, default=0.1, max_digits=3),
|
||||
),
|
||||
]
|
33
cookbook/migrations/0157_alter_searchpreference_trigram.py
Normal file
33
cookbook/migrations/0157_alter_searchpreference_trigram.py
Normal file
@ -0,0 +1,33 @@
|
||||
# Generated by Django 3.2.7 on 2021-09-29 06:37
|
||||
from django_scopes import scopes_disabled
|
||||
|
||||
from django.db import migrations, models
|
||||
from cookbook.models import nameSearchField
|
||||
|
||||
|
||||
def add_default_trigram(apps, schema_editor):
|
||||
with scopes_disabled():
|
||||
SearchFields = apps.get_model('cookbook', 'SearchFields')
|
||||
SearchPreference = apps.get_model('cookbook', 'SearchPreference')
|
||||
|
||||
name_field = SearchFields.objects.get(name='Name')
|
||||
|
||||
for p in SearchPreference.objects.all():
|
||||
if not p.trigram.all() and p.search == 'plain':
|
||||
p.trigram.add(name_field)
|
||||
p.save()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('cookbook', '0156_searchpreference_trigram_threshold'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='searchpreference',
|
||||
name='trigram',
|
||||
field=models.ManyToManyField(blank=True, default=nameSearchField, related_name='trigram_fields', to='cookbook.SearchFields'),
|
||||
),
|
||||
migrations.RunPython(add_default_trigram),
|
||||
]
|
@ -7,16 +7,19 @@ from datetime import date, timedelta
|
||||
from annoying.fields import AutoOneToOneField
|
||||
from django.contrib import auth
|
||||
from django.contrib.auth.models import Group, User
|
||||
from django.contrib.postgres.indexes import GinIndex
|
||||
from django.contrib.postgres.search import SearchVectorField
|
||||
from django.core.files.uploadedfile import UploadedFile, InMemoryUploadedFile
|
||||
from django.core.validators import MinLengthValidator
|
||||
from django.db import models
|
||||
from django.db import models, IntegrityError
|
||||
from django.db.models import Index, ProtectedError
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext as _
|
||||
from treebeard.mp_tree import MP_Node, MP_NodeManager
|
||||
from django_scopes import ScopedManager, scopes_disabled
|
||||
from django_prometheus.models import ExportModelOperationsMixin
|
||||
from django_scopes import ScopedManager
|
||||
|
||||
from recipes.settings import (COMMENT_PREF_DEFAULT, FRACTION_PREF_DEFAULT,
|
||||
STICKY_NAV_PREF_DEFAULT)
|
||||
STICKY_NAV_PREF_DEFAULT, SORT_TREE_BY_NAME)
|
||||
|
||||
|
||||
def get_user_name(self):
|
||||
@ -33,8 +36,82 @@ def get_model_name(model):
|
||||
return ('_'.join(re.findall('[A-Z][^A-Z]*', model.__name__))).lower()
|
||||
|
||||
|
||||
class PermissionModelMixin:
|
||||
class TreeManager(MP_NodeManager):
|
||||
# model.Manager get_or_create() is not compatible with MP_Tree
|
||||
def get_or_create(self, **kwargs):
|
||||
kwargs['name'] = kwargs['name'].strip()
|
||||
try:
|
||||
return self.get(name__exact=kwargs['name'], space=kwargs['space']), False
|
||||
except self.model.DoesNotExist:
|
||||
with scopes_disabled():
|
||||
try:
|
||||
return self.model.add_root(**kwargs), True
|
||||
except IntegrityError as e:
|
||||
if 'Key (path)' in e.args[0]:
|
||||
self.model.fix_tree(fix_paths=True)
|
||||
return self.model.add_root(**kwargs), True
|
||||
|
||||
|
||||
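The custom manager above is what lets the serializer changes later in this diff keep calling get_or_create() on tree models. A minimal usage sketch, assuming a request-scoped space object (illustrative, not part of the diff).

# Illustrative only: TreeManager.get_or_create strips the name, returns an existing
# node when one matches, and otherwise adds the entry as a new root node.
keyword, created = Keyword.objects.get_or_create(name=" Dinner ", space=space)
assert keyword.name == "Dinner"
assert keyword.is_root()  # new entries always start out as root nodes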
class TreeModel(MP_Node):
|
||||
_full_name_separator = ' > '
|
||||
|
||||
def __str__(self):
|
||||
if self.icon:
|
||||
return f"{self.icon} {self.name}"
|
||||
else:
|
||||
return f"{self.name}"
|
||||
|
||||
@property
|
||||
def parent(self):
|
||||
parent = self.get_parent()
|
||||
if parent:
|
||||
return self.get_parent().id
|
||||
return None
|
||||
|
||||
@property
|
||||
def full_name(self):
|
||||
"""
|
||||
Returns a string representation of a tree node and its ancestors,
e.g. 'Cuisine > Asian > Chinese > Cantonese'.
|
||||
"""
|
||||
names = [node.name for node in self.get_ancestors_and_self()]
|
||||
return self._full_name_separator.join(names)
|
||||
|
||||
def get_ancestors_and_self(self):
|
||||
"""
|
||||
Gets ancestors and includes itself. Use treebeard's get_ancestors
|
||||
if you don't want to include the node itself. It's a separate
|
||||
function as it's commonly used in templates.
|
||||
"""
|
||||
if self.is_root():
|
||||
return [self]
|
||||
return list(self.get_ancestors()) + [self]
|
||||
|
||||
def get_descendants_and_self(self):
|
||||
"""
|
||||
Gets descendants and includes itself. Use treebeard's get_descendants
|
||||
if you don't want to include the node itself. It's a separate
|
||||
function as it's commonly used in templates.
|
||||
"""
|
||||
return self.get_tree(self)
|
||||
|
||||
def has_children(self):
|
||||
return self.get_num_children() > 0
|
||||
|
||||
def get_num_children(self):
|
||||
return self.get_children().count()
|
||||
|
||||
# use self.objects.get_or_create() instead
|
||||
@classmethod
|
||||
def add_root(self, **kwargs):
|
||||
with scopes_disabled():
|
||||
return super().add_root(**kwargs)
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
|
||||
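To make the helpers above concrete, a small illustrative example with hypothetical keyword data (the objects and the space are assumptions, not taken from the diff).

# Illustrative only: behaviour of the TreeModel helpers for a nested keyword tree.
cuisine = Keyword.add_root(name="Cuisine", space=space)
asian = cuisine.add_child(name="Asian", space=space)
chinese = asian.add_child(name="Chinese", space=space)

assert chinese.full_name == "Cuisine > Asian > Chinese"
assert chinese.parent == asian.id  # parent exposes the parent's id, not the instance
assert cuisine.has_children() and cuisine.get_num_children() == 1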
class PermissionModelMixin:
|
||||
@staticmethod
|
||||
def get_space_key():
|
||||
return ('space',)
|
||||
@ -107,7 +184,8 @@ class UserPreference(models.Model, PermissionModelMixin):
|
||||
COLORS = (
|
||||
(PRIMARY, 'Primary'),
|
||||
(SECONDARY, 'Secondary'),
|
||||
(SUCCESS, 'Success'), (INFO, 'Info'),
|
||||
(SUCCESS, 'Success'),
|
||||
(INFO, 'Info'),
|
||||
(WARNING, 'Warning'),
|
||||
(DANGER, 'Danger'),
|
||||
(LIGHT, 'Light'),
|
||||
@ -212,7 +290,9 @@ class SupermarketCategory(models.Model, PermissionModelMixin):
|
||||
return self.name
|
||||
|
||||
class Meta:
|
||||
unique_together = (('space', 'name'),)
|
||||
constraints = [
|
||||
models.UniqueConstraint(fields=['space', 'name'], name='smc_unique_name_per_space')
|
||||
]
|
||||
|
||||
|
||||
class Supermarket(models.Model, PermissionModelMixin):
|
||||
@ -227,7 +307,9 @@ class Supermarket(models.Model, PermissionModelMixin):
|
||||
return self.name
|
||||
|
||||
class Meta:
|
||||
unique_together = (('space', 'name'),)
|
||||
constraints = [
|
||||
models.UniqueConstraint(fields=['space', 'name'], name='sm_unique_name_per_space')
|
||||
]
|
||||
|
||||
|
||||
class SupermarketCategoryRelation(models.Model, PermissionModelMixin):
|
||||
@ -257,7 +339,9 @@ class SyncLog(models.Model, PermissionModelMixin):
|
||||
return f"{self.created_at}:{self.sync} - {self.status}"
|
||||
|
||||
|
||||
class Keyword(ExportModelOperationsMixin('keyword'), models.Model, PermissionModelMixin):
|
||||
class Keyword(ExportModelOperationsMixin('keyword'), TreeModel, PermissionModelMixin):
|
||||
if SORT_TREE_BY_NAME:
|
||||
node_order_by = ['name']
|
||||
name = models.CharField(max_length=64)
|
||||
icon = models.CharField(max_length=16, blank=True, null=True)
|
||||
description = models.TextField(default="", blank=True)
|
||||
@ -265,16 +349,13 @@ class Keyword(ExportModelOperationsMixin('keyword'), models.Model, PermissionMod
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
space = models.ForeignKey(Space, on_delete=models.CASCADE)
|
||||
objects = ScopedManager(space='space')
|
||||
|
||||
def __str__(self):
|
||||
if self.icon:
|
||||
return f"{self.icon} {self.name}"
|
||||
else:
|
||||
return f"{self.name}"
|
||||
objects = ScopedManager(space='space', _manager_class=TreeManager)
|
||||
|
||||
class Meta:
|
||||
unique_together = (('space', 'name'),)
|
||||
constraints = [
|
||||
models.UniqueConstraint(fields=['space', 'name'], name='kw_unique_name_per_space')
|
||||
]
|
||||
indexes = (Index(fields=['id', 'name']),)
|
||||
|
||||
|
||||
class Unit(ExportModelOperationsMixin('unit'), models.Model, PermissionModelMixin):
|
||||
@ -288,10 +369,14 @@ class Unit(ExportModelOperationsMixin('unit'), models.Model, PermissionModelMixi
|
||||
return self.name
|
||||
|
||||
class Meta:
|
||||
unique_together = (('space', 'name'),)
|
||||
constraints = [
|
||||
models.UniqueConstraint(fields=['space', 'name'], name='u_unique_name_per_space')
|
||||
]
|
||||
|
||||
|
||||
class Food(ExportModelOperationsMixin('food'), models.Model, PermissionModelMixin):
|
||||
class Food(ExportModelOperationsMixin('food'), TreeModel, PermissionModelMixin):
|
||||
if SORT_TREE_BY_NAME:
|
||||
node_order_by = ['name']
|
||||
name = models.CharField(max_length=128, validators=[MinLengthValidator(1)])
|
||||
recipe = models.ForeignKey('Recipe', null=True, blank=True, on_delete=models.SET_NULL)
|
||||
supermarket_category = models.ForeignKey(SupermarketCategory, null=True, blank=True, on_delete=models.SET_NULL)
|
||||
@ -299,17 +384,30 @@ class Food(ExportModelOperationsMixin('food'), models.Model, PermissionModelMixi
|
||||
description = models.TextField(default='', blank=True)
|
||||
|
||||
space = models.ForeignKey(Space, on_delete=models.CASCADE)
|
||||
objects = ScopedManager(space='space')
|
||||
objects = ScopedManager(space='space', _manager_class=TreeManager)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
def delete(self):
|
||||
if self.ingredient_set.all().exclude(step=None).count() > 0:
|
||||
raise ProtectedError(self.name + _(" is part of a recipe step and cannot be deleted"), self.ingredient_set.all().exclude(step=None))
|
||||
else:
|
||||
return super().delete()
|
||||
|
||||
class Meta:
|
||||
unique_together = (('space', 'name'),)
|
||||
constraints = [
|
||||
models.UniqueConstraint(fields=['space', 'name'], name='f_unique_name_per_space')
|
||||
]
|
||||
indexes = (
|
||||
Index(fields=['id']),
|
||||
Index(fields=['name']),
|
||||
)
|
||||
|
||||
|
||||
class Ingredient(ExportModelOperationsMixin('ingredient'), models.Model, PermissionModelMixin):
|
||||
food = models.ForeignKey(Food, on_delete=models.PROTECT, null=True, blank=True)
|
||||
# a pre-delete signal on Food checks whether the Ingredient is part of a Step; if it is, a ProtectedError is raised instead of cascading the delete
|
||||
food = models.ForeignKey(Food, on_delete=models.CASCADE, null=True, blank=True)
|
||||
unit = models.ForeignKey(Unit, on_delete=models.PROTECT, null=True, blank=True)
|
||||
amount = models.DecimalField(default=0, decimal_places=16, max_digits=32)
|
||||
note = models.CharField(max_length=256, null=True, blank=True)
|
||||
@ -325,6 +423,9 @@ class Ingredient(ExportModelOperationsMixin('ingredient'), models.Model, Permiss
|
||||
|
||||
class Meta:
|
||||
ordering = ['order', 'pk']
|
||||
indexes = (
|
||||
Index(fields=['id']),
|
||||
)
|
||||
|
||||
|
||||
class Step(ExportModelOperationsMixin('step'), models.Model, PermissionModelMixin):
|
||||
@ -345,6 +446,7 @@ class Step(ExportModelOperationsMixin('step'), models.Model, PermissionModelMixi
|
||||
order = models.IntegerField(default=0)
|
||||
file = models.ForeignKey('UserFile', on_delete=models.PROTECT, null=True, blank=True)
|
||||
show_as_header = models.BooleanField(default=True)
|
||||
search_vector = SearchVectorField(null=True)
|
||||
step_recipe = models.ForeignKey('Recipe', default=None, blank=True, null=True, on_delete=models.PROTECT)
|
||||
|
||||
space = models.ForeignKey(Space, on_delete=models.CASCADE)
|
||||
@ -356,6 +458,7 @@ class Step(ExportModelOperationsMixin('step'), models.Model, PermissionModelMixi
|
||||
|
||||
class Meta:
|
||||
ordering = ['order', 'pk']
|
||||
indexes = (GinIndex(fields=["search_vector"]),)
|
||||
|
||||
|
||||
class NutritionInformation(models.Model, PermissionModelMixin):
|
||||
@ -401,12 +504,23 @@ class Recipe(ExportModelOperationsMixin('recipe'), models.Model, PermissionModel
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
name_search_vector = SearchVectorField(null=True)
|
||||
desc_search_vector = SearchVectorField(null=True)
|
||||
space = models.ForeignKey(Space, on_delete=models.CASCADE)
|
||||
|
||||
objects = ScopedManager(space='space')
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
class Meta():
|
||||
indexes = (
|
||||
GinIndex(fields=["name_search_vector"]),
|
||||
GinIndex(fields=["desc_search_vector"]),
|
||||
Index(fields=['id']),
|
||||
Index(fields=['name']),
|
||||
)
|
||||
|
||||
|
||||
class Comment(ExportModelOperationsMixin('comment'), models.Model, PermissionModelMixin):
|
||||
recipe = models.ForeignKey(Recipe, on_delete=models.CASCADE)
|
||||
@ -455,6 +569,9 @@ class RecipeBook(ExportModelOperationsMixin('book'), models.Model, PermissionMod
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
class Meta():
|
||||
indexes = (Index(fields=['name']),)
|
||||
|
||||
|
||||
class RecipeBookEntry(ExportModelOperationsMixin('book_entry'), models.Model, PermissionModelMixin):
|
||||
recipe = models.ForeignKey(Recipe, on_delete=models.CASCADE)
|
||||
@ -476,12 +593,17 @@ class RecipeBookEntry(ExportModelOperationsMixin('book_entry'), models.Model, Pe
|
||||
return None
|
||||
|
||||
class Meta:
|
||||
unique_together = (('recipe', 'book'),)
|
||||
constraints = [
|
||||
models.UniqueConstraint(fields=['recipe', 'book'], name='rbe_unique_name_per_space')
|
||||
]
|
||||
|
||||
|
||||
class MealType(models.Model, PermissionModelMixin):
|
||||
name = models.CharField(max_length=128)
|
||||
order = models.IntegerField(default=0)
|
||||
icon = models.CharField(max_length=16, blank=True, null=True)
|
||||
color = models.CharField(max_length=7, blank=True, null=True)
|
||||
default = models.BooleanField(default=False, blank=True)
|
||||
created_by = models.ForeignKey(User, on_delete=models.CASCADE)
|
||||
|
||||
space = models.ForeignKey(Space, on_delete=models.CASCADE)
|
||||
@ -651,6 +773,16 @@ class CookLog(ExportModelOperationsMixin('cook_log'), models.Model, PermissionMo
|
||||
def __str__(self):
|
||||
return self.recipe.name
|
||||
|
||||
class Meta():
|
||||
indexes = (
|
||||
Index(fields=['id']),
|
||||
Index(fields=['recipe']),
|
||||
Index(fields=['-created_at']),
|
||||
Index(fields=['rating']),
|
||||
Index(fields=['created_by']),
|
||||
Index(fields=['created_by', 'rating']),
|
||||
)
|
||||
|
||||
|
||||
class ViewLog(ExportModelOperationsMixin('view_log'), models.Model, PermissionModelMixin):
|
||||
recipe = models.ForeignKey(Recipe, on_delete=models.CASCADE)
|
||||
@ -663,6 +795,14 @@ class ViewLog(ExportModelOperationsMixin('view_log'), models.Model, PermissionMo
|
||||
def __str__(self):
|
||||
return self.recipe.name
|
||||
|
||||
class Meta():
|
||||
indexes = (
|
||||
Index(fields=['recipe']),
|
||||
Index(fields=['-created_at']),
|
||||
Index(fields=['created_by']),
|
||||
Index(fields=['recipe', '-created_at', 'created_by']),
|
||||
)
|
||||
|
||||
|
||||
class ImportLog(models.Model, PermissionModelMixin):
|
||||
type = models.CharField(max_length=32)
|
||||
@ -693,6 +833,54 @@ class BookmarkletImport(ExportModelOperationsMixin('bookmarklet_import'), models
|
||||
space = models.ForeignKey(Space, on_delete=models.CASCADE)
|
||||
|
||||
|
||||
# field names used to configure search behavior - all data populated during data migration
|
||||
# another option is to use a MultiSelectField from https://github.com/goinnn/django-multiselectfield
|
||||
class SearchFields(models.Model, PermissionModelMixin):
|
||||
name = models.CharField(max_length=32, unique=True)
|
||||
field = models.CharField(max_length=64, unique=True)
|
||||
|
||||
def __str__(self):
|
||||
return _(self.name)
|
||||
|
||||
@staticmethod
|
||||
def get_name(self):
|
||||
return _(self.name)
|
||||
|
||||
|
||||
def allSearchFields():
|
||||
return list(SearchFields.objects.values_list('id', flat=True))
|
||||
|
||||
|
||||
def nameSearchField():
|
||||
return [SearchFields.objects.get(name='Name').id]
|
||||
|
||||
|
||||
class SearchPreference(models.Model, PermissionModelMixin):
|
||||
# Search Style (validation parsleyjs.org)
|
||||
# phrase or plain or raw (websearch and trigrams are mutually exclusive)
|
||||
SIMPLE = 'plain'
|
||||
PHRASE = 'phrase'
|
||||
WEB = 'websearch'
|
||||
RAW = 'raw'
|
||||
SEARCH_STYLE = (
|
||||
(SIMPLE, _('Simple')),
|
||||
(PHRASE, _('Phrase')),
|
||||
(WEB, _('Web')),
|
||||
(RAW, _('Raw'))
|
||||
)
|
||||
|
||||
user = AutoOneToOneField(User, on_delete=models.CASCADE, primary_key=True)
|
||||
search = models.CharField(choices=SEARCH_STYLE, max_length=32, default=SIMPLE)
|
||||
|
||||
lookup = models.BooleanField(default=False)
|
||||
unaccent = models.ManyToManyField(SearchFields, related_name="unaccent_fields", blank=True, default=allSearchFields)
|
||||
icontains = models.ManyToManyField(SearchFields, related_name="icontains_fields", blank=True, default=nameSearchField)
|
||||
istartswith = models.ManyToManyField(SearchFields, related_name="istartswith_fields", blank=True)
|
||||
trigram = models.ManyToManyField(SearchFields, related_name="trigram_fields", blank=True, default=nameSearchField)
|
||||
fulltext = models.ManyToManyField(SearchFields, related_name="fulltext_fields", blank=True)
|
||||
trigram_threshold = models.DecimalField(default=0.1, decimal_places=2, max_digits=3)
|
||||
|
||||
|
||||
class UserFile(ExportModelOperationsMixin('user_files'), models.Model, PermissionModelMixin):
|
||||
name = models.CharField(max_length=128)
|
||||
file = models.FileField(upload_to='files/')
|
||||
@ -708,3 +896,27 @@ class UserFile(ExportModelOperationsMixin('user_files'), models.Model, Permissio
|
||||
self.file.name = f'{uuid.uuid4()}' + pathlib.Path(self.file.name).suffix
|
||||
self.file_size_kb = round(self.file.size / 1000)
|
||||
super(UserFile, self).save(*args, **kwargs)
|
||||
|
||||
|
||||
class Automation(ExportModelOperationsMixin('automations'), models.Model, PermissionModelMixin):
|
||||
FOOD_ALIAS = 'FOOD_ALIAS'
|
||||
UNIT_ALIAS = 'UNIT_ALIAS'
|
||||
KEYWORD_ALIAS = 'KEYWORD_ALIAS'
|
||||
|
||||
type = models.CharField(max_length=128,
|
||||
choices=((FOOD_ALIAS, _('Food Alias')), (UNIT_ALIAS, _('Unit Alias')), (KEYWORD_ALIAS, _('Keyword Alias')),))
|
||||
name = models.CharField(max_length=128, default='')
|
||||
description = models.TextField(blank=True, null=True)
|
||||
|
||||
param_1 = models.CharField(max_length=128, blank=True, null=True)
|
||||
param_2 = models.CharField(max_length=128, blank=True, null=True)
|
||||
param_3 = models.CharField(max_length=128, blank=True, null=True)
|
||||
|
||||
disabled = models.BooleanField(default=False)
|
||||
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
created_by = models.ForeignKey(User, on_delete=models.CASCADE)
|
||||
|
||||
objects = ScopedManager(space='space')
|
||||
space = models.ForeignKey(Space, on_delete=models.CASCADE)
|
||||
|
@ -1,6 +1,5 @@
|
||||
import io
|
||||
import os
|
||||
import tempfile
|
||||
from datetime import datetime
|
||||
from os import listdir
|
||||
from os.path import isfile, join
|
||||
|
112
cookbook/schemas.py
Normal file
112
cookbook/schemas.py
Normal file
@ -0,0 +1,112 @@
|
||||
from rest_framework.schemas.openapi import AutoSchema
|
||||
from rest_framework.schemas.utils import is_list_view
|
||||
|
||||
|
||||
# TODO move to separate class to cleanup
|
||||
class RecipeSchema(AutoSchema):
|
||||
def get_path_parameters(self, path, method):
|
||||
if not is_list_view(path, method, self.view):
|
||||
return super(RecipeSchema, self).get_path_parameters(path, method)
|
||||
|
||||
parameters = super().get_path_parameters(path, method)
|
||||
parameters.append({
|
||||
"name": 'query', "in": "query", "required": False,
|
||||
"description": 'Query string matched (fuzzy) against recipe name. In the future also fulltext search.',
|
||||
'schema': {'type': 'string', },
|
||||
})
|
||||
parameters.append({
|
||||
"name": 'keywords', "in": "query", "required": False,
|
||||
"description": 'Id of keyword a recipe should have. For multiple repeat parameter.',
|
||||
'schema': {'type': 'string', },
|
||||
})
|
||||
parameters.append({
|
||||
"name": 'foods', "in": "query", "required": False,
|
||||
"description": 'Id of food a recipe should have. For multiple repeat parameter.',
|
||||
'schema': {'type': 'string', },
|
||||
})
|
||||
parameters.append({
|
||||
"name": 'units', "in": "query", "required": False,
|
||||
"description": 'Id of unit a recipe should have.',
|
||||
'schema': {'type': 'int', },
|
||||
})
|
||||
parameters.append({
|
||||
"name": 'rating', "in": "query", "required": False,
|
||||
"description": 'Id of unit a recipe should have.',
|
||||
'schema': {'type': 'int', },
|
||||
})
|
||||
parameters.append({
|
||||
"name": 'books', "in": "query", "required": False,
|
||||
"description": 'Id of book a recipe should have. For multiple repeat parameter.',
|
||||
'schema': {'type': 'string', },
|
||||
})
|
||||
parameters.append({
|
||||
"name": 'keywords_or', "in": "query", "required": False,
|
||||
"description": 'If recipe should have all (AND) or any (OR) of the provided keywords.',
|
||||
'schema': {'type': 'string', },
|
||||
})
|
||||
parameters.append({
|
||||
"name": 'foods_or', "in": "query", "required": False,
|
||||
"description": 'If recipe should have all (AND) or any (OR) any of the provided foods.',
|
||||
'schema': {'type': 'string', },
|
||||
})
|
||||
parameters.append({
|
||||
"name": 'books_or', "in": "query", "required": False,
|
||||
"description": 'If recipe should be in all (AND) or any (OR) any of the provided books.',
|
||||
'schema': {'type': 'string', },
|
||||
})
|
||||
parameters.append({
|
||||
"name": 'internal', "in": "query", "required": False,
|
||||
"description": 'true or false. If only internal recipes should be returned or not.',
|
||||
'schema': {'type': 'string', },
|
||||
})
|
||||
parameters.append({
|
||||
"name": 'random', "in": "query", "required": False,
|
||||
"description": 'true or false. returns the results in randomized order.',
|
||||
'schema': {'type': 'string', },
|
||||
})
|
||||
parameters.append({
|
||||
"name": 'new', "in": "query", "required": False,
|
||||
"description": 'true or false. returns new results first in search results',
|
||||
'schema': {'type': 'string', },
|
||||
})
|
||||
return parameters
|
||||
|
||||
|
||||
class TreeSchema(AutoSchema):
|
||||
def get_path_parameters(self, path, method):
|
||||
if not is_list_view(path, method, self.view):
|
||||
return super(TreeSchema, self).get_path_parameters(path, method)
|
||||
|
||||
api_name = path.split('/')[2]
|
||||
parameters = super().get_path_parameters(path, method)
|
||||
parameters.append({
|
||||
"name": 'query', "in": "query", "required": False,
|
||||
"description": 'Query string matched against {} name.'.format(api_name),
|
||||
'schema': {'type': 'string', },
|
||||
})
|
||||
parameters.append({
|
||||
"name": 'root', "in": "query", "required": False,
|
||||
"description": 'Return first level children of {obj} with ID [int]. Integer 0 will return root {obj}s.'.format(obj=api_name),
|
||||
'schema': {'type': 'int', },
|
||||
})
|
||||
parameters.append({
|
||||
"name": 'tree', "in": "query", "required": False,
|
||||
"description": 'Return all self and children of {} with ID [int].'.format(api_name),
|
||||
'schema': {'type': 'int', },
|
||||
})
|
||||
return parameters
|
||||
|
||||
|
||||
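For illustration, the root and tree parameters documented above translate into requests like the following; the endpoint URL and IDs are assumptions, not taken from the diff.

# Hypothetical client-side usage of a tree endpoint (route assumed for illustration).
import requests

base = "https://recipes.example.com/api/keyword/"
roots = requests.get(base, params={"root": 0}).json()     # first-level (root) keywords
children = requests.get(base, params={"root": 5}).json()  # direct children of keyword 5
subtree = requests.get(base, params={"tree": 5}).json()   # keyword 5 and all of its descendants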
class FilterSchema(AutoSchema):
|
||||
def get_path_parameters(self, path, method):
|
||||
if not is_list_view(path, method, self.view):
|
||||
return super(FilterSchema, self).get_path_parameters(path, method)
|
||||
|
||||
api_name = path.split('/')[2]
|
||||
parameters = super().get_path_parameters(path, method)
|
||||
parameters.append({
|
||||
"name": 'query', "in": "query", "required": False,
|
||||
"description": 'Query string matched against {} name.'.format(api_name),
|
||||
'schema': {'type': 'string', },
|
||||
})
|
||||
return parameters
|
@ -1,8 +1,11 @@
|
||||
import random
|
||||
from datetime import timedelta
|
||||
from decimal import Decimal
|
||||
from gettext import gettext as _
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
from django.db.models import QuerySet, Sum, Avg
|
||||
from django.db.models import Avg, QuerySet, Sum
|
||||
from django.urls import reverse
|
||||
from django.utils import timezone
|
||||
from drf_writable_nested import (UniqueFieldsMixin,
|
||||
WritableNestedModelSerializer)
|
||||
from rest_framework import serializers
|
||||
@ -14,10 +17,49 @@ from cookbook.models import (Comment, CookLog, Food, Ingredient, Keyword,
|
||||
ShareLink, ShoppingList, ShoppingListEntry,
|
||||
ShoppingListRecipe, Step, Storage, Sync, SyncLog,
|
||||
Unit, UserPreference, ViewLog, SupermarketCategory, Supermarket,
|
||||
SupermarketCategoryRelation, ImportLog, BookmarkletImport, UserFile)
|
||||
SupermarketCategoryRelation, ImportLog, BookmarkletImport, UserFile, Automation)
|
||||
from cookbook.templatetags.custom_tags import markdown
|
||||
|
||||
|
||||
class ExtendedRecipeMixin(serializers.ModelSerializer):
|
||||
# adds image and recipe count to serializer when query param extended=1
|
||||
image = serializers.SerializerMethodField('get_image')
|
||||
numrecipe = serializers.SerializerMethodField('count_recipes')
|
||||
recipe_filter = None
|
||||
|
||||
def get_fields(self, *args, **kwargs):
|
||||
fields = super().get_fields(*args, **kwargs)
|
||||
try:
|
||||
api_serializer = self.context['view'].serializer_class
|
||||
except KeyError:
|
||||
api_serializer = None
|
||||
# extended values are computationally expensive and not needed in normal circumstances
|
||||
if self.context.get('request', False) and bool(int(self.context['request'].query_params.get('extended', False))) and self.__class__ == api_serializer:
|
||||
return fields
|
||||
else:
|
||||
del fields['image']
|
||||
del fields['numrecipe']
|
||||
return fields
|
||||
|
||||
def get_image(self, obj):
|
||||
# TODO add caching
|
||||
recipes = Recipe.objects.filter(**{self.recipe_filter: obj}, space=obj.space).exclude(image__isnull=True).exclude(image__exact='')
|
||||
try:
|
||||
if recipes.count() == 0 and obj.has_children():
|
||||
obj__in = self.recipe_filter + '__in'
|
||||
recipes = Recipe.objects.filter(**{obj__in: obj.get_descendants()}, space=obj.space).exclude(image__isnull=True).exclude(image__exact='') # if no recipes found - check whole tree
|
||||
except AttributeError:
|
||||
# probably not a tree
|
||||
pass
|
||||
if recipes.count() != 0:
|
||||
return random.choice(recipes).image.url
|
||||
else:
|
||||
return None
|
||||
|
||||
def count_recipes(self, obj):
|
||||
return Recipe.objects.filter(**{self.recipe_filter: obj}, space=obj.space).count()
|
||||
|
||||
|
||||
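The extended toggle described in the comment above is opt-in per request and only applies when the serializer is the view's own serializer, not a nested one. A hypothetical example of the difference on a list endpoint (route assumed, not part of the diff).

# Hypothetical illustration: the extended flag adds the expensive image/numrecipe fields.
import requests

url = "https://recipes.example.com/api/food/"
slim = requests.get(url).json()                          # plain list, no image or numrecipe
full = requests.get(url, params={"extended": 1}).json()  # includes image and recipe count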
class CustomDecimalField(serializers.Field):
|
||||
"""
|
||||
Custom decimal field to normalize useless decimal places
|
||||
@ -25,10 +67,9 @@ class CustomDecimalField(serializers.Field):
|
||||
"""
|
||||
|
||||
def to_representation(self, value):
|
||||
if isinstance(value, Decimal):
|
||||
return value.normalize()
|
||||
else:
|
||||
return Decimal(value).normalize()
|
||||
if not isinstance(value, Decimal):
|
||||
value = Decimal(value)
|
||||
return round(value, 2).normalize()
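# A couple of concrete values for the rounding above (illustrative only, not part of the diff):
from decimal import Decimal
assert round(Decimal("3.333333"), 2).normalize() == Decimal("3.33")
assert round(Decimal("5.00"), 2).normalize() == Decimal("5")  # trailing zeros are dropped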
|
||||
|
||||
def to_internal_value(self, data):
|
||||
if type(data) == int or type(data) == float:
|
||||
@ -45,7 +86,7 @@ class CustomDecimalField(serializers.Field):
|
||||
class SpaceFilterSerializer(serializers.ListSerializer):
|
||||
|
||||
def to_representation(self, data):
|
||||
if type(data) == QuerySet and data.query.is_sliced:
|
||||
if (type(data) == QuerySet and data.query.is_sliced):
|
||||
# if query is sliced it came from api request not nested serializer
|
||||
return super().to_representation(data)
|
||||
if self.child.Meta.model == User:
|
||||
@ -61,7 +102,7 @@ class SpacedModelSerializer(serializers.ModelSerializer):
|
||||
return super().create(validated_data)
|
||||
|
||||
|
||||
class MealTypeSerializer(SpacedModelSerializer):
|
||||
class MealTypeSerializer(SpacedModelSerializer, WritableNestedModelSerializer):
|
||||
|
||||
def create(self, validated_data):
|
||||
validated_data['created_by'] = self.context['request'].user
|
||||
@ -70,7 +111,7 @@ class MealTypeSerializer(SpacedModelSerializer):
|
||||
class Meta:
|
||||
list_serializer_class = SpaceFilterSerializer
|
||||
model = MealType
|
||||
fields = ('id', 'name', 'order', 'created_by')
|
||||
fields = ('id', 'name', 'order', 'icon', 'color', 'default', 'created_by')
|
||||
read_only_fields = ('created_by',)
|
||||
|
||||
|
||||
@ -105,15 +146,19 @@ class UserPreferenceSerializer(serializers.ModelSerializer):
|
||||
class UserFileSerializer(serializers.ModelSerializer):
|
||||
|
||||
def check_file_limit(self, validated_data):
|
||||
if 'file' in validated_data:
|
||||
if self.context['request'].space.max_file_storage_mb == -1:
|
||||
raise ValidationError(_('File uploads are not enabled for this Space.'))
|
||||
|
||||
try:
|
||||
current_file_size_mb = UserFile.objects.filter(space=self.context['request'].space).aggregate(Sum('file_size_kb'))['file_size_kb__sum'] / 1000
|
||||
current_file_size_mb = \
|
||||
UserFile.objects.filter(space=self.context['request'].space).aggregate(Sum('file_size_kb'))[
|
||||
'file_size_kb__sum'] / 1000
|
||||
except TypeError:
|
||||
current_file_size_mb = 0
|
||||
|
||||
if (validated_data['file'].size / 1000 / 1000 + current_file_size_mb - 5) > self.context['request'].space.max_file_storage_mb != 0:
|
||||
if ((validated_data['file'].size / 1000 / 1000 + current_file_size_mb - 5)
|
||||
> self.context['request'].space.max_file_storage_mb != 0):
|
||||
raise ValidationError(_('You have reached your file upload limit.'))
|
||||
|
||||
def create(self, validated_data):
|
||||
@ -198,28 +243,63 @@ class KeywordLabelSerializer(serializers.ModelSerializer):
|
||||
read_only_fields = ('id', 'label')
|
||||
|
||||
|
||||
class KeywordSerializer(UniqueFieldsMixin, serializers.ModelSerializer):
|
||||
class KeywordSerializer(UniqueFieldsMixin, ExtendedRecipeMixin):
|
||||
label = serializers.SerializerMethodField('get_label')
|
||||
# image = serializers.SerializerMethodField('get_image')
|
||||
# numrecipe = serializers.SerializerMethodField('count_recipes')
|
||||
recipe_filter = 'keywords'
|
||||
|
||||
def get_label(self, obj):
|
||||
return str(obj)
|
||||
|
||||
# def get_image(self, obj):
|
||||
# recipes = obj.recipe_set.all().filter(space=obj.space).exclude(image__isnull=True).exclude(image__exact='')
|
||||
# if recipes.count() == 0 and obj.has_children():
|
||||
# recipes = Recipe.objects.filter(keywords__in=obj.get_descendants(), space=obj.space).exclude(image__isnull=True).exclude(image__exact='') # if no recipes found - check whole tree
|
||||
# if recipes.count() != 0:
|
||||
# return random.choice(recipes).image.url
|
||||
# else:
|
||||
# return None
|
||||
|
||||
# def count_recipes(self, obj):
|
||||
# return obj.recipe_set.filter(space=self.context['request'].space).all().count()
|
||||
|
||||
def create(self, validated_data):
|
||||
obj, created = Keyword.objects.get_or_create(name=validated_data['name'].strip(), space=self.context['request'].space)
|
||||
# since multi select tags don't have IDs,
# duplicate names might be routed to create
|
||||
validated_data['name'] = validated_data['name'].strip()
|
||||
validated_data['space'] = self.context['request'].space
|
||||
obj, created = Keyword.objects.get_or_create(**validated_data)
|
||||
return obj
|
||||
|
||||
class Meta:
|
||||
list_serializer_class = SpaceFilterSerializer
|
||||
model = Keyword
|
||||
fields = ('id', 'name', 'icon', 'label', 'description', 'created_at', 'updated_at')
|
||||
|
||||
read_only_fields = ('id',)
|
||||
fields = (
|
||||
'id', 'name', 'icon', 'label', 'description', 'image', 'parent', 'numchild', 'numrecipe', 'created_at',
|
||||
'updated_at')
|
||||
read_only_fields = ('id', 'numchild', 'parent', 'image')
|
||||
|
||||
|
||||
class UnitSerializer(UniqueFieldsMixin, serializers.ModelSerializer):
|
||||
class UnitSerializer(UniqueFieldsMixin, ExtendedRecipeMixin):
|
||||
# image = serializers.SerializerMethodField('get_image')
|
||||
# numrecipe = serializers.SerializerMethodField('count_recipes')
|
||||
recipe_filter = 'steps__ingredients__unit'
|
||||
|
||||
# def get_image(self, obj):
|
||||
# recipes = Recipe.objects.filter(steps__ingredients__unit=obj, space=obj.space).exclude(image__isnull=True).exclude(image__exact='')
|
||||
|
||||
# if recipes.count() != 0:
|
||||
# return random.choice(recipes).image.url
|
||||
# else:
|
||||
# return None
|
||||
|
||||
# def count_recipes(self, obj):
|
||||
# return Recipe.objects.filter(steps__ingredients__unit=obj, space=obj.space).count()
|
||||
|
||||
def create(self, validated_data):
|
||||
obj, created = Unit.objects.get_or_create(name=validated_data['name'].strip(), space=self.context['request'].space)
|
||||
validated_data['name'] = validated_data['name'].strip()
|
||||
validated_data['space'] = self.context['request'].space
|
||||
obj, created = Unit.objects.get_or_create(**validated_data)
|
||||
return obj
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
@ -228,14 +308,16 @@ class UnitSerializer(UniqueFieldsMixin, serializers.ModelSerializer):
|
||||
|
||||
class Meta:
|
||||
model = Unit
|
||||
fields = ('id', 'name', 'description')
|
||||
read_only_fields = ('id',)
|
||||
fields = ('id', 'name', 'description', 'numrecipe', 'image')
|
||||
read_only_fields = ('id', 'numrecipe', 'image')
|
||||
|
||||
|
||||
class SupermarketCategorySerializer(UniqueFieldsMixin, WritableNestedModelSerializer):
|
||||
|
||||
def create(self, validated_data):
|
||||
obj, created = SupermarketCategory.objects.get_or_create(name=validated_data['name'], space=self.context['request'].space)
|
||||
validated_data['name'] = validated_data['name'].strip()
|
||||
validated_data['space'] = self.context['request'].space
|
||||
obj, created = SupermarketCategory.objects.get_or_create(**validated_data)
|
||||
return obj
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
@ -243,7 +325,7 @@ class SupermarketCategorySerializer(UniqueFieldsMixin, WritableNestedModelSerial
|
||||
|
||||
class Meta:
|
||||
model = SupermarketCategory
|
||||
fields = ('id', 'name')
|
||||
fields = ('id', 'name', 'description')
|
||||
|
||||
|
||||
class SupermarketCategoryRelationSerializer(WritableNestedModelSerializer):
|
||||
@ -259,14 +341,55 @@ class SupermarketSerializer(UniqueFieldsMixin, SpacedModelSerializer):
|
||||
|
||||
class Meta:
|
||||
model = Supermarket
|
||||
fields = ('id', 'name', 'category_to_supermarket')
|
||||
fields = ('id', 'name', 'description', 'category_to_supermarket')
|
||||
|
||||
|
||||
class FoodSerializer(UniqueFieldsMixin, WritableNestedModelSerializer):
|
||||
class RecipeSimpleSerializer(serializers.ModelSerializer):
|
||||
url = serializers.SerializerMethodField('get_url')
|
||||
|
||||
def get_url(self, obj):
|
||||
return reverse('view_recipe', args=[obj.id])
|
||||
|
||||
class Meta:
|
||||
model = Recipe
|
||||
fields = ('id', 'name', 'url')
|
||||
read_only_fields = ['id', 'name', 'url']
|
||||
|
||||
|
||||
class FoodSerializer(UniqueFieldsMixin, WritableNestedModelSerializer, ExtendedRecipeMixin):
|
||||
supermarket_category = SupermarketCategorySerializer(allow_null=True, required=False)
|
||||
recipe = RecipeSimpleSerializer(allow_null=True, required=False)
|
||||
# image = serializers.SerializerMethodField('get_image')
|
||||
# numrecipe = serializers.SerializerMethodField('count_recipes')
|
||||
recipe_filter = 'steps__ingredients__food'
|
||||
|
||||
# def get_image(self, obj):
|
||||
# if obj.recipe and obj.space == obj.recipe.space:
|
||||
# if obj.recipe.image and obj.recipe.image != '':
|
||||
# return obj.recipe.image.url
|
||||
# # if food is not also a recipe, look for recipe images that use the food
|
||||
# recipes = Recipe.objects.filter(steps__ingredients__food=obj, space=obj.space).exclude(image__isnull=True).exclude(image__exact='')
|
||||
# # if no recipes found - check whole tree
|
||||
# if recipes.count() == 0 and obj.has_children():
|
||||
# recipes = Recipe.objects.filter(steps__ingredients__food__in=obj.get_descendants(), space=obj.space).exclude(image__isnull=True).exclude(image__exact='')
|
||||
|
||||
# if recipes.count() != 0:
|
||||
# return random.choice(recipes).image.url
|
||||
# else:
|
||||
# return None
|
||||
|
||||
# def count_recipes(self, obj):
|
||||
# return Recipe.objects.filter(steps__ingredients__food=obj, space=obj.space).count()
|
||||
|
||||
def create(self, validated_data):
|
||||
obj, created = Food.objects.get_or_create(name=validated_data['name'].strip(), space=self.context['request'].space)
|
||||
validated_data['name'] = validated_data['name'].strip()
|
||||
validated_data['space'] = self.context['request'].space
|
||||
# supermarket category must be handled manually because Food.objects.get_or_create does not create nested serializers, unlike the serializer's super().create()
|
||||
if 'supermarket_category' in validated_data and validated_data['supermarket_category']:
|
||||
validated_data['supermarket_category'], sc_created = SupermarketCategory.objects.get_or_create(
|
||||
name=validated_data.pop('supermarket_category')['name'],
|
||||
space=self.context['request'].space)
|
||||
obj, created = Food.objects.get_or_create(**validated_data)
|
||||
return obj
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
@ -275,7 +398,8 @@ class FoodSerializer(UniqueFieldsMixin, WritableNestedModelSerializer):
|
||||
|
||||
class Meta:
|
||||
model = Food
|
||||
fields = ('id', 'name', 'recipe', 'ignore_shopping', 'supermarket_category')
|
||||
fields = ('id', 'name', 'description', 'recipe', 'ignore_shopping', 'supermarket_category', 'image', 'parent', 'numchild', 'numrecipe')
|
||||
read_only_fields = ('id', 'numchild', 'parent', 'image')
|
||||
|
||||
|
||||
class IngredientSerializer(WritableNestedModelSerializer):
|
||||
@ -350,7 +474,8 @@ class NutritionInformationSerializer(serializers.ModelSerializer):
|
||||
class RecipeBaseSerializer(WritableNestedModelSerializer):
|
||||
def get_recipe_rating(self, obj):
|
||||
try:
|
||||
rating = obj.cooklog_set.filter(created_by=self.context['request'].user, rating__gt=0).aggregate(Avg('rating'))
|
||||
rating = obj.cooklog_set.filter(created_by=self.context['request'].user, rating__gt=0).aggregate(
|
||||
Avg('rating'))
|
||||
if rating['rating__avg']:
|
||||
return rating['rating__avg']
|
||||
except TypeError:
|
||||
@ -366,11 +491,19 @@ class RecipeBaseSerializer(WritableNestedModelSerializer):
|
||||
pass
|
||||
return None
|
||||
|
||||
# TODO make days of new recipe a setting
|
||||
def is_recipe_new(self, obj):
|
||||
if obj.created_at > (timezone.now() - timedelta(days=7)):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
class RecipeOverviewSerializer(RecipeBaseSerializer):
|
||||
keywords = KeywordLabelSerializer(many=True)
|
||||
rating = serializers.SerializerMethodField('get_recipe_rating')
|
||||
last_cooked = serializers.SerializerMethodField('get_recipe_last_cooked')
|
||||
new = serializers.SerializerMethodField('is_recipe_new')
|
||||
|
||||
def create(self, validated_data):
|
||||
pass
|
||||
@ -383,7 +516,7 @@ class RecipeOverviewSerializer(RecipeBaseSerializer):
|
||||
fields = (
|
||||
'id', 'name', 'description', 'image', 'keywords', 'working_time',
|
||||
'waiting_time', 'created_by', 'created_at', 'updated_at',
|
||||
'internal', 'servings', 'servings_text', 'rating', 'last_cooked',
|
||||
'internal', 'servings', 'servings_text', 'rating', 'last_cooked', 'new'
|
||||
)
|
||||
read_only_fields = ['image', 'created_by', 'created_at']
|
||||
|
||||
@ -428,7 +561,8 @@ class CommentSerializer(serializers.ModelSerializer):
|
||||
fields = '__all__'
|
||||
|
||||
|
||||
class RecipeBookSerializer(SpacedModelSerializer):
|
||||
class RecipeBookSerializer(SpacedModelSerializer, WritableNestedModelSerializer):
|
||||
shared = UserNameSerializer(many=True)
|
||||
|
||||
def create(self, validated_data):
|
||||
validated_data['created_by'] = self.context['request'].user
|
||||
@ -452,9 +586,11 @@ class RecipeBookEntrySerializer(serializers.ModelSerializer):
|
||||
|
||||
def create(self, validated_data):
|
||||
book = validated_data['book']
|
||||
recipe = validated_data['recipe']
|
||||
if not book.get_owner() == self.context['request'].user:
|
||||
raise NotFound(detail=None, code=None)
|
||||
return super().create(validated_data)
|
||||
obj, created = RecipeBookEntry.objects.get_or_create(book=book, recipe=recipe)
|
||||
return obj
|
||||
|
||||
class Meta:
|
||||
model = RecipeBookEntry
|
||||
@ -464,7 +600,8 @@ class RecipeBookEntrySerializer(serializers.ModelSerializer):
|
||||
class MealPlanSerializer(SpacedModelSerializer, WritableNestedModelSerializer):
|
||||
recipe = RecipeOverviewSerializer(required=False, allow_null=True)
|
||||
recipe_name = serializers.ReadOnlyField(source='recipe.name')
|
||||
meal_type_name = serializers.ReadOnlyField(source='meal_type.name')
|
||||
meal_type = MealTypeSerializer()
|
||||
meal_type_name = serializers.ReadOnlyField(source='meal_type.name') # TODO deprecate once the old meal plan is removed
|
||||
note_markdown = serializers.SerializerMethodField('get_note_markdown')
|
||||
servings = CustomDecimalField()
|
||||
|
||||
@ -582,7 +719,22 @@ class ImportLogSerializer(serializers.ModelSerializer):
|
||||
|
||||
class Meta:
|
||||
model = ImportLog
|
||||
fields = ('id', 'type', 'msg', 'running', 'keyword', 'total_recipes', 'imported_recipes', 'created_by', 'created_at')
|
||||
fields = (
|
||||
'id', 'type', 'msg', 'running', 'keyword', 'total_recipes', 'imported_recipes', 'created_by', 'created_at')
|
||||
read_only_fields = ('created_by',)
|
||||
|
||||
|
||||
class AutomationSerializer(serializers.ModelSerializer):
|
||||
|
||||
def create(self, validated_data):
|
||||
validated_data['created_by'] = self.context['request'].user
|
||||
validated_data['space'] = self.context['request'].space
|
||||
return super().create(validated_data)
|
||||
|
||||
class Meta:
|
||||
model = Automation
|
||||
fields = (
|
||||
'id', 'type', 'name', 'description', 'param_1', 'param_2', 'param_3', 'disabled', 'created_by',)
|
||||
read_only_fields = ('created_by',)
|
||||
|
||||
|
||||
|
47
cookbook/signals.py
Normal file
47
cookbook/signals.py
Normal file
@ -0,0 +1,47 @@
|
||||
from django.contrib.postgres.search import SearchVector
|
||||
from django.db.models.signals import post_save
|
||||
from django.dispatch import receiver
|
||||
from django.utils import translation
|
||||
|
||||
from cookbook.models import Recipe, Step
|
||||
from cookbook.managers import DICTIONARY
|
||||
|
||||
|
||||
# TODO there is probably a way to generalize this
|
||||
@receiver(post_save, sender=Recipe)
|
||||
def update_recipe_search_vector(sender, instance=None, created=False, **kwargs):
|
||||
if not instance:
|
||||
return
|
||||
|
||||
# needed to ensure search vector update doesn't trigger recursion
|
||||
if hasattr(instance, '_dirty'):
|
||||
return
|
||||
|
||||
language = DICTIONARY.get(translation.get_language(), 'simple')
|
||||
instance.name_search_vector = SearchVector('name__unaccent', weight='A', config=language)
|
||||
instance.desc_search_vector = SearchVector('description__unaccent', weight='C', config=language)
|
||||
|
||||
try:
|
||||
instance._dirty = True
|
||||
instance.save()
|
||||
finally:
|
||||
del instance._dirty
|
||||
|
||||
|
||||
@receiver(post_save, sender=Step)
|
||||
def update_step_search_vector(sender, instance=None, created=False, **kwargs):
|
||||
if not instance:
|
||||
return
|
||||
|
||||
# needed to ensure search vector update doesn't trigger recursion
|
||||
if hasattr(instance, '_dirty'):
|
||||
return
|
||||
|
||||
language = DICTIONARY.get(translation.get_language(), 'simple')
|
||||
instance.search_vector = SearchVector('instruction__unaccent', weight='B', config=language)
|
||||
|
||||
try:
|
||||
instance._dirty = True
|
||||
instance.save()
|
||||
finally:
|
||||
del instance._dirty
|
13
cookbook/static/css/app.min.css
vendored
13
cookbook/static/css/app.min.css
vendored
@ -1127,3 +1127,16 @@
|
||||
color: #000;
|
||||
background-color: #fff;
|
||||
}
|
||||
|
||||
@media (min-width: 992px) {
|
||||
.dropdown-menu-center {
|
||||
right: auto;
|
||||
left: 65%;
|
||||
-webkit-transform: translate(-65%, 0);
|
||||
-o-transform: translate(-65%, 0);
|
||||
transform: translate(-65%, 0);
|
||||
}
|
||||
.dropdown-menu-center-large {
|
||||
min-width: 28rem;
|
||||
}
|
||||
}
|
||||
|
@ -1,3 +0,0 @@
|
||||
$(document).ready(function () {
|
||||
$('.emojiwidget').emojioneArea();
|
||||
});
|
File diff suppressed because one or more lines are too long
@ -1,560 +0,0 @@
|
||||
.dropdown-menu.textcomplete-dropdown[data-strategy="emojionearea"] {
|
||||
position: absolute;
|
||||
z-index: 1000;
|
||||
min-width: 160px;
|
||||
padding: 5px 0;
|
||||
margin: 2px 0 0;
|
||||
font-size: 14px;
|
||||
text-align: left;
|
||||
list-style: none;
|
||||
background-color: #fff;
|
||||
-webkit-background-clip: padding-box;
|
||||
background-clip: padding-box;
|
||||
border: 1px solid #ccc;
|
||||
border: 1px solid rgba(0, 0, 0, 0.15);
|
||||
-moz-border-radius: 4px;
|
||||
-webkit-border-radius: 4px;
|
||||
border-radius: 4px;
|
||||
-moz-box-shadow: 0 6px 12px rgba(0, 0, 0, 0.175);
|
||||
-webkit-box-shadow: 0 6px 12px rgba(0, 0, 0, 0.175);
|
||||
box-shadow: 0 6px 12px rgba(0, 0, 0, 0.175); }
|
||||
.dropdown-menu.textcomplete-dropdown[data-strategy="emojionearea"] li.textcomplete-item {
|
||||
font-size: 14px;
|
||||
padding: 1px 3px;
|
||||
border: 0; }
|
||||
.dropdown-menu.textcomplete-dropdown[data-strategy="emojionearea"] li.textcomplete-item a {
|
||||
text-decoration: none;
|
||||
display: block;
|
||||
height: 100%;
|
||||
line-height: 1.8em;
|
||||
padding: 0 1.54em 0 .615em;
|
||||
color: #4f4f4f; }
|
||||
.dropdown-menu.textcomplete-dropdown[data-strategy="emojionearea"] li.textcomplete-item:hover, .dropdown-menu.textcomplete-dropdown[data-strategy="emojionearea"] li.textcomplete-item.active {
|
||||
background-color: #e4e4e4; }
|
||||
.dropdown-menu.textcomplete-dropdown[data-strategy="emojionearea"] li.textcomplete-item:hover a, .dropdown-menu.textcomplete-dropdown[data-strategy="emojionearea"] li.textcomplete-item.active a {
|
||||
color: #333; }
|
||||
.dropdown-menu.textcomplete-dropdown[data-strategy="emojionearea"] li.textcomplete-item .emojioneemoji {
|
||||
font-size: inherit;
|
||||
height: 2ex;
|
||||
width: 2.1ex;
|
||||
min-height: 20px;
|
||||
min-width: 20px;
|
||||
display: inline-block;
|
||||
margin: 0 5px .2ex 0;
|
||||
line-height: normal;
|
||||
vertical-align: middle;
|
||||
max-width: 100%;
|
||||
top: 0; }
|
||||
|
||||
.emojionearea-text [class*=emojione-], .emojionearea-text .emojioneemoji {
|
||||
font-size: inherit;
|
||||
height: 2ex;
|
||||
width: 2.1ex;
|
||||
min-height: 20px;
|
||||
min-width: 20px;
|
||||
display: inline-block;
|
||||
margin: -.2ex .15em .2ex;
|
||||
line-height: normal;
|
||||
vertical-align: middle;
|
||||
max-width: 100%;
|
||||
top: 0; }
|
||||
|
||||
.emojionearea, .emojionearea * {
|
||||
-moz-box-sizing: border-box;
|
||||
-webkit-box-sizing: border-box;
|
||||
box-sizing: border-box; }
|
||||
.emojionearea.emojionearea-disable {
|
||||
position: relative;
|
||||
background-color: #eee;
|
||||
-moz-user-select: -moz-none;
|
||||
-ms-user-select: none;
|
||||
-webkit-user-select: none;
|
||||
user-select: none; }
|
||||
.emojionearea.emojionearea-disable:before {
|
||||
content: "";
|
||||
display: block;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
z-index: 1;
|
||||
opacity: 0.3;
|
||||
position: absolute;
|
||||
background-color: #eee; }
|
||||
.emojionearea, .emojionearea.form-control {
|
||||
display: block;
|
||||
position: relative !important;
|
||||
width: 100%;
|
||||
height: auto;
|
||||
padding: 0;
|
||||
font-size: 14px;
|
||||
border: 0;
|
||||
background-color: #FFFFFF;
|
||||
border: 1px solid #CCCCCC;
|
||||
-moz-border-radius: 3px;
|
||||
-webkit-border-radius: 3px;
|
||||
border-radius: 3px;
|
||||
-moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
|
||||
-webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
|
||||
box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
|
||||
-moz-transition: border-color 0.15s ease-in-out, -moz-box-shadow 0.15s ease-in-out;
|
||||
-o-transition: border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out;
|
||||
-webkit-transition: border-color 0.15s ease-in-out, -webkit-box-shadow 0.15s ease-in-out;
|
||||
transition: border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out; }
|
||||
.emojionearea.focused {
|
||||
border-color: #66AFE9;
|
||||
outline: 0;
|
||||
-moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 8px rgba(102, 175, 233, 0.6);
|
||||
-webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 8px rgba(102, 175, 233, 0.6);
|
||||
box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 8px rgba(102, 175, 233, 0.6); }
|
||||
.emojionearea .emojionearea-editor {
|
||||
display: block;
|
||||
height: auto;
|
||||
min-height: 8em;
|
||||
max-height: 15em;
|
||||
overflow: auto;
|
||||
padding: 6px 24px 6px 12px;
|
||||
line-height: 1.42857143;
|
||||
font-size: inherit;
|
||||
color: #555555;
|
||||
background-color: transparent;
|
||||
border: 0;
|
||||
cursor: text;
|
||||
margin-right: 1px;
|
||||
-moz-border-radius: 0;
|
||||
-webkit-border-radius: 0;
|
||||
border-radius: 0;
|
||||
-moz-box-shadow: none;
|
||||
-webkit-box-shadow: none;
|
||||
box-shadow: none; }
|
||||
.emojionearea .emojionearea-editor:empty:before {
|
||||
content: attr(placeholder);
|
||||
display: block;
|
||||
color: #BBBBBB; }
|
||||
.emojionearea .emojionearea-editor:focus {
|
||||
border: 0;
|
||||
outline: 0;
|
||||
-moz-box-shadow: none;
|
||||
-webkit-box-shadow: none;
|
||||
box-shadow: none; }
|
||||
.emojionearea .emojionearea-editor [class*=emojione-], .emojionearea .emojionearea-editor .emojioneemoji {
|
||||
font-size: inherit;
|
||||
height: 2ex;
|
||||
width: 2.1ex;
|
||||
min-height: 20px;
|
||||
min-width: 20px;
|
||||
display: inline-block;
|
||||
margin: -.2ex .15em .2ex;
|
||||
line-height: normal;
|
||||
vertical-align: middle;
|
||||
max-width: 100%;
|
||||
top: 0; }
|
||||
.emojionearea.emojionearea-inline {
|
||||
height: 34px; }
|
||||
.emojionearea.emojionearea-inline > .emojionearea-editor {
|
||||
height: 32px;
|
||||
min-height: 20px;
|
||||
overflow: hidden;
|
||||
white-space: nowrap;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 12px;
|
||||
right: 24px;
|
||||
padding: 6px 0; }
|
||||
.emojionearea.emojionearea-inline > .emojionearea-button {
|
||||
top: 4px; }
|
||||
.emojionearea .emojionearea-button {
|
||||
z-index: 5;
|
||||
position: absolute;
|
||||
right: 3px;
|
||||
top: 3px;
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
opacity: 0.6;
|
||||
cursor: pointer;
|
||||
-moz-transition: opacity 300ms ease-in-out;
|
||||
-o-transition: opacity 300ms ease-in-out;
|
||||
-webkit-transition: opacity 300ms ease-in-out;
|
||||
transition: opacity 300ms ease-in-out; }
|
||||
.emojionearea .emojionearea-button:hover {
|
||||
opacity: 1; }
|
||||
.emojionearea .emojionearea-button > div {
|
||||
display: block;
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
position: absolute;
|
||||
-moz-transition: all 400ms ease-in-out;
|
||||
-o-transition: all 400ms ease-in-out;
|
||||
-webkit-transition: all 400ms ease-in-out;
|
||||
transition: all 400ms ease-in-out; }
|
||||
.emojionearea .emojionearea-button > div.emojionearea-button-open {
|
||||
background-position: 0 -24px;
|
||||
filter: progid:DXImageTransform.Microsoft.Alpha(enabled=false);
|
||||
opacity: 1; }
|
||||
.emojionearea .emojionearea-button > div.emojionearea-button-close {
|
||||
background-position: 0 0;
|
||||
-webkit-transform: rotate(-45deg);
|
||||
-o-transform: rotate(-45deg);
|
||||
transform: rotate(-45deg);
|
||||
filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=0);
|
||||
opacity: 0; }
|
||||
.emojionearea .emojionearea-button.active > div.emojionearea-button-open {
|
||||
-webkit-transform: rotate(45deg);
|
||||
-o-transform: rotate(45deg);
|
||||
transform: rotate(45deg);
|
||||
filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=0);
|
||||
opacity: 0; }
|
||||
.emojionearea .emojionearea-button.active > div.emojionearea-button-close {
|
||||
-webkit-transform: rotate(0deg);
|
||||
-o-transform: rotate(0deg);
|
||||
transform: rotate(0deg);
|
||||
filter: progid:DXImageTransform.Microsoft.Alpha(enabled=false);
|
||||
opacity: 1; }
|
||||
.emojionearea .emojionearea-picker {
|
||||
background: #FFFFFF;
|
||||
position: absolute;
|
||||
-moz-box-shadow: 0 1px 5px rgba(0, 0, 0, 0.32);
|
||||
-webkit-box-shadow: 0 1px 5px rgba(0, 0, 0, 0.32);
|
||||
box-shadow: 0 1px 5px rgba(0, 0, 0, 0.32);
|
||||
-moz-border-radius: 5px;
|
||||
-webkit-border-radius: 5px;
|
||||
border-radius: 5px;
|
||||
height: 276px;
|
||||
width: 316px;
|
||||
top: -15px;
|
||||
right: -15px;
|
||||
z-index: 90;
|
||||
-moz-transition: all 0.25s ease-in-out;
|
||||
-o-transition: all 0.25s ease-in-out;
|
||||
-webkit-transition: all 0.25s ease-in-out;
|
||||
transition: all 0.25s ease-in-out;
|
||||
filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=0);
|
||||
opacity: 0;
|
||||
-moz-user-select: -moz-none;
|
||||
-ms-user-select: none;
|
||||
-webkit-user-select: none;
|
||||
user-select: none; }
|
||||
.emojionearea .emojionearea-picker.hidden {
|
||||
display: none; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-wrapper {
|
||||
position: relative;
|
||||
height: 276px;
|
||||
width: 316px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-wrapper:after {
|
||||
content: "";
|
||||
display: block;
|
||||
position: absolute;
|
||||
background-repeat: no-repeat;
|
||||
z-index: 91; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-filters {
|
||||
width: 100%;
|
||||
position: absolute;
|
||||
z-index: 95; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-filters {
|
||||
background: #F5F7F9;
|
||||
padding: 0 0 0 7px;
|
||||
height: 40px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-filters .emojionearea-filter {
|
||||
display: block;
|
||||
float: left;
|
||||
height: 40px;
|
||||
width: 32px;
|
||||
filter: inherit;
|
||||
padding: 7px 1px 0;
|
||||
cursor: pointer;
|
||||
-webkit-filter: grayscale(1);
|
||||
filter: grayscale(1); }
|
||||
.emojionearea .emojionearea-picker .emojionearea-filters .emojionearea-filter.active {
|
||||
background: #fff; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-filters .emojionearea-filter.active, .emojionearea .emojionearea-picker .emojionearea-filters .emojionearea-filter:hover {
|
||||
-webkit-filter: grayscale(0);
|
||||
filter: grayscale(0); }
|
||||
.emojionearea .emojionearea-picker .emojionearea-filters .emojionearea-filter > i {
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
top: 0; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-filters .emojionearea-filter > img {
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
margin: 0 3px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-search-panel {
|
||||
height: 30px;
|
||||
position: absolute;
|
||||
z-index: 95;
|
||||
top: 40px;
|
||||
left: 0;
|
||||
right: 0;
|
||||
padding: 5px 0 5px 8px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-search-panel .emojionearea-tones {
|
||||
float: right;
|
||||
margin-right: 10px;
|
||||
margin-top: -1px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-tones-panel .emojionearea-tones {
|
||||
position: absolute;
|
||||
top: 4px;
|
||||
left: 171px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-search {
|
||||
float: left;
|
||||
padding: 0;
|
||||
height: 20px;
|
||||
width: 160px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-search > input {
|
||||
outline: none;
|
||||
width: 160px;
|
||||
min-width: 160px;
|
||||
height: 20px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-tones {
|
||||
padding: 0;
|
||||
width: 120px;
|
||||
height: 20px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-tones > .btn-tone {
|
||||
display: inline-block;
|
||||
padding: 0;
|
||||
border: 0;
|
||||
vertical-align: middle;
|
||||
outline: none;
|
||||
background: transparent;
|
||||
cursor: pointer;
|
||||
position: relative; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-tones > .btn-tone.btn-tone-0, .emojionearea .emojionearea-picker .emojionearea-tones > .btn-tone.btn-tone-0:after {
|
||||
background-color: #ffcf3e; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-tones > .btn-tone.btn-tone-1, .emojionearea .emojionearea-picker .emojionearea-tones > .btn-tone.btn-tone-1:after {
|
||||
background-color: #fae3c5; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-tones > .btn-tone.btn-tone-2, .emojionearea .emojionearea-picker .emojionearea-tones > .btn-tone.btn-tone-2:after {
|
||||
background-color: #e2cfa5; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-tones > .btn-tone.btn-tone-3, .emojionearea .emojionearea-picker .emojionearea-tones > .btn-tone.btn-tone-3:after {
|
||||
background-color: #daa478; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-tones > .btn-tone.btn-tone-4, .emojionearea .emojionearea-picker .emojionearea-tones > .btn-tone.btn-tone-4:after {
|
||||
background-color: #a78058; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-tones > .btn-tone.btn-tone-5, .emojionearea .emojionearea-picker .emojionearea-tones > .btn-tone.btn-tone-5:after {
|
||||
background-color: #5e4d43; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-bullet > .btn-tone, .emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-square > .btn-tone {
|
||||
width: 20px;
|
||||
height: 20px;
|
||||
margin: 0;
|
||||
background-color: transparent; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-bullet > .btn-tone:after, .emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-square > .btn-tone:after {
|
||||
content: "";
|
||||
position: absolute;
|
||||
display: block;
|
||||
top: 4px;
|
||||
left: 4px;
|
||||
width: 12px;
|
||||
height: 12px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-bullet > .btn-tone.active:after, .emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-square > .btn-tone.active:after {
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 20px;
|
||||
height: 20px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-radio > .btn-tone, .emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-checkbox > .btn-tone {
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
margin: 0px 2px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-radio > .btn-tone.active:after, .emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-checkbox > .btn-tone.active:after {
|
||||
content: "";
|
||||
position: absolute;
|
||||
display: block;
|
||||
background-color: transparent;
|
||||
border: 2px solid #fff;
|
||||
width: 8px;
|
||||
height: 8px;
|
||||
top: 2px;
|
||||
left: 2px;
|
||||
box-sizing: initial; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-bullet > .btn-tone, .emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-bullet > .btn-tone:after, .emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-radio > .btn-tone, .emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-radio > .btn-tone:after {
|
||||
-moz-border-radius: 100%;
|
||||
-webkit-border-radius: 100%;
|
||||
border-radius: 100%; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-square > .btn-tone, .emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-square > .btn-tone:after, .emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-checkbox > .btn-tone, .emojionearea .emojionearea-picker .emojionearea-tones.emojionearea-tones-checkbox > .btn-tone:after {
|
||||
-moz-border-radius: 1px;
|
||||
-webkit-border-radius: 1px;
|
||||
border-radius: 1px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-scroll-area {
|
||||
height: 236px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-search-panel + .emojionearea-scroll-area {
|
||||
height: 206px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-scroll-area {
|
||||
overflow: auto;
|
||||
overflow-x: hidden;
|
||||
width: 100%;
|
||||
position: absolute;
|
||||
padding: 0 0 5px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-scroll-area .emojionearea-emojis-list {
|
||||
z-index: 1; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-scroll-area .emojionearea-category-title {
|
||||
display: block;
|
||||
font-family: Arial, 'Helvetica Neue', Helvetica, sans-serif;
|
||||
font-size: 13px;
|
||||
font-weight: normal;
|
||||
color: #b2b2b2;
|
||||
background: #FFFFFF;
|
||||
line-height: 20px;
|
||||
margin: 0;
|
||||
padding: 7px 0 5px 6px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-scroll-area .emojionearea-category-title:after, .emojionearea .emojionearea-picker .emojionearea-scroll-area .emojionearea-category-title:before {
|
||||
content: " ";
|
||||
display: block;
|
||||
clear: both; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-scroll-area .emojionearea-category-block {
|
||||
padding: 0 0 0 7px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-scroll-area .emojionearea-category-block > .emojionearea-category {
|
||||
padding: 0 !important; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-scroll-area .emojionearea-category-block > .emojionearea-category:after, .emojionearea .emojionearea-picker .emojionearea-scroll-area .emojionearea-category-block > .emojionearea-category:before {
|
||||
content: " ";
|
||||
display: block;
|
||||
clear: both; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-scroll-area .emojionearea-category-block:after, .emojionearea .emojionearea-picker .emojionearea-scroll-area .emojionearea-category-block:before {
|
||||
content: " ";
|
||||
display: block;
|
||||
clear: both; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-scroll-area [class*=emojione-] {
|
||||
-moz-box-sizing: content-box;
|
||||
-webkit-box-sizing: content-box;
|
||||
box-sizing: content-box;
|
||||
margin: 0;
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
top: 0; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-scroll-area .emojibtn {
|
||||
-moz-box-sizing: content-box;
|
||||
-webkit-box-sizing: content-box;
|
||||
box-sizing: content-box;
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
float: left;
|
||||
display: block;
|
||||
margin: 1px;
|
||||
padding: 3px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-scroll-area .emojibtn:hover {
|
||||
-moz-border-radius: 4px;
|
||||
-webkit-border-radius: 4px;
|
||||
border-radius: 4px;
|
||||
background-color: #e4e4e4;
|
||||
cursor: pointer; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-scroll-area .emojibtn i, .emojionearea .emojionearea-picker .emojionearea-scroll-area .emojibtn img {
|
||||
float: left;
|
||||
display: block;
|
||||
width: 24px;
|
||||
height: 24px; }
|
||||
.emojionearea .emojionearea-picker .emojionearea-scroll-area .emojibtn img.lazy-emoji {
|
||||
filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=0);
|
||||
opacity: 0; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-filters-position-top .emojionearea-filters {
|
||||
top: 0;
|
||||
-moz-border-radius-topleft: 5px;
|
||||
-webkit-border-top-left-radius: 5px;
|
||||
border-top-left-radius: 5px;
|
||||
-moz-border-radius-topright: 5px;
|
||||
-webkit-border-top-right-radius: 5px;
|
||||
border-top-right-radius: 5px; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-filters-position-top.emojionearea-search-position-top .emojionearea-scroll-area {
|
||||
bottom: 0; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-filters-position-top.emojionearea-search-position-bottom .emojionearea-scroll-area {
|
||||
top: 40px; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-filters-position-top.emojionearea-search-position-bottom .emojionearea-search-panel {
|
||||
top: initial;
|
||||
bottom: 0; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-filters-position-bottom .emojionearea-filters {
|
||||
bottom: 0;
|
||||
-moz-border-radius-bottomleft: 5px;
|
||||
-webkit-border-bottom-left-radius: 5px;
|
||||
border-bottom-left-radius: 5px;
|
||||
-moz-border-radius-bottomright: 5px;
|
||||
-webkit-border-bottom-right-radius: 5px;
|
||||
border-bottom-right-radius: 5px; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-filters-position-bottom.emojionearea-search-position-bottom .emojionearea-scroll-area {
|
||||
top: 0; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-filters-position-bottom.emojionearea-search-position-bottom .emojionearea-search-panel {
|
||||
top: initial;
|
||||
bottom: 40px; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-filters-position-bottom.emojionearea-search-position-top .emojionearea-scroll-area {
|
||||
top: initial;
|
||||
bottom: 40px; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-filters-position-bottom.emojionearea-search-position-top .emojionearea-search-panel {
|
||||
top: 0; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-picker-position-top {
|
||||
margin-top: -286px;
|
||||
right: -14px; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-picker-position-top .emojionearea-wrapper:after {
|
||||
width: 19px;
|
||||
height: 10px;
|
||||
background-position: -2px -49px;
|
||||
bottom: -10px;
|
||||
right: 20px; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-picker-position-top.emojionearea-filters-position-bottom .emojionearea-wrapper:after {
|
||||
background-position: -2px -80px; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-picker-position-left, .emojionearea .emojionearea-picker.emojionearea-picker-position-right {
|
||||
margin-right: -326px;
|
||||
top: -8px; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-picker-position-left .emojionearea-wrapper:after, .emojionearea .emojionearea-picker.emojionearea-picker-position-right .emojionearea-wrapper:after {
|
||||
width: 10px;
|
||||
height: 19px;
|
||||
background-position: 0px -60px;
|
||||
top: 13px;
|
||||
left: -10px; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-picker-position-left.emojionearea-filters-position-bottom .emojionearea-wrapper:after, .emojionearea .emojionearea-picker.emojionearea-picker-position-right.emojionearea-filters-position-bottom .emojionearea-wrapper:after {
|
||||
background-position: right -60px; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-picker-position-bottom {
|
||||
margin-top: 10px;
|
||||
right: -14px;
|
||||
top: 47px; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-picker-position-bottom .emojionearea-wrapper:after {
|
||||
width: 19px;
|
||||
height: 10px;
|
||||
background-position: -2px -100px;
|
||||
top: -10px;
|
||||
right: 20px; }
|
||||
.emojionearea .emojionearea-picker.emojionearea-picker-position-bottom.emojionearea-filters-position-bottom .emojionearea-wrapper:after {
|
||||
background-position: -2px -90px; }
|
||||
.emojionearea .emojionearea-button.active + .emojionearea-picker {
|
||||
filter: progid:DXImageTransform.Microsoft.Alpha(enabled=false);
|
||||
opacity: 1; }
|
||||
.emojionearea .emojionearea-button.active + .emojionearea-picker-position-top {
|
||||
margin-top: -269px; }
|
||||
.emojionearea .emojionearea-button.active + .emojionearea-picker-position-left,
|
||||
.emojionearea .emojionearea-button.active + .emojionearea-picker-position-right {
|
||||
margin-right: -309px; }
|
||||
.emojionearea .emojionearea-button.active + .emojionearea-picker-position-bottom {
|
||||
margin-top: -7px; }
|
||||
.emojionearea.emojionearea-standalone {
|
||||
display: inline-block;
|
||||
width: auto;
|
||||
box-shadow: none; }
|
||||
.emojionearea.emojionearea-standalone .emojionearea-editor {
|
||||
min-height: 33px;
|
||||
position: relative;
|
||||
padding: 6px 42px 6px 6px; }
|
||||
.emojionearea.emojionearea-standalone .emojionearea-editor::before {
|
||||
content: "";
|
||||
position: absolute;
|
||||
top: 4px;
|
||||
left: 50%;
|
||||
bottom: 4px;
|
||||
border-left: 1px solid #e6e6e6; }
|
||||
.emojionearea.emojionearea-standalone .emojionearea-editor.has-placeholder {
|
||||
background-repeat: no-repeat;
|
||||
background-position: 20px 4px; }
|
||||
.emojionearea.emojionearea-standalone .emojionearea-editor.has-placeholder .emojioneemoji {
|
||||
opacity: 0.4; }
|
||||
.emojionearea.emojionearea-standalone .emojionearea-button {
|
||||
top: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
width: auto;
|
||||
height: auto; }
|
||||
.emojionearea.emojionearea-standalone .emojionearea-button > div {
|
||||
right: 6px;
|
||||
top: 5px; }
|
||||
.emojionearea.emojionearea-standalone .emojionearea-picker.emojionearea-picker-position-bottom .emojionearea-wrapper:after, .emojionearea.emojionearea-standalone .emojionearea-picker.emojionearea-picker-position-top .emojionearea-wrapper:after {
|
||||
right: 23px; }
|
||||
.emojionearea.emojionearea-standalone .emojionearea-picker.emojionearea-picker-position-left .emojionearea-wrapper:after, .emojionearea.emojionearea-standalone .emojionearea-picker.emojionearea-picker-position-right .emojionearea-wrapper:after {
|
||||
top: 15px; }
|
||||
|
||||
.emojionearea .emojionearea-button > div, .emojionearea .emojionearea-picker .emojionearea-wrapper:after {
|
||||
background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABcAAABuCAYAAADMB4ipAAAHfElEQVRo3u1XS1NT2Rb+9uOcQF4YlAJzLymFUHaLrdxKULvEUNpdTnRqD532f+AHMLMc94gqR1Zbt8rBnUh3YXipPGKwRDoWgXvrYiFUlEdIkPPYZ/dAkwox5yQCVt/bzRrBPnt9e+211/etFeDQDu3ArL+/X37OeqmRWoH7+vpItfWawStF1tfXR+zW9xW5ne0p8loOcAKuCdwpRft60C8a+X5zTvebCqcAvmidf1GGHtqhHdpf1qqKzsrKipyensbi4iKWl5cBAMFgEG1tbYhGo2hpadlbmxseHpaDg4MAgI6ODng8HgBAPp/H/Pw8AODatWvo7e2tvUHrui7v3r2L+fl5XL58GVeuXIHH49m1N5/Py0ePHmF0dBQdHR24desWVFXdtYdXAn/48CHm5+dx8+ZNRKPRigEUDpuenpb3799H4YaOnWh5eVmOj48jFoshGo0STdPkwMCAXF5elqV7BgYGpKZpMhqNklgshrGxMbx580Y6gicSCTDGEIvFAADpdBqpVArJZLK4J5lMIpVKIZ1OAwBisRgYY0gkEs6Rp1IphMNh+Hw+AgCGYQAANE0r7in8Xfjm8/lIOBzGq1evnMHX19fR1NRU/D8UCoFzjnA4XFwLh8PgnCMUChXXmpqakM1mUfVBS62xsZHk83lZWi1nz579ZA0AhBDO4A0NDchkMsWSJIRAURRiVy26rktVVUkmk0EgEHAGP3XqFKamppDP56Vpmrhz5w5u374t/X4/OP+w3TRNZLNZ6LoO0zSRz+dlf38/Ll686Jzz8+fPQwiBeDwOt9tNrl+/jkwmU6yaQpVkMhncuHEDbrebxONxCCEQiUScIw8Gg+TBgwdyZGQEyWRSdnV1kVQqJYeGhrC6ugrGGEKhEHp7e3Hy5EmSTCblvXv30NPTg2AwSA6M/vF4HCMjI7b0/yzh8vv9AIBsNrt34aokuQsLC7skt729varkHtqftUFf++FHsrq0QN3eBvp68Tfvf9Mv12oFCYU7G//e9nVuO7dpNbe2W4M//yQr0p8yRvyBo1Zr++lwLcCt7afD/sBRizJGavrB1dDYYh47Htrq+Kb7jBNwxzfdZ44dD201NLaYVUkU7ozQpuAJBkARwnRZpunN5zaa5hJjiXLH05GeiMd7JEM5zzHGNQBGZvk/Iv0yYVWMvK0zKk1Dl6ahW5RQobjqdjy+wEZn9PKF0n2d0csXPL7AhuKq26GECtPQLdPQZVtn1LlB69p7yRVVSEiDEGJwRd12e4+8PR3piRQidnuPvOWKuk0IMSSkwRVV6Np7WVVbSqvGsgSnlKkAFNPQXdrOtuKqcxtcUTUAhmUJnVJmlleJo3CVHmAaOlPUOmYJkxFKibQsSRkXhr4juKIKO2BHVSwcoLrqCVdUYho6K3YYRRWmoUtdey/tgKtK7rUffiQAsLq08MnbNLe2WwBgB/zHzueFyD8nwlIfbvdx8eU0WV1aKD1cVAMs9+F2j9gUPEEKemEJIe3AnXy4XfkBoNKSZHNthWfX31EA69VKttyHVyIOY1wRwmS6tqNsrr31vXo5k/bUu4gT2cp9lhbm0rzCJpeUUrE0vS63+c7/6uXMbDUWl/ssLczNFrVFddUT09AZpUy1LKvO0DVfPrfR9HxqfNbuEe185l9MFX3o6tIC5YpKFLWOfdQQ93Zu49j0+FDCDtjOp1yaOQCYhs4Y40wI05XfWj8yPT40Ua2ey33mEmMTtp2IUEq0nW3FKeJPGPjRp1Iz2QUuLUu66txG9NLVSK3gBZ+C1lcE54oqKOOCK6rm8QU2unu+u1ANuNynvFsBAG1ubbdMQ5eGviMAFDuP0w3sfMpvQEtb24fOQncU1bXl8R7JnOu+ZNv97XxKJwY6+PNPsrm13drObVqUMlMIU5OWpVHOc96Go5lTnV2fzC/VfAozD7HTCa6olBBa1Imlhbmq2lLuQ5xaW6nCPfnln0Yt7bDUhzhps8cfKH5//uTXmvS81OeLdqI/ZoROzSZrHqG/OvOPzxuhK5VgJTvV2bW3EdqJRABwrvvS/kfoSkoZvXT1YEbociHr7vnuYEfogpBFL109HKH/h0fomnXg3Lff79r7/MmvVbWG7gX4QObzc99+Tz7mHKah05KcW6ahQ9feS6cbMCdgt7eBWJagjCuUAC5tZzuouuo0Spm0hElc9R4cbf4bVl8v1p6WUmCuqEwIs34ruxaeeTy4uJVd67As08UVlVmWoG5vA7FLG3WMmHEupVTyW+vh2cn4DADMTsaTuc21LiGEhzHOnQ6gNtMrJSBMCKHkNt999WLi0S7hejEZH81n174WpukiIMw0dKq66p3Bw50RwhUVXFGJKUy28Xal48VkfKrSlWenhsc23q2cEB9SR7iiItwZIbbgHn8AlDFCCMW7laXjqZnHjkNpaubJzNuVpWZCKChjxOMPVH/QlaW0f/G3ZLqWWl6ce/bvlddp7yFD/w8Z+njoX1+GoZMjgzMAMDkyeLAMnRh+uKveJ0YGD4ahEyODFRk6OfrL/hj67GnckaHPng7vjaGzyYmaGDr77KktQ38H8tqx8Wja+WIAAAAASUVORK5CYII=') !important; }
|
||||
|
||||
.emojionearea.emojionearea-standalone .emojionearea-editor.has-placeholder {
|
||||
background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAwAAAAMAQMAAABsu86kAAAABlBMVEUAAAC/v79T5hyIAAAAAXRSTlMAQObYZgAAABNJREFUCNdjYGNgQEb/P4AQqiAASiUEG6Vit44AAAAASUVORK5CYII=') !important; }
|
||||
|
||||
/*# sourceMappingURL=emojionearea.css.map */
|
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user