Merge branch 'master' into fix/pyproject

Roland Geider committed 2024-01-24 14:24:20 +01:00
21 changed files with 362 additions and 83 deletions

View File

@@ -19,14 +19,14 @@ jobs:
strategy:
matrix:
#TODO: pypy3 has problems compiling lxml
python-version: [ '3.9', '3.10', '3.11', '3.12' ]
python-version: [ '3.10', '3.11', '3.12' ]
name: CI job (python ${{ matrix.python-version }})
steps:
- uses: actions/checkout@v4
- name: Cache dependencies
uses: actions/cache@v3.3.1
uses: actions/cache@v4
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
@@ -46,6 +46,7 @@ jobs:
run: |
pip install --upgrade pip
pip install wheel coverage
pip install -r requirements_dev.txt
pip install -r requirements_prod.txt
pip install -e .

View File

@@ -49,7 +49,7 @@ jobs:
install: true
- name: Cache Docker layers
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: /tmp/.buildx-cache
key: multi-buildx-${{ matrix.platform }}-${{ hashFiles('requirements*.txt') }}

View File

@@ -75,6 +75,7 @@ Developers
* Gabriel Liss - https://github.com/gabeliss
* Alexandra Rhodes - https://github.com/arhodes130
* Jayanth Bontha - https://github.com/JayanthBontha
* Ethan Winters - https://github.com/ebwinters
Translators
-----------

View File

@@ -24,10 +24,16 @@ fi
wger bootstrap
# Collect static files
if [ "$DJANGO_CLEAR_STATIC_FIRST" == "False" ]; then
clear=""
else
clear="--clear"
fi
if [[ "$DJANGO_DEBUG" == "False" ]];
then
echo "Running in production mode, running collectstatic now"
python3 manage.py collectstatic --no-input --clear
python3 manage.py collectstatic --no-input $clear
fi
# Perform database migrations

View File

@@ -98,6 +98,7 @@ WGER_SETTINGS["MIN_ACCOUNT_AGE_TO_TRUST"] = env.int("MIN_ACCOUNT_AGE_TO_TRUST",
WGER_SETTINGS["SYNC_EXERCISES_CELERY"] = env.bool("SYNC_EXERCISES_CELERY", False)
WGER_SETTINGS["SYNC_EXERCISE_IMAGES_CELERY"] = env.bool("SYNC_EXERCISE_IMAGES_CELERY", False)
WGER_SETTINGS["SYNC_EXERCISE_VIDEOS_CELERY"] = env.bool("SYNC_EXERCISE_VIDEOS_CELERY", False)
WGER_SETTINGS["SYNC_INGREDIENTS_CELERY"] = env.bool("SYNC_INGREDIENTS_CELERY", False)
WGER_SETTINGS["USE_CELERY"] = env.bool("USE_CELERY", False)
# Cache

View File

@@ -10,7 +10,7 @@
},
"homepage": "https://github.com/wger-project/wger",
"dependencies": {
"Sortable": "RubaXa/Sortable#1.15.0",
"Sortable": "RubaXa/Sortable#1.15.2",
"bootstrap": "5.3.2",
"components-font-awesome": "5.9.0",
"d3": "^7.8.5",

View File

@@ -5,6 +5,7 @@
# Application
bleach[css]~=6.1
crispy-bootstrap5==2023.10
tqdm==4.66.1
# this is a fork of django-bootstrap-breadcrumbs
# we might need to think about migrating away from this completely
@@ -22,7 +23,7 @@ django_compressor~=4.4
django_extensions~=3.2
django-storages~=1.14
django-environ==0.11.2
drf-spectacular[sidecar]==0.27.0
drf-spectacular[sidecar]==0.27.1
easy-thumbnails==2.8.5
fontawesomefree~=6.5.1
icalendar==5.0.11

View File

@@ -12,7 +12,7 @@ wheel==0.42.0
pymongo==4.6.1
# for running the dummy entry generators
faker==22.2.0
faker==22.5.0
# Development packages
coverage==7.4.0

View File

@@ -58,6 +58,7 @@ from wger.manager.models import (
WorkoutLog,
)
from wger.utils.requests import (
get_all_paginated,
get_paginated,
wger_headers,
)
@@ -73,9 +74,7 @@ def sync_exercises(
print_fn('*** Synchronizing exercises...')
url = make_uri(EXERCISE_ENDPOINT, server_url=remote_url, query={'limit': 100})
result = get_paginated(url, headers=wger_headers())
for data in result:
for data in get_paginated(url, headers=wger_headers()):
uuid = data['uuid']
created = data['created']
@@ -166,8 +165,8 @@ def sync_languages(
print_fn('*** Synchronizing languages...')
headers = wger_headers()
url = make_uri(LANGUAGE_ENDPOINT, server_url=remote_url)
result = get_paginated(url, headers=headers)
for data in result:
for data in get_all_paginated(url, headers=headers):
short_name = data['short_name']
full_name = data['full_name']
@@ -190,8 +189,8 @@ def sync_licenses(
"""Synchronize the licenses from the remote server"""
print_fn('*** Synchronizing licenses...')
url = make_uri(LICENSE_ENDPOINT, server_url=remote_url)
result = get_paginated(url, headers=wger_headers())
for data in result:
for data in get_all_paginated(url, headers=wger_headers()):
short_name = data['short_name']
full_name = data['full_name']
license_url = data['url']
@@ -219,8 +218,8 @@ def sync_categories(
print_fn('*** Synchronizing categories...')
url = make_uri(CATEGORY_ENDPOINT, server_url=remote_url)
result = get_paginated(url, headers=wger_headers())
for data in result:
for data in get_all_paginated(url, headers=wger_headers()):
category_id = data['id']
category_name = data['name']
@@ -244,9 +243,8 @@ def sync_muscles(
print_fn('*** Synchronizing muscles...')
url = make_uri(MUSCLE_ENDPOINT, server_url=remote_url)
result = get_paginated(url, headers=wger_headers())
for data in result:
for data in get_all_paginated(url, headers=wger_headers()):
muscle_id = data['id']
muscle_name = data['name']
muscle_is_front = data['is_front']
@@ -280,9 +278,8 @@ def sync_equipment(
print_fn('*** Synchronizing equipment...')
url = make_uri(EQUIPMENT_ENDPOINT, server_url=remote_url)
result = get_paginated(url, headers=wger_headers())
for data in result:
for data in get_all_paginated(url, headers=wger_headers()):
equipment_id = data['id']
equipment_name = data['name']
@@ -311,9 +308,8 @@ def handle_deleted_entries(
print_fn('*** Deleting exercise data that was removed on the server...')
url = make_uri(DELETION_LOG_ENDPOINT, server_url=remote_url, query={'limit': 100})
result = get_paginated(url, headers=wger_headers())
for data in result:
for data in get_paginated(url, headers=wger_headers()):
uuid = data['uuid']
replaced_by_uuid = data['replaced_by']
model_type = data['model_type']
@@ -382,7 +378,6 @@ def download_exercise_images(
):
headers = wger_headers()
url = make_uri(IMAGE_ENDPOINT, server_url=remote_url)
result = get_paginated(url, headers=headers)
print_fn('*** Processing images ***')
@@ -396,7 +391,7 @@ def download_exercise_images(
if deleted:
print_fn(f'Deleted {deleted} images without associated image files')
for image_data in result:
for image_data in get_paginated(url, headers=headers):
image_uuid = image_data['uuid']
print_fn(f'Processing image {image_uuid}')
@@ -426,11 +421,10 @@ def download_exercise_videos(
):
headers = wger_headers()
url = make_uri(VIDEO_ENDPOINT, server_url=remote_url)
result = get_paginated(url, headers=headers)
print_fn('*** Processing videos ***')
for video_data in result:
for video_data in get_paginated(url, headers=headers):
video_uuid = video_data['uuid']
print_fn(f'Processing video {video_uuid}')

View File

@@ -22,15 +22,15 @@ msgstr ""
"Project-Id-Version: wger Workout Manager\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2023-12-07 17:25+0100\n"
"PO-Revision-Date: 2023-05-30 17:48+0000\n"
"Last-Translator: Célian <gallcelian@gmail.com>\n"
"PO-Revision-Date: 2024-01-20 02:00+0000\n"
"Last-Translator: Lucas Batier <lucas.batier@hotmail.fr>\n"
"Language-Team: French <https://hosted.weblate.org/projects/wger/web/fr/>\n"
"Language: fr\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=n > 1;\n"
"X-Generator: Weblate 4.18-dev\n"
"X-Generator: Weblate 5.4-dev\n"
#: config/models/gym_config.py:46 gym/templates/gym/list.html:53
msgid "Default gym"
@@ -481,7 +481,7 @@ msgstr "Interdit !"
#: core/templates/403.html:7
msgid "You are not allowed to acces this page"
msgstr ""
msgstr "Vous n'êtes pas autorisé à accéder à cette page"
#: core/templates/404.html:4
msgid "Page not found"

View File

@@ -1 +1,2 @@
IMAGE_ENDPOINT = "ingredient-image"
INGREDIENTS_ENDPOINT = "ingredient"

View File

@@ -0,0 +1,65 @@
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# Django
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.management.base import (
BaseCommand,
CommandError,
)
from django.core.validators import URLValidator
# wger
from wger.exercises.sync import (
handle_deleted_entries,
sync_categories,
sync_equipment,
sync_exercises,
sync_languages,
sync_licenses,
sync_muscles,
)
from wger.nutrition.sync import sync_ingredients
class Command(BaseCommand):
"""
Synchronizes ingredient data from a wger instance to the local database
"""
remote_url = settings.WGER_SETTINGS['WGER_INSTANCE']
help = """Synchronizes ingredient data from a wger instance to the local database"""
def add_arguments(self, parser):
parser.add_argument(
'--remote-url',
action='store',
dest='remote_url',
default=settings.WGER_SETTINGS['WGER_INSTANCE'],
help=f'Remote URL to fetch the ingredients from (default: WGER_SETTINGS'
f'["WGER_INSTANCE"] - {settings.WGER_SETTINGS["WGER_INSTANCE"]})'
)
def handle(self, **options):
remote_url = options['remote_url']
try:
val = URLValidator()
val(remote_url)
self.remote_url = remote_url
except ValidationError:
raise CommandError('Please enter a valid URL')
sync_ingredients(self.stdout.write, self.remote_url, self.style.SUCCESS)
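
For completeness, a sketch (not part of the diff) of calling the new command programmatically. The command's file name is not shown in this diff, so 'sync-ingredients' is assumed here, mirroring the naming of the existing exercise sync command.

# Sketch only; the command name 'sync-ingredients' is an assumption.
from django.core.management import call_command

call_command('sync-ingredients', remote_url='https://wger.de')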

View File

@@ -25,19 +25,23 @@ from django.db import IntegrityError
import requests
# wger
from wger.nutrition.api.endpoints import IMAGE_ENDPOINT
from wger.nutrition.api.endpoints import (
IMAGE_ENDPOINT,
INGREDIENTS_ENDPOINT,
)
from wger.nutrition.models import (
Image,
Ingredient,
Source,
)
from wger.utils.constants import (
API_MAX_ITEMS,
CC_BY_SA_3_LICENSE_ID,
DOWNLOAD_INGREDIENT_OFF,
DOWNLOAD_INGREDIENT_WGER,
)
from wger.utils.requests import (
get_paginated_generator,
get_paginated,
wger_headers,
)
from wger.utils.url import make_uri
@@ -162,30 +166,69 @@ def download_ingredient_images(
headers = wger_headers()
url = make_uri(IMAGE_ENDPOINT, server_url=remote_url, query={'limit': 100})
print_fn('*** Processing images ***')
for result in get_paginated_generator(url, headers=headers):
print_fn('*** Processing ingredient images ***')
for image_data in get_paginated(url, headers=headers):
image_uuid = image_data['uuid']
print_fn(f'Processing image {image_uuid}')
for image_data in result:
image_uuid = image_data['uuid']
try:
ingredient = Ingredient.objects.get(uuid=image_data['ingredient_uuid'])
except Ingredient.DoesNotExist:
print_fn(' Remote ingredient not found in local DB, skipping...')
continue
print_fn(f'Processing image {image_uuid}')
if hasattr(ingredient, 'image'):
continue
try:
ingredient = Ingredient.objects.get(uuid=image_data['ingredient_uuid'])
except Ingredient.DoesNotExist:
print_fn(' Remote ingredient not found in local DB, skipping...')
continue
try:
Image.objects.get(uuid=image_uuid)
print_fn(' Image already present locally, skipping...')
continue
except Image.DoesNotExist:
print_fn(' Image not found in local DB, creating now...')
retrieved_image = requests.get(image_data['image'], headers=headers)
Image.from_json(ingredient, retrieved_image, image_data)
if hasattr(ingredient, 'image'):
continue
print_fn(style_fn(' successfully saved'))
try:
Image.objects.get(uuid=image_uuid)
print_fn(' Image already present locally, skipping...')
continue
except Image.DoesNotExist:
print_fn(' Image not found in local DB, creating now...')
retrieved_image = requests.get(image_data['image'], headers=headers)
Image.from_json(ingredient, retrieved_image, image_data)
print_fn(style_fn(' successfully saved'))
def sync_ingredients(
print_fn,
remote_url=settings.WGER_SETTINGS['WGER_INSTANCE'],
style_fn=lambda x: x,
):
"""Synchronize the ingredients from the remote server"""
print_fn('*** Synchronizing ingredients...')
url = make_uri(INGREDIENTS_ENDPOINT, server_url=remote_url, query={'limit': API_MAX_ITEMS})
for data in get_paginated(url, headers=wger_headers()):
uuid = data['uuid']
name = data['name']
ingredient, created = Ingredient.objects.update_or_create(
uuid=uuid,
defaults={
'name': name,
'code': data['code'],
'language_id': data['language'],
'created': data['created'],
'license_id': data['license'],
'license_object_url': data['license_object_url'],
'license_author': data['license_author'],
'license_author_url': data['license_author_url'],
'license_title': data['license_title'],
'license_derivative_source_url': data['license_derivative_source_url'],
'energy': data['energy'],
'carbohydrates': data['carbohydrates'],
'carbohydrates_sugar': data['carbohydrates_sugar'],
'fat': data['fat'],
'fat_saturated': data['fat_saturated'],
'protein': data['protein'],
'fibres': data['fibres'],
'sodium': data['sodium'],
},
)
print_fn(f"{'created' if created else 'updated'} ingredient {uuid} - {name}")
print_fn(style_fn('done!\n'))
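
A direct-call sketch (not part of the diff): sync_ingredients only needs an output callable, so it can also be run outside the management command; remote_url defaults to the configured WGER_INSTANCE and style_fn decorates the final summary line.

# Sketch only; assumes a reachable remote instance.
from wger.nutrition.sync import sync_ingredients

sync_ingredients(print, remote_url='https://wger.de', style_fn=str.upper)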

View File

@@ -14,12 +14,20 @@
# Standard Library
import logging
import random
# Django
from django.conf import settings
# Third Party
from celery.schedules import crontab
# wger
from wger.celery_configuration import app
from wger.nutrition.sync import (
download_ingredient_images,
fetch_ingredient_image,
sync_ingredients,
)
@@ -44,3 +52,25 @@ def fetch_all_ingredient_images_task():
Returns the image if it is already present in the DB
"""
download_ingredient_images(logger.info)
@app.task
def sync_all_ingredients_task():
"""
Fetches the current ingredients from the default wger instance
"""
sync_ingredients(logger.info)
@app.on_after_finalize.connect
def setup_periodic_tasks(sender, **kwargs):
if settings.WGER_SETTINGS['SYNC_INGREDIENTS_CELERY']:
sender.add_periodic_task(
crontab(
hour=random.randint(0, 23),
minute=random.randint(0, 59),
day_of_week=random.randint(0, 6),
),
sync_all_ingredients_task.s(),
name='Sync ingredients',
)
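
Besides the randomized crontab schedule, the new task can also be queued on demand; a sketch (not part of the diff), assuming the tasks module shown here lives at wger.nutrition.tasks:

# Sketch only; the module path is an assumption based on the imports above.
from wger.nutrition.tasks import sync_all_ingredients_task

sync_all_ingredients_task.delay()   # enqueue via Celery
sync_all_ingredients_task.apply()   # or run synchronously in-process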

View File

@@ -0,0 +1,127 @@
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# Standard Library
from decimal import Decimal
from unittest.mock import patch
from uuid import UUID
# wger
from wger.core.tests.base_testcase import WgerTestCase
from wger.nutrition.models import Ingredient
from wger.nutrition.sync import sync_ingredients
from wger.utils.requests import wger_headers
class MockIngredientResponse:
def __init__(self):
self.status_code = 200
self.content = b'1234'
# yapf: disable
@staticmethod
def json():
return {
"count": 2,
"next": None,
"previous": None,
"results": [
{
"id": 1,
"uuid": "7908c204-907f-4b1e-ad4e-f482e9769ade",
"code": "0013087245950",
"name": "Gâteau double chocolat",
"created": "2020-12-20T01:00:00+01:00",
"last_update": "2020-12-20T01:00:00+01:00",
"energy": 360,
"protein": "5.000",
"carbohydrates": "45.000",
"carbohydrates_sugar": "27.000",
"fat": "18.000",
"fat_saturated": "4.500",
"fibres": "2.000",
"sodium": "0.356",
"license": 5,
"license_title": " Gâteau double chocolat ",
"license_object_url": "",
"license_author": "Open Food Facts",
"license_author_url": "",
"license_derivative_source_url": "",
"language": 2
},
{
"id": 22634,
"uuid": "582f1b7f-a8bd-4951-9edd-247bc68b28f4",
"code": "3181238941963",
"name": "Maxi Hot Dog New York Style",
"created": "2020-12-20T01:00:00+01:00",
"last_update": "2020-12-20T01:00:00+01:00",
"energy": 256,
"protein": "11.000",
"carbohydrates": "27.000",
"carbohydrates_sugar": "5.600",
"fat": "11.000",
"fat_saturated": "4.600",
"fibres": None,
"sodium": "0.820",
"license": 5,
"license_title": " Maxi Hot Dog New York Style",
"license_object_url": "",
"license_author": "Open Food Facts",
"license_author_url": "",
"license_derivative_source_url": "",
"language": 3
},
]
}
# yapf: enable
class TestSyncMethods(WgerTestCase):
@patch('requests.get', return_value=MockIngredientResponse())
def test_ingredient_sync(self, mock_request):
# Arrange
ingredient = Ingredient.objects.get(pk=1)
self.assertEqual(Ingredient.objects.count(), 14)
self.assertEqual(ingredient.name, 'Test ingredient 1')
self.assertEqual(ingredient.energy, 176)
self.assertAlmostEqual(ingredient.protein, Decimal(25.63), 2)
self.assertEqual(ingredient.code, '1234567890987654321')
# Act
sync_ingredients(lambda x: x)
mock_request.assert_called_with(
'https://wger.de/api/v2/ingredient/?limit=999',
headers=wger_headers(),
)
# Assert
self.assertEqual(Ingredient.objects.count(), 15)
ingredient = Ingredient.objects.get(pk=1)
self.assertEqual(ingredient.name, 'Gâteau double chocolat')
self.assertEqual(ingredient.energy, 360)
self.assertAlmostEqual(ingredient.protein, Decimal(5), 2)
self.assertEqual(ingredient.code, '0013087245950')
self.assertEqual(ingredient.license.pk, 5)
self.assertEqual(ingredient.uuid, UUID('7908c204-907f-4b1e-ad4e-f482e9769ade'))
new_ingredient = Ingredient.objects.get(uuid='582f1b7f-a8bd-4951-9edd-247bc68b28f4')
self.assertEqual(new_ingredient.name, 'Maxi Hot Dog New York Style')
self.assertEqual(new_ingredient.energy, 256)
self.assertAlmostEqual(new_ingredient.protein, Decimal(11), 2)
self.assertEqual(new_ingredient.code, '3181238941963')

View File

@@ -536,6 +536,7 @@ WGER_SETTINGS = {
'SYNC_EXERCISES_CELERY': False,
'SYNC_EXERCISE_IMAGES_CELERY': False,
'SYNC_EXERCISE_VIDEOS_CELERY': False,
'SYNC_INGREDIENTS_CELERY': False,
'TWITTER': False,
'MASTODON': 'https://fosstodon.org/@wger',
'USE_CELERY': False,

View File

@@ -32,6 +32,7 @@ from django.utils.crypto import get_random_string
# Third Party
import requests
from invoke import task
from tqdm import tqdm
logger = logging.getLogger(__name__)
@@ -69,10 +70,8 @@ def start(context, address='localhost', port=8000, settings_path=None, extra_arg
@task(
help={
'settings-path': 'Path to settings file (absolute path). Leave empty for '
'default',
'database-path': 'Path to sqlite database (absolute path). Leave empty '
'for default'
'settings-path': 'Path to settings file (absolute path). Leave empty for default',
'database-path': 'Path to sqlite database (absolute path). Leave empty for default'
}
)
def bootstrap(context, settings_path=None, database_path=None):
@@ -103,12 +102,9 @@ def bootstrap(context, settings_path=None, database_path=None):
@task(
help={
'settings-path': 'Path to settings file (absolute path). Leave empty for '
'default',
'database-path': 'Path to sqlite database (absolute path). Leave empty '
'for default',
'database-type': 'Database type to use. Supported: sqlite3, postgresql. Default: '
'sqlite3',
'settings-path': 'Path to settings file (absolute path). Leave empty for default',
'database-path': 'Path to sqlite database (absolute path). Leave empty for default',
'database-type': 'Database type to use. Supported: sqlite3, postgresql. Default: sqlite3',
'key-length': 'Length of the generated secret key. Default: 50'
}
)
@@ -180,8 +176,7 @@ def create_settings(
settings_file.write(settings_content)
@task(help={'settings-path': 'Path to settings file (absolute path). Leave empty for '
'default'})
@task(help={'settings-path': 'Path to settings file (absolute path). Leave empty for default'})
def create_or_reset_admin(context, settings_path=None):
"""
Creates an admin user or resets the password for an existing one
@@ -206,8 +201,7 @@ def create_or_reset_admin(context, settings_path=None):
call_command("loaddata", path + "users.json")
@task(help={'settings-path': 'Path to settings file (absolute path). Leave empty for '
'default'})
@task(help={'settings-path': 'Path to settings file (absolute path). Leave empty for default'})
def migrate_db(context, settings_path=None):
"""
Run all database migrations
@@ -219,8 +213,7 @@ def migrate_db(context, settings_path=None):
call_command("migrate")
@task(help={'settings-path': 'Path to settings file (absolute path). Leave empty for '
'default'})
@task(help={'settings-path': 'Path to settings file (absolute path). Leave empty for default'})
def load_fixtures(context, settings_path=None):
"""
Loads all fixtures
@@ -266,7 +259,7 @@ def load_online_fixtures(context, settings_path=None):
Downloads fixtures from server and installs them (at the moment only ingredients)
"""
# Find the path to the settings and setup the django environment
# Find the path to the settings and set up the django environment
setup_django_environment(settings_path)
# Prepare the download
@@ -275,15 +268,20 @@ def load_online_fixtures(context, settings_path=None):
print(f'Downloading fixture data from {url}...')
response = requests.get(url, stream=True)
total_size = int(response.headers.get("content-length", 0))
size = int(response.headers["content-length"]) / (1024 * 1024)
print(f'-> fixture size: {size:.3} MB')
# Save to temporary file and load the data
f = tempfile.NamedTemporaryFile(delete=False, suffix='.json.zip')
print(f'-> saving to temp file {f.name}')
f.write(response.content)
with tempfile.NamedTemporaryFile(delete=False, suffix='.json.zip') as f:
print(f'-> saving to temp file {f.name}')
with tqdm(total=total_size, unit='B', unit_scale=True, desc='Downloading') as pbar:
for data in response.iter_content(chunk_size=1024):
f.write(data)
pbar.update(len(data))
f.close()
call_command("loaddata", f.name)
print('Loading downloaded data, this may take a while...')
call_command("loaddata", f.name, '--verbosity=3')
print('-> removing temp file')
print('')
os.unlink(f.name)

View File

@@ -229,8 +229,7 @@ urlpatterns = i18n_patterns(
path(
'measurement/',
include(('wger.measurements.urls', 'measurements'), namespace='measurements')
),
path('email/', include(('wger.mailer.urls', 'email'), namespace='email')),
), path('email/', include(('wger.mailer.urls', 'email'), namespace='email')),
path('sitemap.xml', index, {'sitemaps': sitemaps}, name='sitemap'),
path(
'sitemap-<section>.xml',

View File

@@ -59,8 +59,16 @@ ENGLISH_SHORT_NAME = 'en'
# Possible values for ingredient image download
DOWNLOAD_INGREDIENT_WGER = 'WGER'
DOWNLOAD_INGREDIENT_OFF = 'OFF'
DOWNLOAD_INGREDIENT_OPTIONS = (DOWNLOAD_INGREDIENT_WGER, DOWNLOAD_INGREDIENT_OFF, None)
DOWNLOAD_INGREDIENT_NONE = 'None'
DOWNLOAD_INGREDIENT_OPTIONS = (
DOWNLOAD_INGREDIENT_WGER,
DOWNLOAD_INGREDIENT_OFF,
DOWNLOAD_INGREDIENT_NONE,
)
# OFF Api
OFF_SEARCH_PRODUCT_FOUND = 1
OFF_SEARCH_PRODUCT_NOT_FOUND = 0
# API
API_MAX_ITEMS = 999

View File

@@ -27,7 +27,7 @@ def wger_headers():
return {'User-agent': wger_user_agent()}
def get_paginated(url: str, headers=None):
def get_all_paginated(url: str, headers=None):
"""
Fetch all results from a paginated endpoint.
@@ -49,7 +49,7 @@ def get_paginated(url: str, headers=None):
return results
def get_paginated_generator(url: str, headers=None):
def get_paginated(url: str, headers=None):
"""
Generator that iterates over a paginated endpoint
@@ -62,7 +62,9 @@ def get_paginated_generator(url: str, headers=None):
while True:
response = requests.get(url, headers=headers).json()
yield response['results']
for result in response['results']:
yield result
url = response['next']
if not url:
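
To make the rename concrete, a minimal usage sketch (not part of the diff; the endpoint URL is an example): get_paginated() is now a generator that yields one result dict at a time and fetches pages lazily, while get_all_paginated() keeps the old behaviour of collecting every page into a single list before returning.

# Sketch only; the URL below is an example endpoint.
from wger.utils.requests import get_all_paginated, get_paginated, wger_headers

url = 'https://wger.de/api/v2/ingredient/?limit=100'

# Lazy: items are yielded one by one as each page is fetched
for item in get_paginated(url, headers=wger_headers()):
    print(item['uuid'])

# Eager: every page is fetched first, then a single list is returned
all_items = get_all_paginated(url, headers=wger_headers())
print(len(all_items))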

View File

@@ -7,9 +7,10 @@
resolved "https://registry.yarnpkg.com/@types/jquery/-/jquery-2.0.54.tgz#d7999245f77c3fab5d84e7d32b8a6c20bfd1f072"
integrity sha512-D/PomKwNkDfSKD13DEVQT/pq2TUjN54c6uB341fEZanIzkjfGe7UaFuuaLZbpEiS5j7Wk2MUHAZqZIoECw29lg==
Sortable@RubaXa/Sortable#1.15.0:
version "1.15.0"
resolved "https://codeload.github.com/RubaXa/Sortable/tar.gz/babf6ab85fdfc5e13be1027827cecfbba2fb0d1e"
Sortable@RubaXa/Sortable#1.15.2:
version "1.15.2"
uid "07708fa1e7d9cf9d4563a7b1a09f7a47771522ed"
resolved "https://codeload.github.com/RubaXa/Sortable/tar.gz/07708fa1e7d9cf9d4563a7b1a09f7a47771522ed"
bootstrap@5.3.2:
version "5.3.2"