diff --git a/dev/data/backups/test_backup_2021-Apr-27.zip b/dev/data/backups/test_backup_2021-Apr-27.zip
deleted file mode 100644
index bac642b1..00000000
Binary files a/dev/data/backups/test_backup_2021-Apr-27.zip and /dev/null differ
diff --git a/docs/docs/documentation/getting-started/installation/postgres.md b/docs/docs/documentation/getting-started/installation/postgres.md
index 7fb33ba0..b3afe9c6 100644
--- a/docs/docs/documentation/getting-started/installation/postgres.md
+++ b/docs/docs/documentation/getting-started/installation/postgres.md
@@ -19,12 +19,12 @@ services:
environment:
# Set Frontend ENV Variables Here
- ALLOW_SIGNUP=true
- - API_URL=http://mealie-api:9000
+ - API_URL=http://mealie-api:9000 # (1)
restart: always
ports:
- - "9925:3000"
+ - "9925:3000" # (2)
volumes:
- - mealie-data:/app/data/
+ - mealie-data:/app/data/ # (3)
mealie-api:
image: hkotel/mealie:api-nightly
container_name: mealie-api
@@ -62,3 +62,9 @@ volumes:
driver: local
```
+
+
+1. The API_URL is the URL the frontend container uses to proxy api requests to the backend server. In this example, the name `mealie-api` resolves to the `mealie-api` container which runs the API server on port 9000. This allows you to access the API without exposing an additional port on the host.
+
**Note** that both containers must be on the same docker-network for this to work.
+2. To access the mealie interface you only need to expose port 3000 on the mealie-frontend container. Here we expose port 9925 on the host, feel free to change this to any port you like.
+3. Mounting the data directory to the frontend is now required to access the images/assets directory. This can be mounted read-only. Internally, the frontend container runs a Caddy proxy server that serves the requested assets to reduce load on the backend API.
\ No newline at end of file
diff --git a/docs/docs/documentation/getting-started/installation/sqlite.md b/docs/docs/documentation/getting-started/installation/sqlite.md
index 4d383d66..5b9e3c9f 100644
--- a/docs/docs/documentation/getting-started/installation/sqlite.md
+++ b/docs/docs/documentation/getting-started/installation/sqlite.md
@@ -17,12 +17,12 @@ services:
environment:
# Set Frontend ENV Variables Here
- ALLOW_SIGNUP=true
- - API_URL=http://mealie-api:9000
+ - API_URL=http://mealie-api:9000 # (1)
restart: always
ports:
- - "9925:3000"
+ - "9925:3000" # (2)
volumes:
- - mealie-data:/app/data/
+ - mealie-data:/app/data/ # (3)
mealie-api:
image: hkotel/mealie:api-nightly
container_name: mealie-api
@@ -41,4 +41,11 @@ services:
volumes:
mealie-data:
driver: local
-```
\ No newline at end of file
+```
+
+
+
+1. The API_URL is the URL the frontend container uses to proxy api requests to the backend server. In this example, the name `mealie-api` resolves to the `mealie-api` container which runs the API server on port 9000. This allows you to access the API without exposing an additional port on the host.
+
**Note** that both containers must be on the same docker-network for this to work.
+2. To access the mealie interface you only need to expose port 3000 on the mealie-frontend container. Here we expose port 9925 on the host, feel free to change this to any port you like.
+3. Mounting the data directory to the frontend is now required to access the images/assets directory. This can be mounted read-only. Internally, the frontend container runs a Caddy proxy server that serves the requested assets to reduce load on the backend API.
\ No newline at end of file
diff --git a/docs/docs/overrides/api.html b/docs/docs/overrides/api.html
index 1afd60fe..0f34d063 100644
--- a/docs/docs/overrides/api.html
+++ b/docs/docs/overrides/api.html
@@ -14,7 +14,7 @@
diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml
index 160f2572..85ee8018 100644
--- a/docs/mkdocs.yml
+++ b/docs/mkdocs.yml
@@ -18,6 +18,7 @@ theme:
name: Switch to light mode
custom_dir: docs/overrides
features:
+ - content.code.annotate
- navigation.top
# - navigation.instant
- navigation.expand
diff --git a/frontend/api/admin-api.ts b/frontend/api/admin-api.ts
index db978ca4..1b560634 100644
--- a/frontend/api/admin-api.ts
+++ b/frontend/api/admin-api.ts
@@ -2,26 +2,23 @@ import { AdminAboutAPI } from "./admin/admin-about";
import { AdminTaskAPI } from "./admin/admin-tasks";
import { AdminUsersApi } from "./admin/admin-users";
import { AdminGroupsApi } from "./admin/admin-groups";
+import { AdminBackupsApi } from "./admin/admin-backups";
import { ApiRequestInstance } from "~/types/api";
export class AdminAPI {
- private static instance: AdminAPI;
public about: AdminAboutAPI;
public serverTasks: AdminTaskAPI;
public users: AdminUsersApi;
public groups: AdminGroupsApi;
+ public backups: AdminBackupsApi;
constructor(requests: ApiRequestInstance) {
- if (AdminAPI.instance instanceof AdminAPI) {
- return AdminAPI.instance;
- }
-
this.about = new AdminAboutAPI(requests);
this.serverTasks = new AdminTaskAPI(requests);
this.users = new AdminUsersApi(requests);
this.groups = new AdminGroupsApi(requests);
+ this.backups = new AdminBackupsApi(requests);
Object.freeze(this);
- AdminAPI.instance = this;
}
}
diff --git a/frontend/api/admin/admin-backups.ts b/frontend/api/admin/admin-backups.ts
new file mode 100644
index 00000000..2ad34cde
--- /dev/null
+++ b/frontend/api/admin/admin-backups.ts
@@ -0,0 +1,33 @@
+import { BaseAPI } from "../_base";
+import { AllBackups } from "~/types/api-types/admin";
+import { ErrorResponse, FileTokenResponse, SuccessResponse } from "~/types/api-types/response";
+
+const prefix = "/api";
+
+const routes = {
+ base: `${prefix}/admin/backups`,
+ item: (name: string) => `${prefix}/admin/backups/${name}`,
+ restore: (name: string) => `${prefix}/admin/backups/${name}/restore`,
+};
+
+export class AdminBackupsApi extends BaseAPI {
+ async getAll() {
+ return await this.requests.get(routes.base);
+ }
+
+ async create() {
+ return await this.requests.post(routes.base, {});
+ }
+
+ async get(fileName: string) {
+ return await this.requests.get(routes.item(fileName));
+ }
+
+ async delete(fileName: string) {
+ return await this.requests.delete(routes.item(fileName));
+ }
+
+ async restore(fileName: string) {
+ return await this.requests.post(routes.restore(fileName), {});
+ }
+}
diff --git a/frontend/api/class-interfaces/utils.ts b/frontend/api/class-interfaces/utils.ts
index 4aa93d27..73b3b874 100644
--- a/frontend/api/class-interfaces/utils.ts
+++ b/frontend/api/class-interfaces/utils.ts
@@ -3,7 +3,7 @@ import { BaseAPI } from "../_base";
const prefix = "/api";
interface DownloadData {
- fileToken: string,
+ fileToken: string;
}
export class UtilsAPI extends BaseAPI {
diff --git a/frontend/components/Domain/Recipe/RecipeIngredients.vue b/frontend/components/Domain/Recipe/RecipeIngredients.vue
index e064a2e7..4a13321e 100644
--- a/frontend/components/Domain/Recipe/RecipeIngredients.vue
+++ b/frontend/components/Domain/Recipe/RecipeIngredients.vue
@@ -9,13 +9,12 @@
{{ ingredient.title }}
-
+
-
+ />
@@ -86,4 +85,13 @@ export default defineComponent({
.dense-markdown p {
margin: auto !important;
}
+
+.v-input--selection-controls {
+ margin-top: 0.5rem;
+ margin-bottom: auto !important;
+}
+
+.v-input--selection-controls__input {
+ margin-bottom: auto !important;
+}
diff --git a/frontend/pages/admin/backups.vue b/frontend/pages/admin/backups.vue
index 1a877460..69cdbe16 100644
--- a/frontend/pages/admin/backups.vue
+++ b/frontend/pages/admin/backups.vue
@@ -1,9 +1,7 @@
-// TODO: Create a new datatable below to display the import summary json files saved on server (Need to do as well).
+
-
-
-
+
-
-
+ Restoring this backup will overwrite all the current data in your database and in the data directory and
+ replace them with the contents of this backup. This action cannot be undone - use with caution. If
+ the restoration is successful, you will be logged out.
-
+
+
+
+
+ {{ $globals.icons.database }}
+ Restore Backup
+
+
+
+ {{ selected.name }}
+
-
- {{ $t("settings.backup.create-heading") }}
-
-
-
- Lorem ipsum dolor sit, amet consectetur adipisicing elit. Dolores molestiae alias incidunt fugiat!
- Recusandae natus numquam iusto voluptates deserunt quia? Sed voluptate rem facilis tempora, perspiciatis
- corrupti dolore obcaecati laudantium!
-
-
-
-
+
+
+
Backups are total snapshots of the database and data directory of the site. This includes all data and cannot
be set to exclude subsets of data. You can think of this as a snapshot of Mealie at a specific time.
+ Currently, this backup mechanism is not cross-version and therefore cannot be used to migrate data between
+ versions (data migrations are not done automatically). These serve as a database agnostic way to export and
+ import data or backup the site to an external location.
-
-
+
+ {{ $t("settings.backup.create-heading") }}
-
+
+ Looking For Migrations?
+
-
-
\ No newline at end of file
+
+
diff --git a/frontend/types/api-types/admin.ts b/frontend/types/api-types/admin.ts
index bba69b0d..be2b1791 100644
--- a/frontend/types/api-types/admin.ts
+++ b/frontend/types/api-types/admin.ts
@@ -99,12 +99,12 @@ export interface RecipeSummary {
dateUpdated?: string;
}
export interface RecipeCategory {
- id: string;
+ id?: string;
name: string;
slug: string;
}
export interface RecipeTag {
- id: string;
+ id?: string;
name: string;
slug: string;
}
diff --git a/frontend/types/api-types/cookbook.ts b/frontend/types/api-types/cookbook.ts
index 0e2493d5..e12f2927 100644
--- a/frontend/types/api-types/cookbook.ts
+++ b/frontend/types/api-types/cookbook.ts
@@ -24,7 +24,7 @@ export interface ReadCookBook {
position?: number;
categories?: CategoryBase[];
groupId: string;
- id: number;
+ id: string;
}
export interface RecipeCategoryResponse {
name: string;
@@ -55,12 +55,12 @@ export interface RecipeSummary {
dateUpdated?: string;
}
export interface RecipeCategory {
- id: string;
+ id?: string;
name: string;
slug: string;
}
export interface RecipeTag {
- id: string;
+ id?: string;
name: string;
slug: string;
}
@@ -117,7 +117,7 @@ export interface RecipeCookBook {
position?: number;
categories: RecipeCategoryResponse[];
groupId: string;
- id: number;
+ id: string;
}
export interface SaveCookBook {
name: string;
@@ -134,5 +134,5 @@ export interface UpdateCookBook {
position?: number;
categories?: CategoryBase[];
groupId: string;
- id: number;
+ id: string;
}
diff --git a/frontend/types/api-types/group.ts b/frontend/types/api-types/group.ts
index 26a3a512..695f0edc 100644
--- a/frontend/types/api-types/group.ts
+++ b/frontend/types/api-types/group.ts
@@ -206,7 +206,7 @@ export interface ReadGroupPreferences {
recipeDisableComments?: boolean;
recipeDisableAmount?: boolean;
groupId: string;
- id: number;
+ id: string;
}
export interface ReadInviteToken {
token: string;
@@ -219,7 +219,7 @@ export interface ReadWebhook {
url?: string;
time?: string;
groupId: string;
- id: number;
+ id: string;
}
export interface RecipeSummary {
id?: string;
@@ -244,12 +244,12 @@ export interface RecipeSummary {
dateUpdated?: string;
}
export interface RecipeCategory {
- id: string;
+ id?: string;
name: string;
slug: string;
}
export interface RecipeTag {
- id: string;
+ id?: string;
name: string;
slug: string;
}
diff --git a/frontend/types/api-types/meal-plan.ts b/frontend/types/api-types/meal-plan.ts
index 39499146..85938c57 100644
--- a/frontend/types/api-types/meal-plan.ts
+++ b/frontend/types/api-types/meal-plan.ts
@@ -7,7 +7,7 @@
export type PlanEntryType = "breakfast" | "lunch" | "dinner" | "side";
export type PlanRulesDay = "monday" | "tuesday" | "wednesday" | "thursday" | "friday" | "saturday" | "sunday" | "unset";
-export type PlanRulesType = "breakfast" | "lunch" | "dinner" | "unset";
+export type PlanRulesType = "breakfast" | "lunch" | "dinner" | "side" | "unset";
export interface Category {
id: string;
@@ -118,12 +118,12 @@ export interface RecipeSummary {
dateUpdated?: string;
}
export interface RecipeCategory {
- id: string;
+ id?: string;
name: string;
slug: string;
}
export interface RecipeTag {
- id: string;
+ id?: string;
name: string;
slug: string;
}
diff --git a/frontend/types/api-types/recipe.ts b/frontend/types/api-types/recipe.ts
index d2981e29..1087e708 100644
--- a/frontend/types/api-types/recipe.ts
+++ b/frontend/types/api-types/recipe.ts
@@ -67,12 +67,12 @@ export interface CreateRecipeBulk {
tags?: RecipeTag[];
}
export interface RecipeCategory {
- id: string;
+ id?: string;
name: string;
slug: string;
}
export interface RecipeTag {
- id: string;
+ id?: string;
name: string;
slug: string;
}
diff --git a/frontend/types/api-types/response.ts b/frontend/types/api-types/response.ts
index 145ab1de..c9f03272 100644
--- a/frontend/types/api-types/response.ts
+++ b/frontend/types/api-types/response.ts
@@ -10,6 +10,9 @@ export interface ErrorResponse {
error?: boolean;
exception?: string;
}
+export interface FileTokenResponse {
+ file_token: string;
+}
export interface SuccessResponse {
message: string;
error?: boolean;
diff --git a/frontend/types/api-types/user.ts b/frontend/types/api-types/user.ts
index 1ede2780..3070b448 100644
--- a/frontend/types/api-types/user.ts
+++ b/frontend/types/api-types/user.ts
@@ -74,7 +74,7 @@ export interface ReadGroupPreferences {
recipeDisableComments?: boolean;
recipeDisableAmount?: boolean;
groupId: string;
- id: number;
+ id: string;
}
export interface LoingLiveTokenIn {
name: string;
@@ -131,12 +131,12 @@ export interface RecipeSummary {
dateUpdated?: string;
}
export interface RecipeCategory {
- id: string;
+ id?: string;
name: string;
slug: string;
}
export interface RecipeTag {
- id: string;
+ id?: string;
name: string;
slug: string;
}
@@ -196,26 +196,12 @@ export interface SavePasswordResetToken {
userId: string;
token: string;
}
-export interface SignUpIn {
- name: string;
- admin: boolean;
-}
-export interface SignUpOut {
- name: string;
- admin: boolean;
- token: string;
- id: number;
-}
-export interface SignUpToken {
- name: string;
- admin: boolean;
- token: string;
-}
export interface Token {
access_token: string;
token_type: string;
}
export interface TokenData {
+ user_id?: string;
username?: string;
}
export interface UpdateGroup {
diff --git a/mealie/app.py b/mealie/app.py
index 42bb1a36..0e598cd0 100644
--- a/mealie/app.py
+++ b/mealie/app.py
@@ -5,7 +5,7 @@ from fastapi.middleware.gzip import GZipMiddleware
from mealie.core.config import get_app_settings
from mealie.core.root_logger import get_logger
from mealie.core.settings.static import APP_VERSION
-from mealie.routes import backup_routes, router, utility_routes
+from mealie.routes import router, utility_routes
from mealie.routes.handlers import register_debug_handler
from mealie.routes.media import media_router
from mealie.services.scheduler import SchedulerRegistry, SchedulerService, tasks
@@ -69,7 +69,6 @@ def start_scheduler():
def api_routers():
app.include_router(router)
app.include_router(media_router)
- app.include_router(backup_routes.router)
app.include_router(utility_routes.router)
diff --git a/mealie/core/settings/db_providers.py b/mealie/core/settings/db_providers.py
index c6a764d9..b894e595 100644
--- a/mealie/core/settings/db_providers.py
+++ b/mealie/core/settings/db_providers.py
@@ -24,7 +24,7 @@ class SQLiteProvider(AbstractDBProvider, BaseModel):
@property
def db_url(self) -> str:
- return "sqlite:///" + str(self.db_path.absolute())
+ return f"sqlite:///{str(self.db_path.absolute())}"
@property
def db_url_public(self) -> str:
@@ -59,7 +59,5 @@ class PostgresProvider(AbstractDBProvider, BaseSettings):
def db_provider_factory(provider_name: str, data_dir: Path, env_file: Path, env_encoding="utf-8") -> AbstractDBProvider:
if provider_name == "postgres":
return PostgresProvider(_env_file=env_file, _env_file_encoding=env_encoding)
- elif provider_name == "sqlite":
- return SQLiteProvider(data_dir=data_dir)
else:
return SQLiteProvider(data_dir=data_dir)
diff --git a/mealie/routes/admin/__init__.py b/mealie/routes/admin/__init__.py
index 5c1c8ae7..beb807fe 100644
--- a/mealie/routes/admin/__init__.py
+++ b/mealie/routes/admin/__init__.py
@@ -1,6 +1,14 @@
from mealie.routes._base.routers import AdminAPIRouter
-from . import admin_about, admin_email, admin_log, admin_management_groups, admin_management_users, admin_server_tasks
+from . import (
+ admin_about,
+ admin_backups,
+ admin_email,
+ admin_log,
+ admin_management_groups,
+ admin_management_users,
+ admin_server_tasks,
+)
router = AdminAPIRouter(prefix="/admin")
@@ -10,3 +18,4 @@ router.include_router(admin_management_users.router)
router.include_router(admin_management_groups.router)
router.include_router(admin_email.router, tags=["Admin: Email"])
router.include_router(admin_server_tasks.router, tags=["Admin: Server Tasks"])
+router.include_router(admin_backups.router)
diff --git a/mealie/routes/admin/admin_backups.py b/mealie/routes/admin/admin_backups.py
new file mode 100644
index 00000000..bed0173f
--- /dev/null
+++ b/mealie/routes/admin/admin_backups.py
@@ -0,0 +1,95 @@
+import operator
+import shutil
+from pathlib import Path
+
+from fastapi import APIRouter, File, HTTPException, UploadFile, status
+
+from mealie.core.config import get_app_dirs
+from mealie.core.security import create_file_token
+from mealie.pkgs.stats.fs_stats import pretty_size
+from mealie.routes._base import BaseAdminController, controller
+from mealie.schema.admin.backup import AllBackups, BackupFile
+from mealie.schema.response.responses import FileTokenResponse, SuccessResponse
+from mealie.services.backups_v2.backup_v2 import BackupV2
+
+router = APIRouter(prefix="/backups")
+
+
+@controller(router)
+class AdminBackupController(BaseAdminController):
+ def _backup_path(self, name) -> Path:
+ return get_app_dirs().BACKUP_DIR / name
+
+ @router.get("", response_model=AllBackups)
+ def get_all(self):
+ app_dirs = get_app_dirs()
+ imports = []
+ for archive in app_dirs.BACKUP_DIR.glob("*.zip"):
+ backup = BackupFile(
+ name=archive.name, date=archive.stat().st_ctime, size=pretty_size(archive.stat().st_size)
+ )
+ imports.append(backup)
+
+ templates = [template.name for template in app_dirs.TEMPLATE_DIR.glob("*.*")]
+ imports.sort(key=operator.attrgetter("date"), reverse=True)
+
+ return AllBackups(imports=imports, templates=templates)
+
+ @router.post("", status_code=status.HTTP_201_CREATED, response_model=SuccessResponse)
+ def create_one(self):
+ backup = BackupV2()
+
+ try:
+ backup.backup()
+ except Exception as e:
+ raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR) from e
+
+ return SuccessResponse.respond("Backup created successfully")
+
+ @router.get("/{file_name}", response_model=FileTokenResponse)
+ def get_one(self, file_name: str):
+ """Returns a token to download a file"""
+ file = self._backup_path(file_name)
+
+ if not file.exists():
+ raise HTTPException(status.HTTP_404_NOT_FOUND)
+
+ return FileTokenResponse.respond(create_file_token(file))
+
+ @router.delete("/{file_name}", status_code=status.HTTP_200_OK, response_model=SuccessResponse)
+ def delete_one(self, file_name: str):
+ file = self._backup_path(file_name)
+
+ if not file.is_file():
+ raise HTTPException(status.HTTP_400_BAD_REQUEST)
+ try:
+ file.unlink()
+ except Exception as e:
+ raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR) from e
+
+ return SuccessResponse.respond(f"{file_name} has been deleted.")
+
+ @router.post("/upload", response_model=SuccessResponse)
+ def upload_one(self, archive: UploadFile = File(...)):
+ """Upload a .zip File to later be imported into Mealie"""
+ app_dirs = get_app_dirs()
+ dest = app_dirs.BACKUP_DIR.joinpath(archive.filename)
+
+ with dest.open("wb") as buffer:
+ shutil.copyfileobj(archive.file, buffer)
+
+ if not dest.is_file:
+ raise HTTPException(status.HTTP_400_BAD_REQUEST)
+
+ @router.post("/{file_name}/restore", response_model=SuccessResponse)
+ def import_one(self, file_name: str):
+ backup = BackupV2()
+
+ file = self._backup_path(file_name)
+
+ try:
+ backup.restore(file)
+ except Exception as e:
+ raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR) from e
+
+ return SuccessResponse.respond("Restore successful")
diff --git a/mealie/routes/backup_routes.py b/mealie/routes/backup_routes.py
deleted file mode 100644
index 80bb7d6c..00000000
--- a/mealie/routes/backup_routes.py
+++ /dev/null
@@ -1,111 +0,0 @@
-import operator
-import shutil
-
-from fastapi import Depends, File, HTTPException, UploadFile, status
-from sqlalchemy.orm.session import Session
-
-from mealie.core.config import get_app_dirs
-from mealie.core.dependencies import get_current_user
-from mealie.core.root_logger import get_logger
-from mealie.core.security import create_file_token
-from mealie.db.db_setup import generate_session
-from mealie.pkgs.stats.fs_stats import pretty_size
-from mealie.routes._base.routers import AdminAPIRouter
-from mealie.schema.admin import AllBackups, BackupFile, CreateBackup, ImportJob
-from mealie.schema.user.user import PrivateUser
-from mealie.services.backups import imports
-from mealie.services.backups.exports import backup_all
-
-router = AdminAPIRouter(prefix="/api/backups", tags=["Backups"])
-logger = get_logger()
-app_dirs = get_app_dirs()
-
-
-@router.get("/available", response_model=AllBackups)
-def available_imports():
- """Returns a list of avaiable .zip files for import into Mealie."""
- imports = []
- for archive in app_dirs.BACKUP_DIR.glob("*.zip"):
- backup = BackupFile(name=archive.name, date=archive.stat().st_ctime, size=pretty_size(archive.stat().st_size))
- imports.append(backup)
-
- templates = [template.name for template in app_dirs.TEMPLATE_DIR.glob("*.*")]
- imports.sort(key=operator.attrgetter("date"), reverse=True)
-
- return AllBackups(imports=imports, templates=templates)
-
-
-@router.post("/export/database", status_code=status.HTTP_201_CREATED)
-def export_database(data: CreateBackup, session: Session = Depends(generate_session)):
- """Generates a backup of the recipe database in json format."""
- try:
- export_path = backup_all(
- session=session,
- tag=data.tag,
- templates=data.templates,
- export_recipes=data.options.recipes,
- export_users=data.options.users,
- export_groups=data.options.groups,
- export_notifications=data.options.notifications,
- )
-
- return {"export_path": export_path}
- except Exception as e:
- logger.error(e)
- raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR)
-
-
-@router.post("/upload", status_code=status.HTTP_200_OK)
-def upload_backup_file(archive: UploadFile = File(...)):
- """Upload a .zip File to later be imported into Mealie"""
- dest = app_dirs.BACKUP_DIR.joinpath(archive.filename)
-
- with dest.open("wb") as buffer:
- shutil.copyfileobj(archive.file, buffer)
-
- if not dest.is_file:
- raise HTTPException(status.HTTP_400_BAD_REQUEST)
-
-
-@router.get("/{file_name}/download")
-async def download_backup_file(file_name: str):
- """Returns a token to download a file"""
- file = app_dirs.BACKUP_DIR.joinpath(file_name)
-
- return {"fileToken": create_file_token(file)}
-
-
-@router.post("/{file_name}/import", status_code=status.HTTP_200_OK)
-def import_database(
- import_data: ImportJob,
- session: Session = Depends(generate_session),
- user: PrivateUser = Depends(get_current_user),
-):
- """Import a database backup file generated from Mealie."""
-
- return imports.import_database(
- user=user,
- session=session,
- archive=import_data.name,
- import_recipes=import_data.recipes,
- import_settings=import_data.settings,
- import_users=import_data.users,
- import_groups=import_data.groups,
- force_import=import_data.force,
- rebase=import_data.rebase,
- )
-
-
-@router.delete("/{file_name}/delete", status_code=status.HTTP_200_OK)
-def delete_backup(file_name: str):
- """Removes a database backup from the file system"""
- file_path = app_dirs.BACKUP_DIR.joinpath(file_name)
-
- if not file_path.is_file():
- raise HTTPException(status.HTTP_400_BAD_REQUEST)
- try:
- file_path.unlink()
- except Exception:
- raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR)
-
- return {"message": f"{file_name} has been deleted."}
diff --git a/mealie/schema/response/responses.py b/mealie/schema/response/responses.py
index d145ee37..4b4e2f24 100644
--- a/mealie/schema/response/responses.py
+++ b/mealie/schema/response/responses.py
@@ -1,5 +1,6 @@
from typing import Optional
+from fastapi_camelcase import CamelModel
from pydantic import BaseModel
@@ -28,3 +29,15 @@ class SuccessResponse(BaseModel):
in the same call, for use while providing details to a HTTPException
"""
return cls(message=message).dict()
+
+
+class FileTokenResponse(CamelModel):
+ file_token: str
+
+ @classmethod
+ def respond(cls, token: str) -> dict:
+ """
+ This method is an helper to create an obect and convert to a dictionary
+ in the same call, for use while providing details to a HTTPException
+ """
+ return cls(file_token=token).dict()
diff --git a/mealie/services/backups_v2/__init__.py b/mealie/services/backups_v2/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/mealie/services/backups_v2/alchemy_exporter.py b/mealie/services/backups_v2/alchemy_exporter.py
new file mode 100644
index 00000000..d199b54f
--- /dev/null
+++ b/mealie/services/backups_v2/alchemy_exporter.py
@@ -0,0 +1,138 @@
+import datetime
+import json
+from pathlib import Path
+
+from fastapi.encoders import jsonable_encoder
+from pydantic import BaseModel
+from sqlalchemy import MetaData, create_engine
+from sqlalchemy.engine import base
+from sqlalchemy.orm import Session, sessionmaker
+
+from mealie.services._base_service import BaseService
+
+
+class AlchemyExporter(BaseService):
+ connection_str: str
+ engine: base.Engine
+ meta: MetaData
+
+ look_for_datetime = {"created_at", "update_at", "date_updated", "timestamp", "expires_at"}
+ look_for_date = {"date_added", "date"}
+
+ class DateTimeParser(BaseModel):
+ date: datetime.date = None
+ time: datetime.datetime = None
+
+ def __init__(self, connection_str: str) -> None:
+ super().__init__()
+
+ self.connection_str = connection_str
+ self.engine = create_engine(connection_str)
+ self.meta = MetaData()
+ self.session_maker = sessionmaker(bind=self.engine)
+
+ @staticmethod
+ def convert_to_datetime(data: dict) -> dict:
+ """
+ walks the dictionary to convert all things that look like timestamps to datetime objects
+ used in the context of reading a json file into a database via SQLAlchemy.
+ """
+ for key, value in data.items():
+ if isinstance(value, dict):
+ data = AlchemyExporter.convert_to_datetime(value)
+ elif isinstance(value, list): # assume that this is a list of dictionaries
+ data[key] = [AlchemyExporter.convert_to_datetime(item) for item in value]
+ elif isinstance(value, str):
+ if key in AlchemyExporter.look_for_datetime:
+ data[key] = AlchemyExporter.DateTimeParser(time=value).time
+ if key in AlchemyExporter.look_for_date:
+ data[key] = AlchemyExporter.DateTimeParser(date=value).date
+
+ return data
+
+ @staticmethod
+ def _compare_schemas(schema1: dict, schema2: dict) -> bool:
+ try:
+ # validate alembic version(s) are the same
+ return schema1["alembic_version"] == schema2["alembic_version"]
+ except KeyError:
+ return False
+
+ @staticmethod
+ def validate_schemas(schema1: Path | dict, schema2: Path | dict) -> bool:
+ """
+        Validates that the schemas of the two databases match. In practice,
+        this means validating that the alembic version is the same
+ """
+
+ def extract_json(file: Path) -> dict:
+ with open(file) as f:
+ return json.loads(f.read())
+
+ if isinstance(schema1, Path):
+ schema1 = extract_json(schema1)
+
+ if isinstance(schema2, Path):
+ schema2 = extract_json(schema2)
+
+ return AlchemyExporter._compare_schemas(schema1, schema2)
+
+ def dump_schema(self) -> dict:
+ """
+ Returns the schema of the SQLAlchemy database as a python dictionary. This dictionary is wrapped by
+ jsonable_encoder to ensure that the object can be converted to a json string.
+ """
+ self.meta.reflect(bind=self.engine)
+
+ all_tables = self.meta.tables.values()
+
+ results = {
+ **{table.name: [] for table in all_tables},
+ "alembic_version": [dict(row) for row in self.engine.execute("SELECT * FROM alembic_version").fetchall()],
+ }
+
+ return jsonable_encoder(results)
+
+ def dump(self) -> dict[str, list[dict]]:
+ """
+ Returns the entire SQLAlchemy database as a python dictionary. This dictionary is wrapped by
+ jsonable_encoder to ensure that the object can be converted to a json string.
+ """
+ self.meta.reflect(bind=self.engine) # http://docs.sqlalchemy.org/en/rel_0_9/core/reflection.html
+ result = {
+ table.name: [dict(row) for row in self.engine.execute(table.select())] for table in self.meta.sorted_tables
+ }
+
+ return jsonable_encoder(result)
+
+ def restore(self, db_dump: dict) -> None:
+ """Restores all data from dictionary into the database"""
+ data = AlchemyExporter.convert_to_datetime(db_dump)
+
+ self.meta.reflect(bind=self.engine)
+ for table_name, rows in data.items():
+ if not rows:
+ continue
+
+ table = self.meta.tables[table_name]
+ self.engine.execute(table.delete())
+ self.engine.execute(table.insert(), rows)
+
+ def drop_all(self) -> None:
+ """Drops all data from the database"""
+ self.meta.reflect(bind=self.engine)
+ with self.session_maker() as session:
+ session: Session
+
+ is_postgres = self.settings.DB_ENGINE == "postgres"
+
+ try:
+ if is_postgres:
+ session.execute("SET session_replication_role = 'replica'")
+
+ for table in self.meta.sorted_tables:
+ session.execute(f"DELETE FROM {table.name}")
+ finally:
+ if is_postgres:
+ session.execute("SET session_replication_role = 'origin'")
+ session.commit()
diff --git a/mealie/services/backups_v2/backup_file.py b/mealie/services/backups_v2/backup_file.py
new file mode 100644
index 00000000..187ed3b3
--- /dev/null
+++ b/mealie/services/backups_v2/backup_file.py
@@ -0,0 +1,45 @@
+import json
+import shutil
+import tempfile
+from pathlib import Path
+
+
+class BackupContents:
+ def __init__(self, file: Path) -> None:
+ self.base = file
+ self.data_directory = self.base / "data"
+ self.tables = self.base / "database.json"
+
+ def validate(self) -> bool:
+ if not self.base.is_dir():
+ return False
+
+ if not self.data_directory.is_dir():
+ return False
+
+ if not self.tables.is_file():
+ return False
+
+ return True
+
+ def read_tables(self) -> dict:
+ with open(self.tables) as f:
+ return json.loads(f.read())
+
+
+class BackupFile:
+ temp_dir: Path | None
+
+ def __init__(self, file: Path) -> None:
+ self.zip = file
+
+ def __enter__(self) -> BackupContents:
+ self.temp_dir = Path(tempfile.mkdtemp())
+ shutil.unpack_archive(str(self.zip), str(self.temp_dir))
+ return BackupContents(self.temp_dir)
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ if self.temp_dir and self.temp_dir.is_dir():
+ shutil.rmtree(self.temp_dir)
+
+ self.temp_dir = None
diff --git a/mealie/services/backups_v2/backup_v2.py b/mealie/services/backups_v2/backup_v2.py
new file mode 100644
index 00000000..6c420c38
--- /dev/null
+++ b/mealie/services/backups_v2/backup_v2.py
@@ -0,0 +1,98 @@
+import datetime
+import json
+import shutil
+from pathlib import Path
+from zipfile import ZipFile
+
+from mealie.services._base_service import BaseService
+from mealie.services.backups_v2.alchemy_exporter import AlchemyExporter
+from mealie.services.backups_v2.backup_file import BackupFile
+
+
class BackupV2(BaseService):
    """Creates and restores full application backups (database + data directory)."""

    def __init__(self, db_url: str | None = None) -> None:
        super().__init__()

        # Fall back to the configured DB when no explicit URL is supplied.
        self.db_url = db_url or self.settings.DB_URL
        self.db_exporter = AlchemyExporter(self.db_url)

    def _sqlite(self) -> None:
        """Snapshot the SQLite database file before a restore overwrites it."""
        db_file = self.settings.DB_URL.removeprefix("sqlite:///")

        # Create a backup of the SQLite database
        timestamp = datetime.datetime.now().strftime("%Y.%m.%d")
        shutil.copy(db_file, f"mealie_{timestamp}.bak.db")

    def _postgres(self) -> None:
        # Pre-restore safety snapshot for Postgres is not implemented yet.
        pass

    def backup(self) -> Path:
        """Write a zip containing database.json plus the data directory.

        Returns:
            Path of the created archive inside BACKUP_DIR.
        """
        # sourcery skip: merge-nested-ifs, reintroduce-else, remove-redundant-continue
        exclude = {"mealie.db", "mealie.log", ".secret"}
        exclude_ext = {".zip"}
        exclude_dirs = {"backups"}  # don't recursively back up old backups

        timestamp = datetime.datetime.now().strftime("%Y.%m.%d.%H.%M.%S")

        backup_name = f"mealie_{timestamp}.zip"
        backup_file = self.directories.BACKUP_DIR / backup_name

        database_json = self.db_exporter.dump()

        with ZipFile(backup_file, "w") as zip_file:
            zip_file.writestr("database.json", json.dumps(database_json))

            for data_file in self.directories.DATA_DIR.glob("**/*"):
                if data_file.name in exclude:
                    continue

                if data_file.is_file() and data_file.suffix not in exclude_ext:
                    if data_file.parent.name in exclude_dirs:
                        continue

                    zip_file.write(data_file, f"data/{data_file.relative_to(self.directories.DATA_DIR)}")

        return backup_file

    def _copy_data(self, data_path: Path) -> None:
        """Replace each top-level directory under DATA_DIR with the backup copy.

        Loose files at the top level of the backup are intentionally skipped.
        """
        for f in data_path.iterdir():
            if f.is_file():
                continue

            target = self.directories.DATA_DIR / f.name
            # Guard the rmtree: a directory present in the backup may not exist
            # locally yet, and rmtree on a missing path raises FileNotFoundError.
            if target.is_dir():
                shutil.rmtree(target)
            shutil.copytree(f, target)

    def restore(self, backup_path: Path) -> None:
        """Restore the database and data directory from the given backup zip.

        Raises:
            ValueError: if the archive lacks the data directory or database.json.
        """
        self.logger.info("initiating backup restore")

        backup = BackupFile(backup_path)

        # Snapshot the current database first so a failed restore is recoverable.
        if self.settings.DB_ENGINE == "sqlite":
            self._sqlite()
        elif self.settings.DB_ENGINE == "postgres":
            self._postgres()

        with backup as contents:
            if not contents.validate():
                self.logger.error(
                    "Invalid backup file. file does not contain required elements (data directory and database.json)"
                )
                raise ValueError("Invalid backup file")

            # Purge the Database

            self.logger.info("dropping all database tables")
            self.db_exporter.drop_all()

            database_json = contents.read_tables()

            self.logger.info("importing database tables")
            self.db_exporter.restore(database_json)

            self.logger.info("database tables imported successfully")

            self.logger.info("restoring data directory")
            self._copy_data(contents.data_directory)
            self.logger.info("data directory restored successfully")

        self.logger.info("backup restore complete")
diff --git a/poetry.lock b/poetry.lock
index c1e07372..fb1da482 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -688,11 +688,11 @@ i18n = ["babel (>=2.9.0)"]
[[package]]
name = "mkdocs-material"
-version = "7.3.6"
+version = "8.2.3"
description = "A Material Design theme for MkDocs"
category = "dev"
optional = false
-python-versions = "*"
+python-versions = ">=3.6"
[package.dependencies]
jinja2 = ">=2.11.1"
@@ -1488,7 +1488,7 @@ pgsql = ["psycopg2-binary"]
[metadata]
lock-version = "1.1"
python-versions = "^3.10"
-content-hash = "4c1c1e4eb5026c44d36ede6f44f2675e037d7adaaba9b4ea298e76422e3d3d68"
+content-hash = "00e37f7569d999689984b41bb0085f86e0e902eb1a7cae32d408b079db0ae8d8"
[metadata.files]
aiofiles = [
@@ -2033,8 +2033,8 @@ mkdocs = [
{file = "mkdocs-1.2.3.tar.gz", hash = "sha256:89f5a094764381cda656af4298727c9f53dc3e602983087e1fe96ea1df24f4c1"},
]
mkdocs-material = [
- {file = "mkdocs-material-7.3.6.tar.gz", hash = "sha256:1b1dbd8ef2508b358d93af55a5c5db3f141c95667fad802301ec621c40c7c217"},
- {file = "mkdocs_material-7.3.6-py2.py3-none-any.whl", hash = "sha256:1b6b3e9e09f922c2d7f1160fe15c8f43d4adc0d6fb81aa6ff0cbc7ef5b78ec75"},
+ {file = "mkdocs-material-8.2.3.tar.gz", hash = "sha256:aea074a5b368c8a27c8ae4fe72bd943176512b225541106797e367c62ce3f5a0"},
+ {file = "mkdocs_material-8.2.3-py2.py3-none-any.whl", hash = "sha256:34a3155fe30f3fd697acef230e459e0428acb0481bcbb968e4a94a3ac174af18"},
]
mkdocs-material-extensions = [
{file = "mkdocs-material-extensions-1.0.3.tar.gz", hash = "sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"},
diff --git a/pyproject.toml b/pyproject.toml
index 83df7b36..6cc406c4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -46,7 +46,7 @@ tzdata = "^2021.5"
pylint = "^2.6.0"
pytest = "^6.2.1"
pytest-cov = "^2.11.0"
-mkdocs-material = "^7.0.2"
+mkdocs-material = "^8.2.3"
flake8 = "^4.0.1"
coverage = "^5.5"
pydantic-to-typescript = "^1.0.7"
diff --git a/tests/unit_tests/services_tests/backup_v2_tests/test_alchemy_exporter.py b/tests/unit_tests/services_tests/backup_v2_tests/test_alchemy_exporter.py
new file mode 100644
index 00000000..50dc5094
--- /dev/null
+++ b/tests/unit_tests/services_tests/backup_v2_tests/test_alchemy_exporter.py
@@ -0,0 +1,48 @@
+import json
+
+from mealie.core.config import get_app_settings
+from mealie.services.backups_v2.alchemy_exporter import AlchemyExporter
+
+
def test_alchemy_exporter():
    """Dump the live database: expected alembic revision, JSON-serializable payload."""
    exporter = AlchemyExporter(get_app_settings().DB_URL)
    dump = exporter.dump()

    assert dump["alembic_version"] == [{"version_num": "6b0f5f32d602"}]

    # Round-trip through the json module to prove the payload is serializable.
    assert json.dumps(dump, indent=4)
+
+
def test_validate_schemas():
    """validate_schemas accepts matching versions and ignores row-level differences."""
    reference = {
        "alembic_version": [{"version_num": "6b0f5f32d602"}],
    }
    same_version = {
        "alembic_version": [{"version_num": "6b0f5f32d602"}],
    }
    wrong_version = {
        "alembic_version": [{"version_num": "not-valid-schema"}],
    }

    assert AlchemyExporter.validate_schemas(reference, same_version)
    assert not AlchemyExporter.validate_schemas(reference, wrong_version)

    # Rows may differ between the two dumps; only the schema/version must match.
    reference_with_rows = {
        "alembic_version": [{"version_num": "6b0f5f32d602"}],
        "recipes": [
            {
                "id": 1,
            }
        ],
    }
    candidate_with_rows = {
        "alembic_version": [{"version_num": "6b0f5f32d602"}],
        "recipes": [
            {
                "id": 2,
            }
        ],
    }

    assert AlchemyExporter.validate_schemas(reference_with_rows, candidate_with_rows)
diff --git a/tests/unit_tests/services_tests/backup_v2_tests/test_backup_file.py b/tests/unit_tests/services_tests/backup_v2_tests/test_backup_file.py
new file mode 100644
index 00000000..3c77e6ce
--- /dev/null
+++ b/tests/unit_tests/services_tests/backup_v2_tests/test_backup_file.py
@@ -0,0 +1,56 @@
+import json
+from pathlib import Path
+from zipfile import ZipFile
+
+from mealie.services.backups_v2.backup_file import BackupFile
+from tests import utils
+
+
def zip_factory(temp_dir) -> Path:
    """Create a zip archive containing one dummy file and return its path."""
    archive_path = temp_dir / f"{utils.random_string()}.zip"

    with ZipFile(archive_path, "w") as archive:
        archive.writestr("test.txt", "test")

    return archive_path
+
+
def test_backup_file_context_manager(tmp_path: Path):
    """Entering extracts into a temp dir; exiting removes it and clears the handle."""
    backup = BackupFile(zip_factory(tmp_path))

    with backup:
        extraction_dir = backup.temp_dir
        assert extraction_dir.exists()

    assert not backup.temp_dir
    assert not extraction_dir.exists()
+
+
def test_backup_file_invalid_zip(tmp_path: Path):
    """An archive lacking data/ and database.json must fail validation."""
    archive = zip_factory(tmp_path)

    with BackupFile(archive) as contents:
        assert not contents.validate()
+
+
def test_backup_file_valid_zip(tmp_path: Path):
    """An archive with a data dir and database.json validates and reads back."""
    expected_tables = {"hello": "world"}
    archive = zip_factory(tmp_path)

    # Append the members required for a valid backup.
    with ZipFile(archive, "a") as zip_file:
        zip_file.writestr("data/test.txt", "test")
        zip_file.writestr("database.json", json.dumps(expected_tables))

    with BackupFile(archive) as contents:
        assert contents.validate()

        assert contents.read_tables() == expected_tables
        assert (contents.data_directory / "test.txt").is_file()
diff --git a/tests/unit_tests/services_tests/backup_v2_tests/test_backup_v2.py b/tests/unit_tests/services_tests/backup_v2_tests/test_backup_v2.py
new file mode 100644
index 00000000..92b59a83
--- /dev/null
+++ b/tests/unit_tests/services_tests/backup_v2_tests/test_backup_v2.py
@@ -0,0 +1,58 @@
+import filecmp
+from pathlib import Path
+from typing import Any
+
+from mealie.core.config import get_app_settings
+from mealie.services.backups_v2.alchemy_exporter import AlchemyExporter
+from mealie.services.backups_v2.backup_file import BackupFile
+from mealie.services.backups_v2.backup_v2 import BackupV2
+
+
def dict_sorter(d: dict) -> Any:
    """Sort key for table rows: prefer "created_at", then "id", else a constant.

    The candidate keys are a tuple (not a set): set iteration order is
    hash-dependent, which made the created_at/id priority nondeterministic.
    """
    possible_keys = ("created_at", "id")

    return next((d[key] for key in possible_keys if key in d), 1)
+
+
# For Future Use
def match_file_tree(path_a: Path, path_b: Path):
    """Recursively assert two directory trees contain the same files with
    identical contents.

    Raises:
        AssertionError: on the first missing file or content mismatch.
    """
    if path_a.is_dir() and path_b.is_dir():
        for a_file in path_a.iterdir():
            b_file = path_b.joinpath(a_file.name)
            assert b_file.exists()
            match_file_tree(a_file, b_file)
    else:
        # Bug fix: `filecmp` is a module and is not callable; use filecmp.cmp.
        # shallow=False compares file contents rather than just stat metadata.
        assert filecmp.cmp(path_a, path_b, shallow=False)
+
+
def test_database_backup():
    """A freshly created backup exists on disk and passes validation."""
    archive_path = BackupV2().backup()

    assert archive_path.exists()

    with BackupFile(archive_path) as contents:
        assert contents.validate()
+
+
def test_database_restore():
    """Round-trip: dump, back up, restore, dump again — snapshots must match."""
    settings = get_app_settings()

    # Capture existing database snapshot
    snapshot_before = AlchemyExporter(settings.DB_URL).dump()

    # Create Backup
    backup_v2 = BackupV2(settings.DB_URL)
    path_to_backup = backup_v2.backup()

    assert path_to_backup.exists()
    backup_v2.restore(path_to_backup)

    snapshot_after = AlchemyExporter(settings.DB_URL).dump()

    # Bug fix: list.sort() returns None, so the old `.sort(...) == .sort(...)`
    # compared None to None and always passed. Compare sorted copies instead.
    assert snapshot_before.keys() == snapshot_after.keys()
    for table_name in snapshot_before:
        rows_before = sorted(snapshot_before[table_name], key=dict_sorter)
        rows_after = sorted(snapshot_after[table_name], key=dict_sorter)
        assert rows_before == rows_after