Feature/database backups (#1040)

* add annotations to docs

* alchemy data dumper

* initial tests

* sourcery refactor

* db backups/restore

* potential postgres fix

* potential postgres fix

* this is terrible

* potential pg fix

* cleanup

* remove unused import

* fix comparison

* generate frontend types

* update timestamp and add directory filter

* rewrite to new admin-api

* update backup routers

* add file_token response helper

* update imports

* remove test_backup
Hayden 2022-03-13 15:42:22 -08:00 committed by GitHub
parent 2d1ef7173d
commit 8eefa05393
32 changed files with 756 additions and 229 deletions


@ -19,12 +19,12 @@ services:
environment:
# Set Frontend ENV Variables Here
- ALLOW_SIGNUP=true
- API_URL=http://mealie-api:9000
- API_URL=http://mealie-api:9000 # (1)
restart: always
ports:
- "9925:3000"
- "9925:3000" # (2)
volumes:
- mealie-data:/app/data/
- mealie-data:/app/data/ # (3)
mealie-api:
image: hkotel/mealie:api-nightly
container_name: mealie-api
@ -62,3 +62,9 @@ volumes:
driver: local
```
<!-- Updating This? Be Sure to also update the SQLite Annotations -->
1. Whoa whoa whoa, what is this nonsense? The API_URL is the URL the frontend container uses to proxy API requests to the backend server. In this example, the name `mealie-api` resolves to the `mealie-api` container, which runs the API server on port 9000. This allows you to access the API without exposing an additional port on the host.
<br/> <br/> **Note** that both containers must be on the same Docker network for this to work.
2. To access the Mealie interface you only need to expose port 3000 on the mealie-frontend container. Here we expose port 9925 on the host; feel free to change this to any port you like.
3. Mounting the data directory to the frontend is now required in order to access the images/assets directory. This can be mounted read-only. Internally, the frontend container runs a Caddy proxy server that serves the requested assets, reducing load on the backend API.


@ -17,12 +17,12 @@ services:
environment:
# Set Frontend ENV Variables Here
- ALLOW_SIGNUP=true
- API_URL=http://mealie-api:9000
- API_URL=http://mealie-api:9000 # (1)
restart: always
ports:
- "9925:3000"
- "9925:3000" # (2)
volumes:
- mealie-data:/app/data/
- mealie-data:/app/data/ # (3)
mealie-api:
image: hkotel/mealie:api-nightly
container_name: mealie-api
@ -41,4 +41,11 @@ services:
volumes:
mealie-data:
driver: local
```
```
<!-- Updating This? Be Sure to also update the Postgres Annotations -->
1. Whoa whoa whoa, what is this nonsense? The API_URL is the URL the frontend container uses to proxy API requests to the backend server. In this example, the name `mealie-api` resolves to the `mealie-api` container, which runs the API server on port 9000. This allows you to access the API without exposing an additional port on the host.
<br/> <br/> **Note** that both containers must be on the same Docker network for this to work.
2. To access the Mealie interface you only need to expose port 3000 on the mealie-frontend container. Here we expose port 9925 on the host; feel free to change this to any port you like.
3. Mounting the data directory to the frontend is now required in order to access the images/assets directory. This can be mounted read-only. Internally, the frontend container runs a Caddy proxy server that serves the requested assets, reducing load on the backend API.

File diff suppressed because one or more lines are too long


@ -18,6 +18,7 @@ theme:
name: Switch to light mode
custom_dir: docs/overrides
features:
- content.code.annotate
- navigation.top
# - navigation.instant
- navigation.expand


@ -2,26 +2,23 @@ import { AdminAboutAPI } from "./admin/admin-about";
import { AdminTaskAPI } from "./admin/admin-tasks";
import { AdminUsersApi } from "./admin/admin-users";
import { AdminGroupsApi } from "./admin/admin-groups";
import { AdminBackupsApi } from "./admin/admin-backups";
import { ApiRequestInstance } from "~/types/api";
export class AdminAPI {
private static instance: AdminAPI;
public about: AdminAboutAPI;
public serverTasks: AdminTaskAPI;
public users: AdminUsersApi;
public groups: AdminGroupsApi;
public backups: AdminBackupsApi;
constructor(requests: ApiRequestInstance) {
if (AdminAPI.instance instanceof AdminAPI) {
return AdminAPI.instance;
}
this.about = new AdminAboutAPI(requests);
this.serverTasks = new AdminTaskAPI(requests);
this.users = new AdminUsersApi(requests);
this.groups = new AdminGroupsApi(requests);
this.backups = new AdminBackupsApi(requests);
Object.freeze(this);
AdminAPI.instance = this;
}
}


@ -0,0 +1,33 @@
import { BaseAPI } from "../_base";
import { AllBackups } from "~/types/api-types/admin";
import { ErrorResponse, FileTokenResponse, SuccessResponse } from "~/types/api-types/response";
const prefix = "/api";
const routes = {
base: `${prefix}/admin/backups`,
item: (name: string) => `${prefix}/admin/backups/${name}`,
restore: (name: string) => `${prefix}/admin/backups/${name}/restore`,
};
export class AdminBackupsApi extends BaseAPI {
async getAll() {
return await this.requests.get<AllBackups>(routes.base);
}
async create() {
return await this.requests.post<SuccessResponse | ErrorResponse>(routes.base, {});
}
async get(fileName: string) {
return await this.requests.get<FileTokenResponse>(routes.item(fileName));
}
async delete(fileName: string) {
return await this.requests.delete<SuccessResponse | ErrorResponse>(routes.item(fileName));
}
async restore(fileName: string) {
return await this.requests.post<SuccessResponse | ErrorResponse>(routes.restore(fileName), {});
}
}
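For reference, a minimal sketch of what these client methods do over the wire, written against the routes defined above. The base URL and bearer token are placeholder assumptions; the admin routes require an authenticated admin user.

```python
import requests

BASE_URL = "http://localhost:9000"  # assumption: API reachable locally
HEADERS = {"Authorization": "Bearer <admin-token>"}  # placeholder token

# GET /api/admin/backups -> AllBackups {"imports": [...], "templates": [...]}
all_backups = requests.get(f"{BASE_URL}/api/admin/backups", headers=HEADERS).json()

# POST /api/admin/backups -> create a new backup archive on the server
requests.post(f"{BASE_URL}/api/admin/backups", headers=HEADERS)

if all_backups["imports"]:
    name = all_backups["imports"][0]["name"]
    # GET /api/admin/backups/{name} -> {"file_token": "..."} for download
    token = requests.get(f"{BASE_URL}/api/admin/backups/{name}", headers=HEADERS).json()
    # POST /api/admin/backups/{name}/restore -> restore and log everyone out
    requests.post(f"{BASE_URL}/api/admin/backups/{name}/restore", headers=HEADERS)
```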


@ -3,7 +3,7 @@ import { BaseAPI } from "../_base";
const prefix = "/api";
interface DownloadData {
fileToken: string,
fileToken: string;
}
export class UtilsAPI extends BaseAPI {


@ -9,13 +9,12 @@
<h3 v-if="showTitleEditor[index]" class="mt-2">{{ ingredient.title }}</h3>
<v-divider v-if="showTitleEditor[index]"></v-divider>
<v-list-item dense @click="toggleChecked(index)">
<v-checkbox hide-details :value="checked[index]" class="pt-0 my-auto py-auto" color="secondary"> </v-checkbox>
<v-checkbox hide-details :value="checked[index]" color="secondary" />
<v-list-item-content>
<VueMarkdown
class="ma-0 pa-0 text-subtitle-1 dense-markdown"
:source="parseIngredientText(ingredient, disableAmount, scale)"
>
</VueMarkdown>
/>
</v-list-item-content>
</v-list-item>
</div>
@ -86,4 +85,13 @@ export default defineComponent({
.dense-markdown p {
margin: auto !important;
}
.v-input--selection-controls {
margin-top: 0.5rem;
margin-bottom: auto !important;
}
.v-input--selection-controls__input {
margin-bottom: auto !important;
}
</style>


@ -1,9 +1,7 @@
// TODO: Create a new datatable below to display the import summary json files saved on server (Need to do as well).
<template>
<v-container fluid>
<BannerExperimental issue="https://github.com/hay-kot/mealie/issues/871"></BannerExperimental>
<section>
<BaseCardSectionTitle title="Site Backups"> </BaseCardSectionTitle>
<!-- Delete Dialog -->
<BaseDialog
v-model="deleteDialog"
@ -18,48 +16,44 @@
</BaseDialog>
<!-- Import Dialog -->
<BaseDialog
v-model="importDialog"
:title="selected.name"
:icon="$globals.icons.database"
:submit-text="$t('general.import')"
@submit="importBackup()"
>
<BaseDialog v-model="importDialog" color="error" title="Backup Restore" :icon="$globals.icons.database">
<v-divider></v-divider>
<v-card-text>
<AdminBackupImportOptions v-model="selected.options" class="mt-5 mb-2" :import-backup="true" />
</v-card-text>
Restoring this backup will overwrite all of the current data in your database and in the data directory,
replacing them with the contents of this backup. <b> This action cannot be undone - use with caution. </b> If
the restoration is successful, you will be logged out.
<v-divider></v-divider>
<v-checkbox
v-model="confirmImport"
class="checkbox-top"
color="error"
hide-details
label="I understand that this action is irreversible, destructive and may cause data loss"
></v-checkbox>
</v-card-text>
<v-card-actions class="justify-center pt-0">
<BaseButton delete :disabled="!confirmImport" @click="restoreBackup(selected)">
<template #icon> {{ $globals.icons.database }} </template>
Restore Backup
</BaseButton>
</v-card-actions>
<p class="caption pb-0 mb-1 text-center">
{{ selected.name }}
</p>
</BaseDialog>
<v-card outlined>
<v-card-title class="py-2"> {{ $t("settings.backup.create-heading") }} </v-card-title>
<v-divider class="mx-2"></v-divider>
<v-form @submit.prevent="createBackup()">
<v-card-text>
Lorem ipsum dolor sit, amet consectetur adipisicing elit. Dolores molestiae alias incidunt fugiat!
Recusandae natus numquam iusto voluptates deserunt quia? Sed voluptate rem facilis tempora, perspiciatis
corrupti dolore obcaecati laudantium!
<div style="max-width: 300px">
<v-text-field
v-model="backupOptions.tag"
class="mt-4"
:label="$t('settings.backup.backup-tag') + ' (optional)'"
>
</v-text-field>
<AdminBackupImportOptions v-model="backupOptions.options" class="mt-5 mb-2" />
<v-divider class="my-3"></v-divider>
</div>
<v-card-actions>
<BaseButton type="submit"> </BaseButton>
</v-card-actions>
<section>
<BaseCardSectionTitle title="Backups">
<v-card-text class="py-0 px-1">
Backups are full snapshots of the database and the data directory of the site. They include all data and
cannot be configured to exclude subsets of data. You can think of a backup as a snapshot of Mealie at a
specific point in time. Currently, this backup mechanism is not cross-version and therefore cannot be used
to migrate data between versions (data migrations are not performed automatically). Backups serve as a
database-agnostic way to export and import data, or to back up the site to an external location.
</v-card-text>
</v-form>
</v-card>
</BaseCardSectionTitle>
<BaseButton @click="createBackup"> {{ $t("settings.backup.create-heading") }} </BaseButton>
<section class="mt-5">
<BaseCardSectionTitle title="Backups"></BaseCardSectionTitle>
<v-data-table
:headers="headers"
:items="backups.imports || []"
@ -93,7 +87,7 @@
<AppButtonUpload
:text-btn="false"
class="mr-4"
url="/api/backups/upload"
url="/api/admin/backups/upload"
accept=".zip"
color="info"
@uploaded="refreshBackups()"
@ -102,24 +96,66 @@
</div>
</section>
</section>
<v-container class="mt-4 d-flex justify-end">
<v-btn outlined rounded to="/user/group/data/migrations"> Looking For Migrations? </v-btn>
</v-container>
</v-container>
</template>
<script lang="ts">
import { defineComponent, reactive, toRefs, useContext } from "@nuxtjs/composition-api";
import AdminBackupImportOptions from "@/components/Domain/Admin/AdminBackupImportOptions.vue";
import { useBackups } from "~/composables/use-backups";
import { defineComponent, reactive, ref, toRefs, useContext } from "@nuxtjs/composition-api";
import { onMounted } from "vue-demi";
import { useAdminApi } from "~/composables/api";
import { AllBackups } from "~/types/api-types/admin";
export default defineComponent({
components: { AdminBackupImportOptions },
layout: "admin",
setup() {
const { i18n } = useContext();
const { i18n, $auth } = useContext();
const { selected, backups, backupOptions, deleteTarget, refreshBackups, importBackup, createBackup, deleteBackup } =
useBackups();
const adminApi = useAdminApi();
const selected = ref("");
const backups = ref<AllBackups>({
imports: [],
templates: [],
});
async function refreshBackups() {
const { data } = await adminApi.backups.getAll();
if (data) {
backups.value = data;
}
}
async function createBackup() {
const { data } = await adminApi.backups.create();
if (!data?.error) {
refreshBackups();
}
}
async function restoreBackup(fileName: string) {
const { data } = await adminApi.backups.restore(fileName);
if (!data?.error) {
$auth.logout();
}
}
const deleteTarget = ref("");
async function deleteBackup() {
const { data } = await adminApi.backups.delete(deleteTarget.value);
if (!data?.error) {
refreshBackups();
}
}
const state = reactive({
confirmImport: false,
deleteDialog: false,
createDialog: false,
importDialog: false,
@ -136,22 +172,23 @@ export default defineComponent({
if (selected.value === null || selected.value === undefined) {
return;
}
selected.value.name = data.name;
selected.value = data.name;
state.importDialog = true;
}
const backupsFileNameDownload = (fileName: string) => `api/backups/${fileName}/download`;
const backupsFileNameDownload = (fileName: string) => `api/admin/backups/${fileName}`;
onMounted(refreshBackups);
return {
restoreBackup,
selected,
...toRefs(state),
backupOptions,
backups,
createBackup,
deleteBackup,
setSelected,
deleteTarget,
importBackup,
setSelected,
refreshBackups,
backupsFileNameDownload,
};
@ -163,6 +200,9 @@ export default defineComponent({
},
});
</script>
<style scoped>
</style>
<style>
.v-input--selection-controls__input {
margin-bottom: auto;
}
</style>


@ -99,12 +99,12 @@ export interface RecipeSummary {
dateUpdated?: string;
}
export interface RecipeCategory {
id: string;
id?: string;
name: string;
slug: string;
}
export interface RecipeTag {
id: string;
id?: string;
name: string;
slug: string;
}


@ -24,7 +24,7 @@ export interface ReadCookBook {
position?: number;
categories?: CategoryBase[];
groupId: string;
id: number;
id: string;
}
export interface RecipeCategoryResponse {
name: string;
@ -55,12 +55,12 @@ export interface RecipeSummary {
dateUpdated?: string;
}
export interface RecipeCategory {
id: string;
id?: string;
name: string;
slug: string;
}
export interface RecipeTag {
id: string;
id?: string;
name: string;
slug: string;
}
@ -117,7 +117,7 @@ export interface RecipeCookBook {
position?: number;
categories: RecipeCategoryResponse[];
groupId: string;
id: number;
id: string;
}
export interface SaveCookBook {
name: string;
@ -134,5 +134,5 @@ export interface UpdateCookBook {
position?: number;
categories?: CategoryBase[];
groupId: string;
id: number;
id: string;
}


@ -206,7 +206,7 @@ export interface ReadGroupPreferences {
recipeDisableComments?: boolean;
recipeDisableAmount?: boolean;
groupId: string;
id: number;
id: string;
}
export interface ReadInviteToken {
token: string;
@ -219,7 +219,7 @@ export interface ReadWebhook {
url?: string;
time?: string;
groupId: string;
id: number;
id: string;
}
export interface RecipeSummary {
id?: string;
@ -244,12 +244,12 @@ export interface RecipeSummary {
dateUpdated?: string;
}
export interface RecipeCategory {
id: string;
id?: string;
name: string;
slug: string;
}
export interface RecipeTag {
id: string;
id?: string;
name: string;
slug: string;
}


@ -7,7 +7,7 @@
export type PlanEntryType = "breakfast" | "lunch" | "dinner" | "side";
export type PlanRulesDay = "monday" | "tuesday" | "wednesday" | "thursday" | "friday" | "saturday" | "sunday" | "unset";
export type PlanRulesType = "breakfast" | "lunch" | "dinner" | "unset";
export type PlanRulesType = "breakfast" | "lunch" | "dinner" | "side" | "unset";
export interface Category {
id: string;
@ -118,12 +118,12 @@ export interface RecipeSummary {
dateUpdated?: string;
}
export interface RecipeCategory {
id: string;
id?: string;
name: string;
slug: string;
}
export interface RecipeTag {
id: string;
id?: string;
name: string;
slug: string;
}


@ -67,12 +67,12 @@ export interface CreateRecipeBulk {
tags?: RecipeTag[];
}
export interface RecipeCategory {
id: string;
id?: string;
name: string;
slug: string;
}
export interface RecipeTag {
id: string;
id?: string;
name: string;
slug: string;
}


@ -10,6 +10,9 @@ export interface ErrorResponse {
error?: boolean;
exception?: string;
}
export interface FileTokenResponse {
file_token: string;
}
export interface SuccessResponse {
message: string;
error?: boolean;


@ -74,7 +74,7 @@ export interface ReadGroupPreferences {
recipeDisableComments?: boolean;
recipeDisableAmount?: boolean;
groupId: string;
id: number;
id: string;
}
export interface LoingLiveTokenIn {
name: string;
@ -131,12 +131,12 @@ export interface RecipeSummary {
dateUpdated?: string;
}
export interface RecipeCategory {
id: string;
id?: string;
name: string;
slug: string;
}
export interface RecipeTag {
id: string;
id?: string;
name: string;
slug: string;
}
@ -196,26 +196,12 @@ export interface SavePasswordResetToken {
userId: string;
token: string;
}
export interface SignUpIn {
name: string;
admin: boolean;
}
export interface SignUpOut {
name: string;
admin: boolean;
token: string;
id: number;
}
export interface SignUpToken {
name: string;
admin: boolean;
token: string;
}
export interface Token {
access_token: string;
token_type: string;
}
export interface TokenData {
user_id?: string;
username?: string;
}
export interface UpdateGroup {


@ -5,7 +5,7 @@ from fastapi.middleware.gzip import GZipMiddleware
from mealie.core.config import get_app_settings
from mealie.core.root_logger import get_logger
from mealie.core.settings.static import APP_VERSION
from mealie.routes import backup_routes, router, utility_routes
from mealie.routes import router, utility_routes
from mealie.routes.handlers import register_debug_handler
from mealie.routes.media import media_router
from mealie.services.scheduler import SchedulerRegistry, SchedulerService, tasks
@ -69,7 +69,6 @@ def start_scheduler():
def api_routers():
app.include_router(router)
app.include_router(media_router)
app.include_router(backup_routes.router)
app.include_router(utility_routes.router)


@ -24,7 +24,7 @@ class SQLiteProvider(AbstractDBProvider, BaseModel):
@property
def db_url(self) -> str:
return "sqlite:///" + str(self.db_path.absolute())
return f"sqlite:///{str(self.db_path.absolute())}"
@property
def db_url_public(self) -> str:
@ -59,7 +59,5 @@ class PostgresProvider(AbstractDBProvider, BaseSettings):
def db_provider_factory(provider_name: str, data_dir: Path, env_file: Path, env_encoding="utf-8") -> AbstractDBProvider:
if provider_name == "postgres":
return PostgresProvider(_env_file=env_file, _env_file_encoding=env_encoding)
elif provider_name == "sqlite":
return SQLiteProvider(data_dir=data_dir)
else:
return SQLiteProvider(data_dir=data_dir)


@ -1,6 +1,14 @@
from mealie.routes._base.routers import AdminAPIRouter
from . import admin_about, admin_email, admin_log, admin_management_groups, admin_management_users, admin_server_tasks
from . import (
admin_about,
admin_backups,
admin_email,
admin_log,
admin_management_groups,
admin_management_users,
admin_server_tasks,
)
router = AdminAPIRouter(prefix="/admin")
@ -10,3 +18,4 @@ router.include_router(admin_management_users.router)
router.include_router(admin_management_groups.router)
router.include_router(admin_email.router, tags=["Admin: Email"])
router.include_router(admin_server_tasks.router, tags=["Admin: Server Tasks"])
router.include_router(admin_backups.router)


@ -0,0 +1,95 @@
import operator
import shutil
from pathlib import Path
from fastapi import APIRouter, File, HTTPException, UploadFile, status
from mealie.core.config import get_app_dirs
from mealie.core.security import create_file_token
from mealie.pkgs.stats.fs_stats import pretty_size
from mealie.routes._base import BaseAdminController, controller
from mealie.schema.admin.backup import AllBackups, BackupFile
from mealie.schema.response.responses import FileTokenResponse, SuccessResponse
from mealie.services.backups_v2.backup_v2 import BackupV2
router = APIRouter(prefix="/backups")
@controller(router)
class AdminBackupController(BaseAdminController):
def _backup_path(self, name) -> Path:
return get_app_dirs().BACKUP_DIR / name
@router.get("", response_model=AllBackups)
def get_all(self):
app_dirs = get_app_dirs()
imports = []
for archive in app_dirs.BACKUP_DIR.glob("*.zip"):
backup = BackupFile(
name=archive.name, date=archive.stat().st_ctime, size=pretty_size(archive.stat().st_size)
)
imports.append(backup)
templates = [template.name for template in app_dirs.TEMPLATE_DIR.glob("*.*")]
imports.sort(key=operator.attrgetter("date"), reverse=True)
return AllBackups(imports=imports, templates=templates)
@router.post("", status_code=status.HTTP_201_CREATED, response_model=SuccessResponse)
def create_one(self):
backup = BackupV2()
try:
backup.backup()
except Exception as e:
raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR) from e
return SuccessResponse.respond("Backup created successfully")
@router.get("/{file_name}", response_model=FileTokenResponse)
def get_one(self, file_name: str):
"""Returns a token to download a file"""
file = self._backup_path(file_name)
if not file.exists():
raise HTTPException(status.HTTP_404_NOT_FOUND)
return FileTokenResponse.respond(create_file_token(file))
@router.delete("/{file_name}", status_code=status.HTTP_200_OK, response_model=SuccessResponse)
def delete_one(self, file_name: str):
file = self._backup_path(file_name)
if not file.is_file():
raise HTTPException(status.HTTP_400_BAD_REQUEST)
try:
file.unlink()
except Exception as e:
raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR) from e
return SuccessResponse.respond(f"{file_name} has been deleted.")
@router.post("/upload", response_model=SuccessResponse)
def upload_one(self, archive: UploadFile = File(...)):
"""Upload a .zip File to later be imported into Mealie"""
app_dirs = get_app_dirs()
dest = app_dirs.BACKUP_DIR.joinpath(archive.filename)
with dest.open("wb") as buffer:
shutil.copyfileobj(archive.file, buffer)
if not dest.is_file():
raise HTTPException(status.HTTP_400_BAD_REQUEST)
@router.post("/{file_name}/restore", response_model=SuccessResponse)
def import_one(self, file_name: str):
backup = BackupV2()
file = self._backup_path(file_name)
try:
backup.restore(file)
except Exception as e:
raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR) from e
return SuccessResponse.respond("Restore successful")


@ -1,111 +0,0 @@
import operator
import shutil
from fastapi import Depends, File, HTTPException, UploadFile, status
from sqlalchemy.orm.session import Session
from mealie.core.config import get_app_dirs
from mealie.core.dependencies import get_current_user
from mealie.core.root_logger import get_logger
from mealie.core.security import create_file_token
from mealie.db.db_setup import generate_session
from mealie.pkgs.stats.fs_stats import pretty_size
from mealie.routes._base.routers import AdminAPIRouter
from mealie.schema.admin import AllBackups, BackupFile, CreateBackup, ImportJob
from mealie.schema.user.user import PrivateUser
from mealie.services.backups import imports
from mealie.services.backups.exports import backup_all
router = AdminAPIRouter(prefix="/api/backups", tags=["Backups"])
logger = get_logger()
app_dirs = get_app_dirs()
@router.get("/available", response_model=AllBackups)
def available_imports():
"""Returns a list of avaiable .zip files for import into Mealie."""
imports = []
for archive in app_dirs.BACKUP_DIR.glob("*.zip"):
backup = BackupFile(name=archive.name, date=archive.stat().st_ctime, size=pretty_size(archive.stat().st_size))
imports.append(backup)
templates = [template.name for template in app_dirs.TEMPLATE_DIR.glob("*.*")]
imports.sort(key=operator.attrgetter("date"), reverse=True)
return AllBackups(imports=imports, templates=templates)
@router.post("/export/database", status_code=status.HTTP_201_CREATED)
def export_database(data: CreateBackup, session: Session = Depends(generate_session)):
"""Generates a backup of the recipe database in json format."""
try:
export_path = backup_all(
session=session,
tag=data.tag,
templates=data.templates,
export_recipes=data.options.recipes,
export_users=data.options.users,
export_groups=data.options.groups,
export_notifications=data.options.notifications,
)
return {"export_path": export_path}
except Exception as e:
logger.error(e)
raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR)
@router.post("/upload", status_code=status.HTTP_200_OK)
def upload_backup_file(archive: UploadFile = File(...)):
"""Upload a .zip File to later be imported into Mealie"""
dest = app_dirs.BACKUP_DIR.joinpath(archive.filename)
with dest.open("wb") as buffer:
shutil.copyfileobj(archive.file, buffer)
if not dest.is_file:
raise HTTPException(status.HTTP_400_BAD_REQUEST)
@router.get("/{file_name}/download")
async def download_backup_file(file_name: str):
"""Returns a token to download a file"""
file = app_dirs.BACKUP_DIR.joinpath(file_name)
return {"fileToken": create_file_token(file)}
@router.post("/{file_name}/import", status_code=status.HTTP_200_OK)
def import_database(
import_data: ImportJob,
session: Session = Depends(generate_session),
user: PrivateUser = Depends(get_current_user),
):
"""Import a database backup file generated from Mealie."""
return imports.import_database(
user=user,
session=session,
archive=import_data.name,
import_recipes=import_data.recipes,
import_settings=import_data.settings,
import_users=import_data.users,
import_groups=import_data.groups,
force_import=import_data.force,
rebase=import_data.rebase,
)
@router.delete("/{file_name}/delete", status_code=status.HTTP_200_OK)
def delete_backup(file_name: str):
"""Removes a database backup from the file system"""
file_path = app_dirs.BACKUP_DIR.joinpath(file_name)
if not file_path.is_file():
raise HTTPException(status.HTTP_400_BAD_REQUEST)
try:
file_path.unlink()
except Exception:
raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR)
return {"message": f"{file_name} has been deleted."}


@ -1,5 +1,6 @@
from typing import Optional
from fastapi_camelcase import CamelModel
from pydantic import BaseModel
@ -28,3 +29,15 @@ class SuccessResponse(BaseModel):
in the same call, for use while providing details to a HTTPException
"""
return cls(message=message).dict()
class FileTokenResponse(CamelModel):
file_token: str
@classmethod
def respond(cls, token: str) -> dict:
"""
This method is a helper to create an object and convert it to a dictionary
in the same call, for use when providing details to an HTTPException
"""
return cls(file_token=token).dict()
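A quick sketch of the helper's behavior, assuming pydantic's default (non-alias) serialization, which keeps the snake_case key that the generated frontend types expect:

```python
from mealie.schema.response.responses import FileTokenResponse

payload = FileTokenResponse.respond("example-token")
assert payload == {"file_token": "example-token"}
```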


@ -0,0 +1,138 @@
import datetime
import json
from pathlib import Path
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel
from sqlalchemy import MetaData, create_engine
from sqlalchemy.engine import base
from sqlalchemy.orm import Session, sessionmaker
from mealie.services._base_service import BaseService
class AlchemyExporter(BaseService):
connection_str: str
engine: base.Engine
meta: MetaData
look_for_datetime = {"created_at", "update_at", "date_updated", "timestamp", "expires_at"}
look_for_date = {"date_added", "date"}
class DateTimeParser(BaseModel):
date: datetime.date = None
time: datetime.datetime = None
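# pydantic coerces ISO-8601 strings into date/datetime objects, so this tiny model doubles as a string parser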
def __init__(self, connection_str: str) -> None:
super().__init__()
self.connection_str = connection_str
self.engine = create_engine(connection_str)
self.meta = MetaData()
self.session_maker = sessionmaker(bind=self.engine)
@staticmethod
def convert_to_datetime(data: dict) -> dict:
"""
walks the dictionary to convert all things that look like timestamps to datetime objects
used in the context of reading a json file into a database via SQLAlchemy.
"""
for key, value in data.items():
if isinstance(value, dict):
data[key] = AlchemyExporter.convert_to_datetime(value)
elif isinstance(value, list): # assume that this is a list of dictionaries
data[key] = [AlchemyExporter.convert_to_datetime(item) for item in value]
elif isinstance(value, str):
if key in AlchemyExporter.look_for_datetime:
data[key] = AlchemyExporter.DateTimeParser(time=value).time
if key in AlchemyExporter.look_for_date:
data[key] = AlchemyExporter.DateTimeParser(date=value).date
return data
@staticmethod
def _compare_schemas(schema1: dict, schema2: dict) -> bool:
try:
# validate alembic version(s) are the same
return schema1["alembic_version"] == schema2["alembic_version"]
except KeyError:
return False
@staticmethod
def validate_schemas(schema1: Path | dict, schema2: Path | dict) -> bool:
"""
Validates that the schemas of the two database dumps match. In practice,
this means validating that the alembic versions are the same
"""
def extract_json(file: Path) -> dict:
with open(file) as f:
return json.loads(f.read())
if isinstance(schema1, Path):
schema1 = extract_json(schema1)
if isinstance(schema2, Path):
schema2 = extract_json(schema2)
return AlchemyExporter._compare_schemas(schema1, schema2)
def dump_schema(self) -> dict:
"""
Returns the schema of the SQLAlchemy database as a python dictionary. This dictionary is wrapped by
jsonable_encoder to ensure that the object can be converted to a json string.
"""
self.meta.reflect(bind=self.engine)
all_tables = self.meta.tables.values()
results = {
**{table.name: [] for table in all_tables},
"alembic_version": [dict(row) for row in self.engine.execute("SELECT * FROM alembic_version").fetchall()],
}
return jsonable_encoder(results)
def dump(self) -> dict[str, list[dict]]:
"""
Returns the entire SQLAlchemy database as a python dictionary. This dictionary is wrapped by
jsonable_encoder to ensure that the object can be converted to a json string.
"""
self.meta.reflect(bind=self.engine) # http://docs.sqlalchemy.org/en/rel_0_9/core/reflection.html
result = {
table.name: [dict(row) for row in self.engine.execute(table.select())] for table in self.meta.sorted_tables
}
return jsonable_encoder(result)
def restore(self, db_dump: dict) -> None:
"""Restores all data from dictionary into the database"""
data = AlchemyExporter.convert_to_datetime(db_dump)
self.meta.reflect(bind=self.engine)
for table_name, rows in data.items():
if not rows:
continue
table = self.meta.tables[table_name]
self.engine.execute(table.delete())
self.engine.execute(table.insert(), rows)
def drop_all(self) -> None:
"""Drops all data from the database"""
self.meta.reflect(bind=self.engine)
with self.session_maker() as session:
session: Session
is_postgres = self.settings.DB_ENGINE == "postgres"
try:
if is_postgres:
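# "replica" mode suppresses triggers and foreign-key enforcement, so tables can be emptied in any order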
session.execute("SET session_replication_role = 'replica'")
for table in self.meta.sorted_tables:
session.execute(f"DELETE FROM {table.name}")
finally:
if is_postgres:
session.execute("SET session_replication_role = 'origin'")
session.commit()
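A minimal usage sketch of the exporter, assuming a placeholder sqlite URL; any SQLAlchemy connection string should work:

```python
from mealie.services.backups_v2.alchemy_exporter import AlchemyExporter

exporter = AlchemyExporter("sqlite:////app/data/mealie.db")  # placeholder path

db_dump = exporter.dump()        # every table as a JSON-safe list of row dicts
schema = exporter.dump_schema()  # table names plus the alembic_version rows

# only restore into a database that is on the same alembic revision
if AlchemyExporter.validate_schemas(schema, db_dump):
    exporter.restore(db_dump)
```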


@ -0,0 +1,45 @@
import json
import shutil
import tempfile
from pathlib import Path
class BackupContents:
def __init__(self, file: Path) -> None:
self.base = file
self.data_directory = self.base / "data"
self.tables = self.base / "database.json"
def validate(self) -> bool:
if not self.base.is_dir():
return False
if not self.data_directory.is_dir():
return False
if not self.tables.is_file():
return False
return True
def read_tables(self) -> dict:
with open(self.tables) as f:
return json.loads(f.read())
class BackupFile:
temp_dir: Path | None
def __init__(self, file: Path) -> None:
self.zip = file
def __enter__(self) -> BackupContents:
self.temp_dir = Path(tempfile.mkdtemp())
shutil.unpack_archive(str(self.zip), str(self.temp_dir))
return BackupContents(self.temp_dir)
def __exit__(self, exc_type, exc_val, exc_tb):
if self.temp_dir and self.temp_dir.is_dir():
shutil.rmtree(self.temp_dir)
self.temp_dir = None
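A short usage sketch of the context manager; the archive path is a placeholder assumption:

```python
from pathlib import Path

from mealie.services.backups_v2.backup_file import BackupFile

with BackupFile(Path("/app/data/backups/mealie_2022.03.13.zip")) as contents:
    if contents.validate():
        tables = contents.read_tables()  # database.json as a dict
# the temporary extraction directory is removed when the block exits
```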


@ -0,0 +1,98 @@
import datetime
import json
import shutil
from pathlib import Path
from zipfile import ZipFile
from mealie.services._base_service import BaseService
from mealie.services.backups_v2.alchemy_exporter import AlchemyExporter
from mealie.services.backups_v2.backup_file import BackupFile
class BackupV2(BaseService):
def __init__(self, db_url: str = None) -> None:
super().__init__()
self.db_url = db_url or self.settings.DB_URL
self.db_exporter = AlchemyExporter(self.db_url)
def _sqlite(self) -> None:
db_file = self.settings.DB_URL.removeprefix("sqlite:///")
# Create a backup of the SQLite database
timestamp = datetime.datetime.now().strftime("%Y.%m.%d")
shutil.copy(db_file, f"mealie_{timestamp}.bak.db")
def _postgres(self) -> None:
pass
def backup(self) -> Path:
# sourcery skip: merge-nested-ifs, reintroduce-else, remove-redundant-continue
exclude = {"mealie.db", "mealie.log", ".secret"}
exclude_ext = {".zip"}
exclude_dirs = {"backups"}
timestamp = datetime.datetime.now().strftime("%Y.%m.%d.%H.%M.%S")
backup_name = f"mealie_{timestamp}.zip"
backup_file = self.directories.BACKUP_DIR / backup_name
database_json = self.db_exporter.dump()
with ZipFile(backup_file, "w") as zip_file:
zip_file.writestr("database.json", json.dumps(database_json))
for data_file in self.directories.DATA_DIR.glob("**/*"):
if data_file.name in exclude:
continue
if data_file.is_file() and data_file.suffix not in exclude_ext:
if data_file.parent.name in exclude_dirs:
continue
zip_file.write(data_file, f"data/{data_file.relative_to(self.directories.DATA_DIR)}")
return backup_file
def _copy_data(self, data_path: Path) -> None:
for f in data_path.iterdir():
if f.is_file():
continue
shutil.rmtree(self.directories.DATA_DIR / f.name)
shutil.copytree(f, self.directories.DATA_DIR / f.name)
def restore(self, backup_path: Path) -> None:
self.logger.info("initially backup restore")
backup = BackupFile(backup_path)
if self.settings.DB_ENGINE == "sqlite":
self._sqlite()
elif self.settings.DB_ENGINE == "postgres":
self._postgres()
with backup as contents:
if not contents.validate():
self.logger.error(
"Invalid backup file. file does not contain required elements (data directory and database.json"
)
raise ValueError("Invalid backup file")
# Purge the Database
self.logger.info("dropping all database tables")
self.db_exporter.drop_all()
database_json = contents.read_tables()
self.logger.info("importing database tables")
self.db_exporter.restore(database_json)
self.logger.info("database tables imported successfully")
self.logger.info("restoring data directory")
self._copy_data(contents.data_directory)
self.logger.info("data directory restored successfully")
self.logger.info("backup restore complete")

poetry.lock (generated)

@ -688,11 +688,11 @@ i18n = ["babel (>=2.9.0)"]
[[package]]
name = "mkdocs-material"
version = "7.3.6"
version = "8.2.3"
description = "A Material Design theme for MkDocs"
category = "dev"
optional = false
python-versions = "*"
python-versions = ">=3.6"
[package.dependencies]
jinja2 = ">=2.11.1"
@ -1488,7 +1488,7 @@ pgsql = ["psycopg2-binary"]
[metadata]
lock-version = "1.1"
python-versions = "^3.10"
content-hash = "4c1c1e4eb5026c44d36ede6f44f2675e037d7adaaba9b4ea298e76422e3d3d68"
content-hash = "00e37f7569d999689984b41bb0085f86e0e902eb1a7cae32d408b079db0ae8d8"
[metadata.files]
aiofiles = [
@ -2033,8 +2033,8 @@ mkdocs = [
{file = "mkdocs-1.2.3.tar.gz", hash = "sha256:89f5a094764381cda656af4298727c9f53dc3e602983087e1fe96ea1df24f4c1"},
]
mkdocs-material = [
{file = "mkdocs-material-7.3.6.tar.gz", hash = "sha256:1b1dbd8ef2508b358d93af55a5c5db3f141c95667fad802301ec621c40c7c217"},
{file = "mkdocs_material-7.3.6-py2.py3-none-any.whl", hash = "sha256:1b6b3e9e09f922c2d7f1160fe15c8f43d4adc0d6fb81aa6ff0cbc7ef5b78ec75"},
{file = "mkdocs-material-8.2.3.tar.gz", hash = "sha256:aea074a5b368c8a27c8ae4fe72bd943176512b225541106797e367c62ce3f5a0"},
{file = "mkdocs_material-8.2.3-py2.py3-none-any.whl", hash = "sha256:34a3155fe30f3fd697acef230e459e0428acb0481bcbb968e4a94a3ac174af18"},
]
mkdocs-material-extensions = [
{file = "mkdocs-material-extensions-1.0.3.tar.gz", hash = "sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"},


@ -46,7 +46,7 @@ tzdata = "^2021.5"
pylint = "^2.6.0"
pytest = "^6.2.1"
pytest-cov = "^2.11.0"
mkdocs-material = "^7.0.2"
mkdocs-material = "^8.2.3"
flake8 = "^4.0.1"
coverage = "^5.5"
pydantic-to-typescript = "^1.0.7"


@ -0,0 +1,48 @@
import json
from mealie.core.config import get_app_settings
from mealie.services.backups_v2.alchemy_exporter import AlchemyExporter
def test_alchemy_exporter():
settings = get_app_settings()
exporter = AlchemyExporter(settings.DB_URL)
data = exporter.dump()
assert data["alembic_version"] == [{"version_num": "6b0f5f32d602"}]
assert json.dumps(data, indent=4) # Make sure data is json-serializable
def test_validate_schemas():
schema = {
"alembic_version": [{"version_num": "6b0f5f32d602"}],
}
match = {
"alembic_version": [{"version_num": "6b0f5f32d602"}],
}
invalid_version = {
"alembic_version": [{"version_num": "not-valid-schema"}],
}
assert AlchemyExporter.validate_schemas(schema, match)
assert not AlchemyExporter.validate_schemas(schema, invalid_version)
schema_with_tables = {
"alembic_version": [{"version_num": "6b0f5f32d602"}],
"recipes": [
{
"id": 1,
}
],
}
match_with_tables = {
"alembic_version": [{"version_num": "6b0f5f32d602"}],
"recipes": [
{
"id": 2,
}
],
}
assert AlchemyExporter.validate_schemas(schema_with_tables, match_with_tables)


@ -0,0 +1,56 @@
import json
from pathlib import Path
from zipfile import ZipFile
from mealie.services.backups_v2.backup_file import BackupFile
from tests import utils
def zip_factory(temp_dir) -> Path:
temp_zip = temp_dir / f"{utils.random_string()}.zip"
with ZipFile(temp_zip, "w") as zip_file:
zip_file.writestr("test.txt", "test")
return temp_zip
def test_backup_file_context_manager(tmp_path: Path):
temp_zip = zip_factory(tmp_path)
backup_file = BackupFile(temp_zip)
with backup_file as _:
assert backup_file.temp_dir.exists()
temp_dir_path = backup_file.temp_dir
assert not backup_file.temp_dir
assert not temp_dir_path.exists()
def test_backup_file_invalid_zip(tmp_path: Path):
temp_zip = zip_factory(tmp_path)
backup_file = BackupFile(temp_zip)
with backup_file as content:
assert not content.validate()
def test_backup_file_valid_zip(tmp_path: Path):
dummy_dict = {"hello": "world"}
temp_zip = zip_factory(tmp_path)
# Add contents
with ZipFile(temp_zip, "a") as zip_file:
zip_file.writestr("data/test.txt", "test")
zip_file.writestr("database.json", json.dumps(dummy_dict))
backup_file = BackupFile(temp_zip)
with backup_file as content:
assert content.validate()
assert content.read_tables() == dummy_dict
assert content.data_directory.joinpath("test.txt").is_file()


@ -0,0 +1,58 @@
import filecmp
from pathlib import Path
from typing import Any
from mealie.core.config import get_app_settings
from mealie.services.backups_v2.alchemy_exporter import AlchemyExporter
from mealie.services.backups_v2.backup_file import BackupFile
from mealie.services.backups_v2.backup_v2 import BackupV2
def dict_sorter(d: dict) -> Any:
possible_keys = {"created_at", "id"}
return next((d[key] for key in possible_keys if key in d), 1)
# For Future Use
def match_file_tree(path_a: Path, path_b: Path):
if path_a.is_dir() and path_b.is_dir():
for a_file in path_a.iterdir():
b_file = path_b.joinpath(a_file.name)
assert b_file.exists()
match_file_tree(a_file, b_file)
else:
assert filecmp.cmp(path_a, path_b)
def test_database_backup():
backup_v2 = BackupV2()
path_to_backup = backup_v2.backup()
assert path_to_backup.exists()
backup = BackupFile(path_to_backup)
with backup as contents:
assert contents.validate()
def test_database_restore():
settings = get_app_settings()
# Capture existing database snapshot
original_exporter = AlchemyExporter(settings.DB_URL)
snapshop_1 = original_exporter.dump()
# Create Backup
backup_v2 = BackupV2(settings.DB_URL)
path_to_backup = backup_v2.backup()
assert path_to_backup.exists()
backup_v2.restore(path_to_backup)
new_exporter = AlchemyExporter(settings.DB_URL)
snapshop_2 = new_exporter.dump()
for s1, s2 in zip(snapshop_1, snapshop_2):
assert sorted(snapshop_1[s1], key=dict_sorter) == sorted(snapshop_2[s2], key=dict_sorter)