This commit is contained in:
Keith Martin 2026-01-22 12:38:43 +00:00 committed by GitHub
commit b9db9145f2
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
441 changed files with 38411 additions and 2692 deletions

1
.gitignore vendored
View file

@ -87,3 +87,4 @@ Thumbs.db
.c9revisions
.settings
.swp
.vscode

167
Makefile
View file

@ -72,7 +72,7 @@ watch: watch-js
build-all: build-go build-js
pull: docker-pull
test: test-js test-go
test-go: run-test-go
test-go: reset-mariadb-migrate reset-postgres-migrate run-test-go
test-hub: run-test-hub
test-pkg: run-test-pkg
test-ai: run-test-ai
@ -82,17 +82,35 @@ test-entity: run-test-entity
test-commands: run-test-commands
test-photoprism: run-test-photoprism
test-short: run-test-short
test-mariadb: reset-acceptance run-test-mariadb
acceptance-run-chromium: storage/acceptance acceptance-auth-sqlite-restart wait acceptance-auth acceptance-auth-sqlite-stop acceptance-sqlite-restart wait-2 acceptance acceptance-sqlite-stop
acceptance-run-chromium-short: storage/acceptance acceptance-auth-sqlite-restart wait acceptance-auth-short acceptance-auth-sqlite-stop acceptance-sqlite-restart wait-2 acceptance-short acceptance-sqlite-stop
acceptance-auth-run-chromium: storage/acceptance acceptance-auth-sqlite-restart wait acceptance-auth acceptance-auth-sqlite-stop
acceptance-public-run-chromium: storage/acceptance acceptance-sqlite-restart wait acceptance acceptance-sqlite-stop
test-mariadb: reset-mariadb-testdb reset-mariadb-migrate reset-postgres-migrate run-test-mariadb
test-postgres: reset-postgres-testdb reset-postgres-migrate reset-mariadb-migrate run-test-postgres
test-sqlite: reset-sqlite-unit reset-mariadb-migrate reset-postgres-migrate run-test-sqlite
# SQLite acceptance tests - These set up, configure, and then call the actual tests.
acceptance-run-chromium: storage/acceptance storage/sqlite acceptance-exec-chromium
acceptance-run-chromium-short: storage/acceptance storage/sqlite acceptance-exec-chromium-short
acceptance-auth-run-chromium: storage/acceptance storage/sqlite acceptance-auth-exec-chromium
acceptance-public-run-chromium: storage/acceptance storage/sqlite acceptance-public-exec-chromium
# MariaDB acceptance tests - These set up, configure, and then call the actual tests.
acceptance-mariadb-run-chromium: storage/acceptance storage/mariadb acceptance-exec-chromium
acceptance-mariadb-run-chromium-short: storage/acceptance storage/mariadb acceptance-exec-chromium-short
acceptance-mariadb-auth-run-chromium: storage/acceptance storage/mariadb acceptance-auth-exec-chromium
acceptance-mariadb-public-run-chromium: storage/acceptance storage/mariadb acceptance-public-exec-chromium
# PostgreSQL acceptance tests - These set up, configure, and then call the actual tests.
acceptance-postgres-run-chromium: storage/acceptance storage/postgres acceptance-exec-chromium
acceptance-postgres-run-chromium-short: storage/acceptance storage/postgres acceptance-exec-chromium-short
acceptance-postgres-auth-run-chromium: storage/acceptance storage/postgres acceptance-auth-exec-chromium
acceptance-postgres-public-run-chromium: storage/acceptance storage/postgres acceptance-public-exec-chromium
# The actual tests that are called for acceptance tests. Don't call these directly; use the targets with "run" in the name.
acceptance-exec-chromium: acceptance-file-reset acceptance-database-reset-1 acceptance-auth-start wait-1 acceptance-auth acceptance-auth-stop acceptance-database-reset-2 acceptance-public-start wait-2 acceptance acceptance-public-stop
acceptance-exec-chromium-short: acceptance-file-reset acceptance-database-reset-1 acceptance-auth-start wait-1 acceptance-auth-short acceptance-auth-stop acceptance-database-reset-2 acceptance-public-start wait-2 acceptance-short acceptance-public-stop
acceptance-auth-exec-chromium: acceptance-file-reset acceptance-database-reset-1 acceptance-auth-start wait-1 acceptance-auth acceptance-auth-stop
acceptance-public-exec-chromium: acceptance-file-reset acceptance-database-reset-1 acceptance-public-start wait-1 acceptance acceptance-public-stop
help: list
list:
@awk '/^[[:alnum:]]+[^[:space:]]+:/ {printf "%s",substr($$1,1,length($$1)-1); if (match($$0,/#/)) {desc=substr($$0,RSTART+1); sub(/^[[:space:]]+/,"",desc); printf " - %s\n",desc} else printf "\n" }' "$(firstword $(MAKEFILE_LIST))"
wait:
sleep 20
wait-2:
wait-%:
sleep 20
show-rev:
@git rev-parse HEAD
@ -197,9 +215,7 @@ install-onnx:
sudo scripts/dist/install-onnx.sh
install-darktable:
sudo scripts/dist/install-darktable.sh
acceptance-sqlite-restart:
cp -f storage/acceptance/backup.db storage/acceptance/index.db
cp -f storage/acceptance/config-sqlite/settingsBackup.yml storage/acceptance/config-sqlite/settings.yml
acceptance-file-reset:
rm -rf storage/acceptance/sidecar/2020
rm -rf storage/acceptance/sidecar/2011
rm -rf storage/acceptance/originals/2010
@ -207,15 +223,35 @@ acceptance-sqlite-restart:
rm -rf storage/acceptance/originals/2011
rm -rf storage/acceptance/originals/2013
rm -rf storage/acceptance/originals/2017
./photoprism --auth-mode="public" -c "./storage/acceptance/config-sqlite" start -d
acceptance-sqlite-stop:
./photoprism --auth-mode="public" -c "./storage/acceptance/config-sqlite" stop
acceptance-auth-sqlite-restart:
cp -f storage/acceptance/backup.db storage/acceptance/index.db
cp -f storage/acceptance/config-sqlite/settingsBackup.yml storage/acceptance/config-sqlite/settings.yml
./photoprism --auth-mode="password" -c "./storage/acceptance/config-sqlite" start -d
acceptance-auth-sqlite-stop:
./photoprism --auth-mode="password" -c "./storage/acceptance/config-sqlite" stop
rm storage/acceptance/photoprism.log
acceptance-database-reset-%:
@if [ -f storage/acceptance/config-active/dbms.sqlite ]; then \
echo "resetting sqlite"; \
cp -f storage/acceptance/backup.db storage/acceptance/index.db; \
cp -f storage/acceptance/config-active/settingsBackup.yml storage/acceptance/config-active/settings.yml; \
fi
@if [ -f storage/acceptance/config-active/dbms.mariadb ]; then \
echo "resetting mariadb"; \
cp -f storage/acceptance/backup.db storage/acceptance/index.db; \
mysql < scripts/sql/mariadb/reset-acceptance.sql; \
./photoprism --database-driver sqlite --database-dsn "storage/acceptance/index.db?_busy_timeout=5000&_foreign_keys=on" --transfer-driver mysql --transfer-dsn "$(subst testdb,acceptance,$(PHOTOPRISM_TEST_DSN_MARIADB))" migrations transfer -force; \
cp -f storage/acceptance/config-active/settingsBackup.yml storage/acceptance/config-active/settings.yml; \
fi
@if [ -f storage/acceptance/config-active/dbms.postgresql ]; then \
echo "resetting postgresql"; \
cp -f storage/acceptance/backup.db storage/acceptance/index.db; \
psql postgresql://photoprism:photoprism@postgres:5432/postgres -f scripts/sql/postgresql/reset-acceptance.sql; \
./photoprism --database-driver sqlite --database-dsn "storage/acceptance/index.db?_busy_timeout=5000&_foreign_keys=on" --transfer-driver postgres --transfer-dsn "$(subst testdb,acceptance,$(PHOTOPRISM_TEST_DSN_POSTGRES))" migrations transfer -force; \
cp -f storage/acceptance/config-active/settingsBackup.yml storage/acceptance/config-active/settings.yml; \
fi
acceptance-public-start:
./photoprism --auth-mode="public" -c "./storage/acceptance/config-active" start -d
acceptance-public-stop:
./photoprism --auth-mode="public" -c "./storage/acceptance/config-active" stop
acceptance-auth-start:
./photoprism --auth-mode="password" -c "./storage/acceptance/config-active" start -d
acceptance-auth-stop:
./photoprism --auth-mode="password" -c "./storage/acceptance/config-active" stop
start:
./photoprism start -d
stop:
@ -224,6 +260,8 @@ terminal:
$(DOCKER_COMPOSE) exec -u $(UID) photoprism bash
mariadb:
$(DOCKER_COMPOSE) exec mariadb mariadb -uroot -pphotoprism photoprism
postgres:
$(DOCKER_COMPOSE) exec postgres psql -uphotoprism -pphotoprism photoprism
root: root-terminal
root-terminal:
$(DOCKER_COMPOSE) exec -u root photoprism bash
@ -288,6 +326,20 @@ dep-onnx:
dep-acceptance: storage/acceptance
storage/acceptance:
[ -f "./storage/acceptance/index.db" ] || (cd storage && rm -rf acceptance && wget -c https://dl.photoprism.app/qa/acceptance.tar.gz -O - | tar -xz)
storage/sqlite:
rm -rf storage/acceptance/config-active
cp storage/acceptance/config-sqlite/ storage/acceptance/config-active -r
echo sqlite > storage/acceptance/config-active/dbms.sqlite
storage/mariadb:
rm -rf storage/acceptance/config-active
cp storage/acceptance/config-sqlite/ storage/acceptance/config-active -r
sed "s/DatabaseDriver: sqlite/DatabaseDriver: mysql/;s/DatabaseD[sS][nN][: a-z./]\+/DatabaseDSN: $(subst &,\&,$(subst /,\/,$(PHOTOPRISM_TEST_DSN_MARIADB)))/" storage/acceptance/config-sqlite/options.yml | sed "s/testdb/acceptance/g" > storage/acceptance/config-active/options.yml
echo mariadb > storage/acceptance/config-active/dbms.mariadb
storage/postgres:
rm -rf storage/acceptance/config-active
cp storage/acceptance/config-sqlite/ storage/acceptance/config-active -r
sed "s/DatabaseDriver: sqlite/DatabaseDriver: postgres/;s/DatabaseD[sS][nN][: a-z./]\+/DatabaseDSN: $(subst &,\&,$(subst /,\/,$(PHOTOPRISM_TEST_DSN_POSTGRES)))/" storage/acceptance/config-sqlite/options.yml | sed "s/testdb/acceptance/g" > storage/acceptance/config-active/options.yml
echo postgresql > storage/acceptance/config-active/dbms.postgresql
zip-facenet:
(cd assets && zip -r facenet.zip facenet -x "*/.*" -x "*/version.txt")
zip-nasnet:
@ -389,19 +441,42 @@ vitest-component:
(cd frontend && npm run test-component)
reset-mariadb:
$(info Resetting photoprism database...)
mysql < scripts/sql/reset-photoprism.sql
mysql < scripts/sql/mariadb/reset-photoprism.sql
reset-mariadb-testdb:
$(info Resetting testdb database...)
mysql < scripts/sql/reset-testdb.sql
mysql < scripts/sql/mariadb/reset-testdb.sql
reset-mariadb-local:
$(info Resetting local database...)
mysql < scripts/sql/reset-local.sql
mysql < scripts/sql/mariadb/reset-local.sql
reset-mariadb-acceptance:
$(info Resetting acceptance database...)
mysql < scripts/sql/reset-acceptance.sql
reset-mariadb-all: reset-mariadb-testdb reset-mariadb-local reset-mariadb-acceptance
reset-testdb: reset-sqlite reset-mariadb-testdb
reset-acceptance: reset-mariadb-acceptance
mysql < scripts/sql/mariadb/reset-acceptance.sql
reset-mariadb-migrate:
$(info Resetting migrate database...)
mysql < scripts/sql/mariadb/reset-migrate.sql
reset-sqlite-unit:
$(info Resetting SQLite unit database...)
rm --force ./storage/testdata/unit.test.db
cp ./internal/entity/migrate/testdata/migrate_sqlite3 ./storage/testdata/unit.test.db
reset-mariadb-all: reset-mariadb-testdb reset-mariadb-local reset-mariadb-acceptance reset-mariadb-migrate
reset-postgres:
$(info Resetting photoprism database...)
psql postgresql://photoprism:photoprism@postgres:5432/postgres -f scripts/sql/postgresql/reset-photoprism.sql
reset-postgres-testdb:
$(info Resetting testdb database...)
psql postgresql://photoprism:photoprism@postgres:5432/postgres -f scripts/sql/postgresql/reset-testdb.sql
reset-postgres-local:
$(info Resetting local database...)
psql postgresql://photoprism:photoprism@postgres:5432/postgres -f scripts/sql/postgresql/reset-local.sql
reset-postgres-acceptance:
$(info Resetting acceptance database...)
psql postgresql://photoprism:photoprism@postgres:5432/postgres -f scripts/sql/postgresql/reset-acceptance.sql
reset-postgres-migrate:
$(info Resetting migrate database...)
psql postgresql://photoprism:photoprism@postgres:5432/postgres -f scripts/sql/postgresql/reset-migrate.sql
reset-postgres-all: reset-postgres-testdb reset-postgres-local reset-postgres-acceptance
reset-testdb: reset-sqlite reset-mariadb-testdb reset-postgres-testdb reset-postgres-migrate
# reset-acceptance: reset-mariadb-acceptance
reset-sqlite:
$(info Removing test database files...)
find ./internal -type f \( -iname '.*.db' -o -iname '.*.db-journal' -o -iname '.test.*' \) -delete
@ -416,7 +491,13 @@ run-test-hub:
env PHOTOPRISM_TEST_HUB="true" $(GOTEST) -parallel 1 -count 1 -cpu 1 -tags="slow,develop,debug" -timeout 20m ./pkg/... ./internal/...
run-test-mariadb:
$(info Running all Go tests on MariaDB...)
PHOTOPRISM_TEST_DRIVER="mysql" PHOTOPRISM_TEST_DSN="root:photoprism@tcp(mariadb:4001)/acceptance?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true" $(GOTEST) -parallel 1 -count 1 -cpu 1 -tags="slow,develop" -timeout 20m ./pkg/... ./internal/...
PHOTOPRISM_TEST_DSN_NAME="mariadb" $(GOTEST) -parallel 1 -count 1 -cpu 1 -tags="slow,develop" -timeout 20m ./pkg/... ./internal/...
run-test-postgres:
$(info Running all Go tests on PostgreSQL...)
PHOTOPRISM_TEST_DSN_NAME="postgres" $(GOTEST) -parallel 1 -count 1 -cpu 1 -tags="slow,develop" -timeout 20m ./pkg/... ./internal/...
run-test-sqlite:
$(info Running all Go tests on SQLite...)
PHOTOPRISM_TEST_DSN_NAME="sqlitefile" $(GOTEST) -parallel 1 -count 1 -cpu 1 -tags "slow,develop" -timeout 20m ./pkg/... ./internal/...
run-test-pkg:
$(info Running all Go tests in "/pkg"...)
$(GOTEST) -parallel 2 -count 1 -cpu 2 -tags="slow,develop" -timeout 20m ./pkg/...
@ -466,6 +547,24 @@ git-pull:
echo "Updating photoprism/$$d"; \
git -C "$$d" pull --ff-only || echo "Warning: git pull failed in $$d"; \
done;
test-sqlite-benchmark10x:
$(info Running all Go tests with benchmarks...)
dirname $$(grep --files-with-matches --include "*_test.go" -oP "(?<=func )Benchmark[A-Za-z_]+(?=\(b \*testing\.B)" --recursive ./*) | sort -u | xargs -n1 bash -c 'cd "$$0" && pwd && go test -skip Test -parallel 4 -count 10 -cpu 4 -failfast -tags slow -timeout 30m -benchtime 1s -bench=.'
test-sqlite-benchmark10s:
$(info Running all Go tests with benchmarks...)
dirname $$(grep --files-with-matches --include "*_test.go" -oP "(?<=func )Benchmark[A-Za-z_]+(?=\(b \*testing\.B)" --recursive ./*) | sort -u | xargs -n1 bash -c 'cd "$$0" && pwd && go test -skip Test -parallel 4 -count 1 -cpu 4 -failfast -tags slow -timeout 30m -benchtime 10s -bench=.'
test-mariadb-benchmark10x:
$(info Running all Go tests with benchmarks...)
dirname $$(grep --files-with-matches --include "*_test.go" -oP "(?<=func )Benchmark[A-Za-z_]+(?=\(b \*testing\.B)" --recursive ./*) | sort -u | xargs -n1 bash -c 'cd "$$0" && pwd && PHOTOPRISM_TEST_DSN_NAME="mariadb" go test -skip Test -parallel 4 -count 10 -cpu 4 -failfast -tags slow -timeout 30m -benchtime 1s -bench=.'
test-mariadb-benchmark10s:
$(info Running all Go tests with benchmarks...)
dirname $$(grep --files-with-matches --include "*_test.go" -oP "(?<=func )Benchmark[A-Za-z_]+(?=\(b \*testing\.B)" --recursive ./*) | sort -u | xargs -n1 bash -c 'cd "$$0" && pwd && PHOTOPRISM_TEST_DSN_NAME="mariadb" go test -skip Test -parallel 4 -count 1 -cpu 4 -failfast -tags slow -timeout 30m -benchtime 10s -bench=.'
test-postgres-benchmark10x:
$(info Running all Go tests with benchmarks...)
dirname $$(grep --files-with-matches --include "*_test.go" -oP "(?<=func )Benchmark[A-Za-z_]+(?=\(b \*testing\.B)" --recursive ./*) | sort -u | xargs -n1 bash -c 'cd "$$0" && pwd && PHOTOPRISM_TEST_DSN_NAME="postgres" go test -skip Test -parallel 4 -count 10 -cpu 4 -failfast -tags slow -timeout 30m -benchtime 1s -bench=.'
test-postgres-benchmark10s:
$(info Running all Go tests with benchmarks...)
dirname $$(grep --files-with-matches --include "*_test.go" -oP "(?<=func )Benchmark[A-Za-z_]+(?=\(b \*testing\.B)" --recursive ./*) | sort -u | xargs -n1 bash -c 'cd "$$0" && pwd && PHOTOPRISM_TEST_DSN_NAME="postgres" go test -skip Test -parallel 4 -count 1 -cpu 4 -failfast -tags slow -timeout 30m -benchtime 10s -bench=.'
docker-pull:
$(DOCKER_COMPOSE) --profile=all pull --ignore-pull-failures
$(DOCKER_COMPOSE) -f compose.latest.yaml pull --ignore-pull-failures
@ -961,5 +1060,15 @@ dummy-ldap:
$(DOCKER_COMPOSE) stop dummy-ldap
$(DOCKER_COMPOSE) up -d -V --force-recreate dummy-ldap
# PostgreSQL-specific targets:
start-alldbms:
$(DOCKER_COMPOSE) -f compose.alldbms.yaml up
start-postgres:
$(DOCKER_COMPOSE) -f compose.postgres.yaml up
docker-postgres:
docker pull --platform=amd64 photoprism/develop:plucky
docker pull --platform=amd64 photoprism/develop:plucky-slim
scripts/docker/buildx-multi.sh photoprism linux/amd64 postgres /plucky
# Declare all targets as "PHONY", see https://www.gnu.org/software/make/manual/html_node/Phony-Targets.html.
MAKEFLAGS += --always-make

472
compose.alldbms.yaml Normal file
View file

@ -0,0 +1,472 @@
## FOR TEST AND DEVELOPMENT ONLY, DO NOT USE IN PRODUCTION ##
## Setup: https://docs.photoprism.app/developer-guide/setup/ ##
services:
## PhotoPrism (Development Environment for All DBMS)
photoprism:
build: .
image: photoprism/photoprism:develop
depends_on:
- postgres
- mariadb
- dummy-webdav
- dummy-oidc
stop_grace_period: 15s
security_opt:
- seccomp:unconfined
- apparmor:unconfined
## Expose HTTP and debug ports
ports:
- "2342:2342" # Default HTTP port (host:container)
- "2443:2443" # Default TLS port (host:container)
- "2343:2343" # Acceptance Test HTTP port (host:container)
- "40000:40000" # Go Debugger (host:container)
shm_size: "2gb"
## Set links and labels for use with Traefik reverse proxy
links:
- "traefik:localssl.dev"
- "traefik:app.localssl.dev"
- "traefik:vision.localssl.dev"
- "traefik:qdrant.localssl.dev"
- "traefik:keycloak.localssl.dev"
- "traefik:dummy-oidc.localssl.dev"
- "traefik:dummy-webdav.localssl.dev"
labels:
- "traefik.enable=true"
- "traefik.docker.network=photoprism"
- "traefik.http.services.photoprism.loadbalancer.server.port=2342"
- "traefik.http.services.photoprism.loadbalancer.server.scheme=http"
- "traefik.http.routers.photoprism.entrypoints=websecure"
- "traefik.http.routers.photoprism.rule=Host(`localssl.dev`) || HostRegexp(`^.+\\.localssl\\.dev`)"
- "traefik.http.routers.photoprism.priority=2"
- "traefik.http.routers.photoprism.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.photoprism.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.photoprism.tls=true"
## Override variables with optional env file, see https://docs.docker.com/reference/compose-file/services/#required
env_file:
- path: ".env"
required: false
## Configure development environment
environment:
## Run as a non-root user after initialization (supported: 0, 33, 50-99, 500-600, and 900-1200):
PHOTOPRISM_UID: ${UID:-1000} # user id, should match your host user id
PHOTOPRISM_GID: ${GID:-1000} # group id
## Access Management:
PHOTOPRISM_ADMIN_USER: "admin" # admin login username
PHOTOPRISM_ADMIN_PASSWORD: "photoprism" # initial admin password (8-72 characters)
PHOTOPRISM_AUTH_MODE: "password" # authentication mode (public, password)
PHOTOPRISM_REGISTER_URI: "https://keycloak.localssl.dev/admin/"
PHOTOPRISM_PASSWORD_RESET_URI: "https://keycloak.localssl.dev/realms/master/login-actions/reset-credentials"
PHOTOPRISM_USAGE_INFO: "true"
PHOTOPRISM_FILES_QUOTA: "100"
## Customization:
PHOTOPRISM_DEFAULT_LOCALE: "en" # default user interface language, e.g. "en" or "de"
PHOTOPRISM_PLACES_LOCALE: "local" # location details language, e.g. "local", "en", or "de"
## OpenID Connect (pre-configured for local tests):
## see https://keycloak.localssl.dev/realms/master/.well-known/openid-configuration
PHOTOPRISM_OIDC_URI: "https://keycloak.localssl.dev/realms/master"
PHOTOPRISM_OIDC_CLIENT: "photoprism-develop"
PHOTOPRISM_OIDC_SECRET: "9d8351a0-ca01-4556-9c37-85eb634869b9"
PHOTOPRISM_OIDC_PROVIDER: "Keycloak"
PHOTOPRISM_OIDC_REGISTER: "true"
PHOTOPRISM_OIDC_WEBDAV: "true"
PHOTOPRISM_DISABLE_OIDC: "false"
## LDAP Authentication (pre-configured for local tests):
PHOTOPRISM_LDAP_URI: "ldap://dummy-ldap:389"
PHOTOPRISM_LDAP_INSECURE: "true"
PHOTOPRISM_LDAP_SYNC: "true"
PHOTOPRISM_LDAP_BIND: "simple"
PHOTOPRISM_LDAP_BIND_DN: "cn"
PHOTOPRISM_LDAP_BASE_DN: "dc=localssl,dc=dev"
PHOTOPRISM_LDAP_ROLE: ""
PHOTOPRISM_LDAP_ROLE_DN: "ou=photoprism-*,ou=groups,dc=localssl,dc=dev"
PHOTOPRISM_LDAP_WEBDAV_DN: "ou=photoprism-webdav,ou=groups,dc=localssl,dc=dev"
## HTTPS/TLS Options:
## see https://docs.photoprism.app/getting-started/using-https/
PHOTOPRISM_DISABLE_TLS: "true"
PHOTOPRISM_DEFAULT_TLS: "true"
## Site Information:
PHOTOPRISM_SITE_URL: "https://app.localssl.dev/" # server URL in the format "http(s)://domain.name(:port)/(path)"
PHOTOPRISM_SITE_CAPTION: "AI-Powered Photos App"
PHOTOPRISM_SITE_DESCRIPTION: "Tags and finds pictures without getting in your way!"
PHOTOPRISM_SITE_AUTHOR: "@photoprism_app"
PHOTOPRISM_DEBUG: "true"
PHOTOPRISM_READONLY: "false"
PHOTOPRISM_EXPERIMENTAL: "true"
PHOTOPRISM_HTTP_MODE: "debug"
PHOTOPRISM_HTTP_HOST: "0.0.0.0"
PHOTOPRISM_HTTP_PORT: 2342
PHOTOPRISM_HTTP_COMPRESSION: "gzip" # improves transfer speed and bandwidth utilization (none or gzip)
PHOTOPRISM_DATABASE_DRIVER: "postgres"
PHOTOPRISM_DATABASE_SERVER: "postgres:5432"
PHOTOPRISM_DATABASE_NAME: "photoprism"
PHOTOPRISM_DATABASE_USER: "photoprism"
PHOTOPRISM_DATABASE_PASSWORD: "photoprism"
PHOTOPRISM_TEST_DSN_NAME: "sqlite"
# PHOTOPRISM_TEST_DSN_MYSQL8: "root:photoprism@tcp(mysql:4001)/photoprism?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s"
PHOTOPRISM_TEST_DSN_MARIADB: "root:photoprism@tcp(mariadb:4001)/testdb?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true"
PHOTOPRISM_TEST_DSN_SQLITE: ""
PHOTOPRISM_TEST_DSN_SQLITEFILE: "file:/go/src/github.com/photoprism/photoprism/storage/testdata/unit.test.db?_foreign_keys=on&_busy_timeout=5000"
PHOTOPRISM_TEST_DSN_POSTGRES: "postgresql://testdb:testdb@postgres:5432/testdb?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable"
PHOTOPRISM_ASSETS_PATH: "/go/src/github.com/photoprism/photoprism/assets"
PHOTOPRISM_STORAGE_PATH: "/go/src/github.com/photoprism/photoprism/storage"
PHOTOPRISM_ORIGINALS_PATH: "/go/src/github.com/photoprism/photoprism/storage/originals"
PHOTOPRISM_ORIGINALS_LIMIT: 128000 # sets originals file size limit to 128 GB
PHOTOPRISM_IMPORT_PATH: "/go/src/github.com/photoprism/photoprism/storage/import"
PHOTOPRISM_DISABLE_CHOWN: "false" # disables updating storage permissions via chmod and chown on startup
PHOTOPRISM_DISABLE_BACKUPS: "false" # disables backing up albums and photo metadata to YAML files
PHOTOPRISM_DISABLE_WEBDAV: "false" # disables built-in WebDAV server
PHOTOPRISM_DISABLE_SETTINGS: "false" # disables settings UI and API
PHOTOPRISM_DISABLE_PLACES: "false" # disables reverse geocoding and maps
PHOTOPRISM_DISABLE_EXIFTOOL: "false" # disables creating JSON metadata sidecar files with ExifTool
PHOTOPRISM_DISABLE_TENSORFLOW: "false" # disables all features depending on TensorFlow
PHOTOPRISM_DISABLE_RAW: "false" # disables indexing and conversion of RAW images
PHOTOPRISM_RAW_PRESETS: "false" # enables applying user presets when converting RAW images (reduces performance)
PHOTOPRISM_DETECT_NSFW: "false" # automatically flags photos as private that MAY be offensive (requires TensorFlow)
PHOTOPRISM_UPLOAD_NSFW: "false" # allows uploads that MAY be offensive (no effect without TensorFlow)
PHOTOPRISM_UPLOAD_ALLOW: "" # restricts uploads to these file types (comma-separated list of EXTENSIONS; leave blank to allow all)
PHOTOPRISM_UPLOAD_ARCHIVES: "true" # allows upload of zip archives (will be extracted before import)
PHOTOPRISM_THUMB_LIBRARY: "auto" # image processing library to be used for generating thumbnails (auto, imaging, vips)
PHOTOPRISM_THUMB_FILTER: "auto" # downscaling filter (imaging best to worst: blackman, lanczos, cubic, linear, nearest)
PHOTOPRISM_THUMB_UNCACHED: "true" # enables on-demand thumbnail rendering (high memory and cpu usage)
## Video Transcoding (https://docs.photoprism.app/getting-started/advanced/transcoding/):
# PHOTOPRISM_FFMPEG_ENCODER: "software" # H.264/AVC encoder (software, intel, nvidia, apple, raspberry, or vaapi)
# LIBVA_DRIVER_NAME: "i965" # For Intel architectures Haswell and older which do not support QSV yet but use VAAPI instead
PHOTOPRISM_FFMPEG_SIZE: "1920" # video size limit in pixels (720-7680) (default: 3840)
# PHOTOPRISM_FFMPEG_BITRATE: "64" # video bitrate limit in Mbps (default: 60)
## Run/install on first startup (options: update tensorflow https intel gpu davfs yt-dlp):
PHOTOPRISM_INIT: "https postgresql"
## Computer Vision API (https://docs.photoprism.app/getting-started/config-options/#computer-vision):
PHOTOPRISM_VISION_API: "true" # server: enables service API endpoints under /api/v1/vision (requires access token)
PHOTOPRISM_VISION_URI: "" # client: service URI, e.g. http://hostname/api/v1/vision (leave blank to disable)
PHOTOPRISM_VISION_KEY: "" # client: service access token (for authentication)
## External dependencies and tools:
TF_CPP_MIN_LOG_LEVEL: 1
GOCACHE: "/go/src/github.com/photoprism/photoprism/.local/gocache"
CODEX_HOME: "/go/src/github.com/photoprism/photoprism/.local/codex"
## Shared devices for video hardware transcoding (optional):
# devices:
# - "/dev/dri:/dev/dri" # Required Intel QSV or VAAPI hardware transcoding
# - "/dev/video11:/dev/video11" # Video4Linux Video Encode Device (h264_v4l2m2m)
working_dir: "/go/src/github.com/photoprism/photoprism"
volumes:
- ".:/go/src/github.com/photoprism/photoprism"
- "./storage:/photoprism"
- "go-mod:/go/pkg/mod"
## PostgreSQL Database Server
## Docs: https://www.postgresql.org/docs/
postgres:
image: postgres:17-alpine
stop_grace_period: 15s
expose:
- "5432"
ports:
- "5432:5432" # database port (host:container)
volumes:
- "postgresql:/var/lib/postgresql"
- "./scripts/sql/postgresql-init.sql:/docker-entrypoint-initdb.d/init.sql"
environment:
POSTGRES_DB: photoprism
POSTGRES_USER: photoprism
POSTGRES_PASSWORD: photoprism
## MariaDB (Database Server)
## Docs: https://mariadb.com/docs/reference/
## Release Notes: https://mariadb.com/kb/en/changes-improvements-in-mariadb-1011/
mariadb:
image: mariadb:11
stop_grace_period: 15s
security_opt: # see https://github.com/MariaDB/mariadb-docker/issues/434#issuecomment-1136151239
- seccomp:unconfined
- apparmor:unconfined
command: --port=4001 --innodb-strict-mode=1 --innodb-buffer-pool-size=256M --transaction-isolation=READ-COMMITTED --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci --max-connections=512 --innodb-rollback-on-timeout=OFF --innodb-lock-wait-timeout=120
expose:
- "4001"
ports:
- "4001:4001" # database port (host:container)
volumes:
- "mariadb:/var/lib/mysql"
- "./scripts/sql/mariadb-init.sql:/docker-entrypoint-initdb.d/init.sql"
environment:
MARIADB_AUTO_UPGRADE: "1"
MARIADB_INITDB_SKIP_TZINFO: "1"
MARIADB_DATABASE: "photoprism"
MARIADB_USER: "photoprism"
MARIADB_PASSWORD: "photoprism"
MARIADB_ROOT_PASSWORD: "photoprism"
## Qdrant (Vector Database)
## Docs: https://qdrant.tech/documentation/guides/installation/#docker-compose
## Release Notes: https://github.com/qdrant/qdrant/releases
## Web UI: https://qdrant.localssl.dev/dashboard
qdrant:
image: qdrant/qdrant:latest
profiles: [ "all", "qdrant" ]
stop_grace_period: 15s
links:
- "traefik:localssl.dev"
- "traefik:app.localssl.dev"
- "traefik:vision.localssl.dev"
labels:
- "traefik.enable=true"
- "traefik.http.services.qdrant.loadbalancer.server.port=6333"
- "traefik.http.services.qdrant.loadbalancer.server.scheme=http"
- "traefik.http.routers.qdrant.entrypoints=websecure"
- "traefik.http.routers.qdrant.rule=Host(`qdrant.localssl.dev`)"
- "traefik.http.routers.qdrant.priority=3"
- "traefik.http.routers.qdrant.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.qdrant.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.qdrant.tls=true"
expose:
- 6333
- 6334
- 6335
volumes:
- "./.qdrant.yaml:/qdrant/config/production.yaml"
- "./storage/services/qdrant:/qdrant/storage"
## Ollama Large-Language Model Runner
## run "ollama pull [name]:[version]" to download a vision model
## listed at <https://ollama.com/search?c=vision>, for example:
## docker compose exec ollama ollama pull gemma3:latest
ollama:
image: ollama/ollama:latest
stop_grace_period: 10s
## Only starts this service if the "all", "ollama", or "vision" profile is specified:
## docker compose --profile ollama up -d
profiles: [ "all", "ollama", "vision" ]
## Insecurely exposes the Ollama service on port 11434
## without authentication (for private networks only):
# ports:
# - "11434:11434"
labels:
- "traefik.enable=true"
- "traefik.docker.network=photoprism"
- "traefik.http.services.ollama.loadbalancer.server.port=11434"
- "traefik.http.routers.ollama.rule=Host(`ollama.localssl.dev`)"
- "traefik.http.routers.ollama.entrypoints=websecure"
- "traefik.http.routers.ollama.tls=true"
environment:
## Ollama Configuration Options:
OLLAMA_HOST: "0.0.0.0:11434"
OLLAMA_MODELS: "/root/.ollama" # model storage path (see volumes section below)
OLLAMA_MAX_QUEUE: "100" # maximum number of queued requests
OLLAMA_NUM_PARALLEL: "1" # maximum number of parallel requests
OLLAMA_MAX_LOADED_MODELS: "1" # maximum number of loaded models per GPU
OLLAMA_LOAD_TIMEOUT: "5m" # maximum time for loading models (default "5m")
OLLAMA_KEEP_ALIVE: "5m" # duration that models stay loaded in memory (default "5m")
OLLAMA_CONTEXT_LENGTH: "4096" # maximum input context length
OLLAMA_MULTIUSER_CACHE: "false" # optimize prompt caching for multi-user scenarios
OLLAMA_NOPRUNE: "false" # disables pruning of model blobs at startup
OLLAMA_NOHISTORY: "true" # disables readline history
OLLAMA_FLASH_ATTENTION: "false" # enables the experimental flash attention feature
OLLAMA_KV_CACHE_TYPE: "f16" # cache quantization (f16, q8_0, or q4_0)
OLLAMA_SCHED_SPREAD: "false" # allows scheduling models across all GPUs.
OLLAMA_NEW_ENGINE: "true" # enables the new Ollama engine
# OLLAMA_DEBUG: "true" # shows additional debug information
# OLLAMA_INTEL_GPU: "true" # enables experimental Intel GPU detection
## NVIDIA GPU Hardware Acceleration (optional):
# NVIDIA_VISIBLE_DEVICES: "all"
# NVIDIA_DRIVER_CAPABILITIES: "compute,utility"
volumes:
- "./storage/services/ollama:/root/.ollama"
## NVIDIA GPU Hardware Acceleration (optional):
# deploy:
# resources:
# reservations:
# devices:
# - driver: "nvidia"
# capabilities: [ gpu ]
# count: "all"
## Open WebUI, a Web Interface for Ollama
## see https://github.com/open-webui/open-webui
open-webui:
image: ghcr.io/open-webui/open-webui:main
stop_grace_period: 10s
## Only starts this service if the "all", "ollama", "open-webui", or "vision" profile is specified:
## docker compose --profile ollama up -d
profiles: [ "all", "ollama", "open-webui", "vision" ]
## Exposes Open WebUI at http://localhost:8080 (use https://chat.localssl.dev/ to access it through Traefik):
ports:
- "127.0.0.1:8080:8080"
labels:
- "traefik.enable=true"
- "traefik.docker.network=photoprism"
- "traefik.http.services.open-webui.loadbalancer.server.port=8080"
- "traefik.http.routers.open-webui.rule=Host(`chat.localssl.dev`) || Host(`open-webui.localssl.dev`) || Host(`ollama-ui.localssl.dev`)"
- "traefik.http.routers.open-webui.entrypoints=websecure"
- "traefik.http.routers.open-webui.tls=true"
environment:
WEBUI_URL: "https://chat.localssl.dev"
# WEBUI_SECRET_KEY: ""
OLLAMA_BASE_URL: "http://ollama:11434"
ANONYMIZED_TELEMETRY: "false" # disable Chroma telemetry
HF_HUB_DISABLE_TELEMETRY: "1" # disable Hugging Face telemetry
# HUGGING_FACE_HUB_TOKEN: "" # see https://huggingface.co/docs/hub/en/security-tokens
volumes:
- "./storage/services/open-webui:/app/backend/data"
## PhotoPrism® Computer Vision API
## see https://github.com/photoprism/photoprism-vision
photoprism-vision:
image: photoprism/vision:latest
stop_grace_period: 15s
## Only starts this service if the "all" or "vision" profile is specified:
## docker compose --profile vision up -d
profiles: [ "all", "vision" ]
working_dir: "/app"
links:
- "traefik:localssl.dev"
- "traefik:app.localssl.dev"
- "traefik:qdrant.localssl.dev"
labels:
- "traefik.enable=true"
- "traefik.http.services.qdrant.loadbalancer.server.port=5000"
- "traefik.http.services.qdrant.loadbalancer.server.scheme=http"
- "traefik.http.routers.qdrant.entrypoints=websecure"
- "traefik.http.routers.qdrant.rule=Host(`vision.localssl.dev`)"
- "traefik.http.routers.qdrant.priority=3"
- "traefik.http.routers.qdrant.tls.domains[0].main=localssl.dev"
- "traefik.http.routers.qdrant.tls.domains[0].sans=*.localssl.dev"
- "traefik.http.routers.qdrant.tls=true"
expose:
- 5000
environment:
TF_CPP_MIN_LOG_LEVEL: 2
## Ollama client configuration (for the service, see below):
OLLAMA_ENABLED: "true"
OLLAMA_HOST: "http://ollama:11434"
  ## Traefik v3 (Reverse Proxy)
  ## Terminates TLS for all *.localssl.dev development hostnames and routes
  ## requests to the other services based on their "traefik.*" labels.
  ## includes "*.localssl.dev" SSL certificate for test environments
  ## Docs: https://doc.traefik.io/traefik/
  traefik:
    image: photoprism/traefik:latest
    stop_grace_period: 15s
    security_opt:
      - no-new-privileges:true # processes in this container cannot gain additional privileges
    ports:
      - "80:80" # HTTP (redirects to HTTPS)
      - "443:443" # HTTPS (required)
    labels:
      - "traefik.enable=true"
    volumes:
      ## NOTE: mounting the Docker socket grants this container full access to
      ## the Docker daemon — acceptable for a local dev proxy, not for production.
      - "/var/run/docker.sock:/var/run/docker.sock" # enables Traefik to watch services
  ## Dummy WebDAV Server
  ## Test-only WebDAV endpoint with fixed credentials, reachable via Traefik
  ## at https://dummy-webdav.localssl.dev/ (do not use outside development).
  dummy-webdav:
    image: photoprism/dummy-webdav:240627
    stop_grace_period: 30s
    environment:
      WEBDAV_USERNAME: admin
      WEBDAV_PASSWORD: photoprism
    labels:
      - "traefik.enable=true"
      - "traefik.http.services.dummy-webdav.loadbalancer.server.port=80"
      - "traefik.http.routers.dummy-webdav.entrypoints=websecure"
      - "traefik.http.routers.dummy-webdav.rule=Host(`dummy-webdav.localssl.dev`)"
      - "traefik.http.routers.dummy-webdav.priority=3"
      - "traefik.http.routers.dummy-webdav.tls.domains[0].main=localssl.dev"
      - "traefik.http.routers.dummy-webdav.tls.domains[0].sans=*.localssl.dev"
      - "traefik.http.routers.dummy-webdav.tls=true"
  ## Dummy OIDC Identity Provider
  ## Test-only OpenID Connect provider, reachable via Traefik at
  ## https://dummy-oidc.localssl.dev/ (container listens on port 9998).
  dummy-oidc:
    image: photoprism/dummy-oidc:240627
    stop_grace_period: 30s
    labels:
      - "traefik.enable=true"
      - "traefik.http.services.dummy-oidc.loadbalancer.server.port=9998"
      - "traefik.http.routers.dummy-oidc.entrypoints=websecure"
      - "traefik.http.routers.dummy-oidc.rule=Host(`dummy-oidc.localssl.dev`)"
      - "traefik.http.routers.dummy-oidc.priority=3"
      - "traefik.http.routers.dummy-oidc.tls.domains[0].main=localssl.dev"
      - "traefik.http.routers.dummy-oidc.tls.domains[0].sans=*.localssl.dev"
      - "traefik.http.routers.dummy-oidc.tls=true"
  ## Dummy LDAP Directory Server
  ## Docs: https://glauth.github.io/docs/
  ## The LDAP protocol itself is published on host port 389; the Traefik route
  ## below fronts container port 5555 — presumably glauth's built-in web/API
  ## interface, not LDAP (NOTE(review): confirm against the glauth config).
  dummy-ldap:
    image: glauth/glauth-plugins:latest
    stop_grace_period: 15s
    ports:
      - "127.0.0.1:389:389" # LDAP, bound to localhost only
    labels:
      - "traefik.enable=true"
      - "traefik.http.services.ldap.loadbalancer.server.port=5555"
      - "traefik.http.routers.dummy-ldap.entrypoints=websecure"
      - "traefik.http.routers.dummy-ldap.rule=Host(`dummy-ldap.localssl.dev`)"
      - "traefik.http.routers.dummy-ldap.priority=3"
      - "traefik.http.routers.dummy-ldap.tls.domains[0].main=localssl.dev"
      - "traefik.http.routers.dummy-ldap.tls.domains[0].sans=*.localssl.dev"
      - "traefik.http.routers.dummy-ldap.tls=true"
    volumes:
      ## glauth users/groups are defined in this config file mounted from the repo root:
      - "./.ldap.cfg:/app/config/config.cfg"
  ## Keycloak (OIDC Identity Provider)
  ## Docs: https://www.keycloak.org/docs/latest/server_admin/
  ## Login with "user / photoprism" and "admin / photoprism".
  keycloak:
    image: quay.io/keycloak/keycloak:25.0
    stop_grace_period: 30s
    command: "start-dev" # development mode, do not use this in production!
    ## Alias the Traefik container so proxied hostnames resolve from inside Keycloak:
    links:
      - "traefik:localssl.dev"
      - "traefik:app.localssl.dev"
    labels:
      - "traefik.enable=true"
      - "traefik.http.services.keycloak.loadbalancer.server.port=8080"
      - "traefik.http.routers.keycloak.entrypoints=websecure"
      - "traefik.http.routers.keycloak.rule=Host(`keycloak.localssl.dev`)"
      - "traefik.http.routers.keycloak.priority=3"
      - "traefik.http.routers.keycloak.tls.domains[0].main=localssl.dev"
      - "traefik.http.routers.keycloak.tls.domains[0].sans=*.localssl.dev"
      - "traefik.http.routers.keycloak.tls=true"
    environment: # see https://www.keycloak.org/server/all-config
      KEYCLOAK_ADMIN: "admin"
      KEYCLOAK_ADMIN_PASSWORD: "photoprism"
      KC_METRICS_ENABLED: "false"
      KC_HOSTNAME: "keycloak.localssl.dev"
      KC_HOSTNAME_STRICT: "false"
      KC_PROXY: "edge" # TLS is terminated by Traefik in front of Keycloak
      ## Backing database: expects a reachable "postgres" host with a "keycloak"
      ## database/user. NOTE(review): no depends_on for postgres is declared in
      ## this service — confirm start ordering is handled elsewhere.
      KC_DB: "postgres"
      KC_DB_URL: "jdbc:postgresql://postgres:5432/keycloak"
      KC_DB_USERNAME: "keycloak"
      KC_DB_PASSWORD: "keycloak"
  ## Run "docker compose --profile prometheus up" to start your development environment with Prometheus.
  ## Docs: https://prometheus.io/docs/prometheus/latest/configuration/configuration/#oauth2
  ## The following grants API access to Prometheus with the preconfigured client credentials (adjust flags as needed):
  ## ./photoprism client add --id=cs5cpu17n6gj2qo5 --secret=xcCbOrw6I0vcoXzhnOmXhjpVSyFq0l0e -s metrics -n Prometheus -e 60 -t 1
  prometheus:
    image: prom/prometheus:latest
    stop_grace_period: 15s
    ## Only started when one of these profiles is selected:
    profiles: [ "all", "auth", "prometheus" ]
    labels:
      - "traefik.enable=true"
      - "traefik.http.services.prometheus.loadbalancer.server.port=9090"
      - "traefik.http.routers.prometheus.entrypoints=websecure"
      - "traefik.http.routers.prometheus.rule=Host(`prometheus.localssl.dev`)"
      - "traefik.http.routers.prometheus.priority=3"
      - "traefik.http.routers.prometheus.tls.domains[0].main=localssl.dev"
      - "traefik.http.routers.prometheus.tls.domains[0].sans=*.localssl.dev"
      - "traefik.http.routers.prometheus.tls=true"
    volumes:
      ## Scrape configuration is mounted from the repo root:
      - "./prometheus.yml:/etc/prometheus/prometheus.yml"
## Named volumes so caches and database files survive container re-creation.
## Create named volume for Go module cache
volumes:
  go-mod:
    driver: local
  postgresql: # PostgreSQL data directory
    driver: local
  mariadb: # MariaDB data directory
    driver: local
## Create shared "photoprism" network for connecting with services in other compose.yaml files
networks:
  default:
    name: photoprism
    driver: bridge

View file

@ -39,9 +39,12 @@ services:
PHOTOPRISM_DATABASE_NAME: "photoprism"
PHOTOPRISM_DATABASE_USER: "root"
PHOTOPRISM_DATABASE_PASSWORD: "photoprism"
PHOTOPRISM_TEST_DRIVER: "sqlite"
PHOTOPRISM_TEST_DSN: ".test.db"
PHOTOPRISM_TEST_DSN_NAME: "sqlitefile"
# PHOTOPRISM_TEST_DSN_MYSQL8: "root:photoprism@tcp(mysql:4001)/photoprism?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s"
PHOTOPRISM_TEST_DSN_MARIADB: "root:photoprism@tcp(mariadb:4001)/testdb?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true"
PHOTOPRISM_TEST_DSN_SQLITE: ""
PHOTOPRISM_TEST_DSN_SQLITEFILE: ".test.db?_foreign_keys=on&_busy_timeout=5000"
PHOTOPRISM_TEST_DSN_POSTGRES: "postgresql://testdb:testdb@postgres:5432/testdb?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable"
PHOTOPRISM_ASSETS_PATH: "/go/src/github.com/photoprism/photoprism/assets"
PHOTOPRISM_STORAGE_PATH: "/go/src/github.com/photoprism/photoprism/storage"
PHOTOPRISM_ORIGINALS_PATH: "/go/src/github.com/photoprism/photoprism/storage/originals"

View file

@ -93,8 +93,12 @@ services:
PHOTOPRISM_DATABASE_NAME: "photoprism"
PHOTOPRISM_DATABASE_USER: "root"
PHOTOPRISM_DATABASE_PASSWORD: "photoprism"
PHOTOPRISM_TEST_DRIVER: "sqlite"
PHOTOPRISM_TEST_DSN_NAME: "sqlite"
# PHOTOPRISM_TEST_DSN_MYSQL8: "root:photoprism@tcp(mysql:4001)/photoprism?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s"
PHOTOPRISM_TEST_DSN_MARIADB: "root:photoprism@tcp(mariadb:4001)/testdb?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true"
PHOTOPRISM_TEST_DSN_SQLITE: ""
PHOTOPRISM_TEST_DSN_SQLITEFILE: "file:/go/src/github.com/photoprism/photoprism/storage/testdata/unit.test.db?_foreign_keys=on&_busy_timeout=5000"
PHOTOPRISM_TEST_DSN_POSTGRES: "postgresql://testdb:testdb@postgres:5432/testdb?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable"
PHOTOPRISM_ASSETS_PATH: "/go/src/github.com/photoprism/photoprism/assets"
PHOTOPRISM_STORAGE_PATH: "/go/src/github.com/photoprism/photoprism/storage"
PHOTOPRISM_ORIGINALS_PATH: "/go/src/github.com/photoprism/photoprism/storage/originals"

View file

@ -96,8 +96,12 @@ services:
PHOTOPRISM_DATABASE_NAME: "photoprism"
PHOTOPRISM_DATABASE_USER: "root"
PHOTOPRISM_DATABASE_PASSWORD: "photoprism"
PHOTOPRISM_TEST_DRIVER: "sqlite"
PHOTOPRISM_TEST_DSN_NAME: "sqlite"
# PHOTOPRISM_TEST_DSN_MYSQL8: "root:photoprism@tcp(mysql:4001)/photoprism?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s"
PHOTOPRISM_TEST_DSN_MARIADB: "root:photoprism@tcp(mariadb:4001)/testdb?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true"
PHOTOPRISM_TEST_DSN_SQLITE: ""
PHOTOPRISM_TEST_DSN_SQLITEFILE: "file:/go/src/github.com/photoprism/photoprism/storage/testdata/unit.test.db?_foreign_keys=on&_busy_timeout=5000"
PHOTOPRISM_TEST_DSN_POSTGRES: "postgresql://testdb:testdb@postgres:5432/testdb?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable"
PHOTOPRISM_ASSETS_PATH: "/go/src/github.com/photoprism/photoprism/assets"
PHOTOPRISM_STORAGE_PATH: "/go/src/github.com/photoprism/photoprism/storage"
PHOTOPRISM_ORIGINALS_PATH: "/go/src/github.com/photoprism/photoprism/storage/originals"

View file

@ -2,100 +2,43 @@
## Setup: https://docs.photoprism.app/developer-guide/setup/ ##
services:
## PhotoPrism Development Environment (PostgreSQL)
# ATTENTION: PostgreSQL is NOT supported yet as Gorm (our ORM library) needs to be upgraded first.
# The current Gorm version does NOT support compatible general data types:
# https://github.com/photoprism/photoprism/issues/47
photoprism:
build: .
image: photoprism/photoprism:develop
depends_on:
- postgres
- dummy-webdav
security_opt:
- seccomp:unconfined
- apparmor:unconfined
ports:
- "2342:2342" # default HTTP port (host:container)
- "2343:2343" # acceptance Test HTTP port (host:container)
working_dir: "/go/src/github.com/photoprism/photoprism"
volumes:
- ".:/go/src/github.com/photoprism/photoprism"
- "go-mod:/go/pkg/mod"
shm_size: "2gb"
environment:
PHOTOPRISM_ADMIN_USER: "admin" # admin login username
PHOTOPRISM_ADMIN_PASSWORD: "photoprism" # initial admin password (8-72 characters)
PHOTOPRISM_AUTH_MODE: "password" # authentication mode (public, password)
PHOTOPRISM_SITE_URL: "http://localhost:2342/"
PHOTOPRISM_SITE_CAPTION: "AI-Powered Photos App"
PHOTOPRISM_SITE_DESCRIPTION: "Open-Source Photo Management"
PHOTOPRISM_SITE_AUTHOR: "@photoprism_app"
PHOTOPRISM_DEBUG: "true"
PHOTOPRISM_READONLY: "false"
PHOTOPRISM_EXPERIMENTAL: "true"
PHOTOPRISM_HTTP_MODE: "debug"
PHOTOPRISM_HTTP_HOST: "0.0.0.0"
PHOTOPRISM_HTTP_PORT: 2342
PHOTOPRISM_HTTP_COMPRESSION: "gzip" # improves transfer speed and bandwidth utilization (none or gzip)
PHOTOPRISM_DATABASE_DRIVER: "postgres"
PHOTOPRISM_DATABASE_SERVER: "postgres:5432"
PHOTOPRISM_DATABASE_NAME: "photoprism"
PHOTOPRISM_DATABASE_USER: "photoprism"
PHOTOPRISM_DATABASE_PASSWORD: "photoprism"
PHOTOPRISM_TEST_DRIVER: "sqlite"
PHOTOPRISM_ASSETS_PATH: "/go/src/github.com/photoprism/photoprism/assets"
PHOTOPRISM_STORAGE_PATH: "/go/src/github.com/photoprism/photoprism/storage"
PHOTOPRISM_ORIGINALS_PATH: "/go/src/github.com/photoprism/photoprism/storage/originals"
PHOTOPRISM_IMPORT_PATH: "/go/src/github.com/photoprism/photoprism/storage/import"
PHOTOPRISM_DISABLE_CHOWN: "false" # disables updating storage permissions via chmod and chown on startup
PHOTOPRISM_DISABLE_BACKUPS: "false" # disables backing up albums and photo metadata to YAML files
PHOTOPRISM_DISABLE_WEBDAV: "false" # disables built-in WebDAV server
PHOTOPRISM_DISABLE_SETTINGS: "false" # disables settings UI and API
PHOTOPRISM_DISABLE_PLACES: "false" # disables reverse geocoding and maps
PHOTOPRISM_DISABLE_EXIFTOOL: "false" # disables creating JSON metadata sidecar files with ExifTool
PHOTOPRISM_DISABLE_TENSORFLOW: "false" # disables all features depending on TensorFlow
PHOTOPRISM_DETECT_NSFW: "false" # automatically flags photos as private that MAY be offensive (requires TensorFlow)
PHOTOPRISM_UPLOAD_NSFW: "false" # allows uploads that MAY be offensive (no effect without TensorFlow)
PHOTOPRISM_UPLOAD_ALLOW: "" # restricts uploads to these file types (comma-separated list of EXTENSIONS; leave blank to allow all)
PHOTOPRISM_UPLOAD_ARCHIVES: "true" # allows upload of zip archives (will be extracted before import)
PHOTOPRISM_RAW_PRESETS: "false" # enables applying user presets when converting RAW images (reduces performance)
PHOTOPRISM_THUMB_FILTER: "lanczos" # resample filter, best to worst: blackman, lanczos, cubic, linear
PHOTOPRISM_THUMB_UNCACHED: "true" # enables on-demand thumbnail rendering (high memory and cpu usage)
PHOTOPRISM_THUMB_SIZE: 1920 # pre-rendered thumbnail size limit (default 1920, min 720, max 7680)
# PHOTOPRISM_THUMB_SIZE: 4096 # Retina 4K, DCI 4K (requires more storage); 7680 for 8K Ultra HD
PHOTOPRISM_THUMB_SIZE_UNCACHED: 7680 # on-demand rendering size limit (default 7680, min 720, max 7680)
PHOTOPRISM_JPEG_SIZE: 7680 # size limit for converted image files in pixels (720-30000)
## Run/install on first startup (options: update tensorflow https intel gpu davfs yt-dlp):
PHOTOPRISM_INIT: "https"
## Computer Vision (https://docs.photoprism.app/getting-started/config-options/#computer-vision):
PHOTOPRISM_VISION_API: "true" # server: enables service API endpoints under /api/v1/vision (requires access token)
PHOTOPRISM_VISION_URI: "" # client: service URI, e.g. http://hostname/api/v1/vision (leave blank to disable)
PHOTOPRISM_VISION_KEY: "" # client: service access token (for authentication)
OLLAMA_BASE_URL: "http://ollama:11434" # use "https://ollama.com" for Ollama Cloud
OLLAMA_API_KEY: "" # API key required to access Ollama (optional)
## External dependencies and tools:
TF_CPP_MIN_LOG_LEVEL: 1
GOCACHE: "/go/src/github.com/photoprism/photoprism/.local/gocache"
CODEX_HOME: "/go/src/github.com/photoprism/photoprism/.local/codex"
## PostgreSQL Database Server
## Docs: https://www.postgresql.org/docs/
postgres:
image: postgres:12-alpine
postgres-18:
image: postgres:18-alpine
stop_grace_period: 15s
expose:
- "5432"
ports:
- "5432:5432" # database port (host:container)
volumes:
- "postgresql:/var/lib/postgresql"
- "./scripts/sql/postgresql-init.sql:/docker-entrypoint-initdb.d/init.sql"
environment:
POSTGRES_DB: photoprism
POSTGRES_USER: photoprism
POSTGRES_PASSWORD: photoprism
## Dummy WebDAV Server
dummy-webdav:
image: photoprism/dummy-webdav:251210
## PostgreSQL Database Server
## Docs: https://www.postgresql.org/docs/
postgres-17:
image: postgres:17-alpine
stop_grace_period: 15s
expose:
- "5432"
ports:
- "5432:5432" # database port (host:container)
volumes:
- "postgresql:/var/lib/postgresql"
- "./scripts/sql/postgresql-init.sql:/docker-entrypoint-initdb.d/init.sql"
environment:
WEBDAV_USERNAME: admin
WEBDAV_PASSWORD: photoprism
POSTGRES_DB: photoprism
POSTGRES_USER: photoprism
POSTGRES_PASSWORD: photoprism
volumes:
go-mod:
driver: local
## Join shared "photoprism" network
networks:
default:
name: photoprism
external: true

View file

@ -7,6 +7,7 @@ services:
build: .
image: photoprism/photoprism:develop
depends_on:
- postgres
- mariadb
- dummy-webdav
- dummy-oidc
@ -101,8 +102,12 @@ services:
PHOTOPRISM_DATABASE_NAME: "photoprism"
PHOTOPRISM_DATABASE_USER: "root"
PHOTOPRISM_DATABASE_PASSWORD: "photoprism"
PHOTOPRISM_TEST_DRIVER: "sqlite"
PHOTOPRISM_TEST_DSN_NAME: "sqlite"
# PHOTOPRISM_TEST_DSN_MYSQL8: "root:photoprism@tcp(mysql:4001)/photoprism?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s"
PHOTOPRISM_TEST_DSN_MARIADB: "root:photoprism@tcp(mariadb:4001)/testdb?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true"
PHOTOPRISM_TEST_DSN_SQLITE: ""
PHOTOPRISM_TEST_DSN_SQLITEFILE: "file:/go/src/github.com/photoprism/photoprism/storage/testdata/unit.test.db?_foreign_keys=on&_busy_timeout=5000"
PHOTOPRISM_TEST_DSN_POSTGRES: "postgresql://testdb:testdb@postgres:5432/testdb?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable"
PHOTOPRISM_ASSETS_PATH: "/go/src/github.com/photoprism/photoprism/assets"
PHOTOPRISM_STORAGE_PATH: "/go/src/github.com/photoprism/photoprism/storage"
PHOTOPRISM_ORIGINALS_PATH: "/go/src/github.com/photoprism/photoprism/storage/originals"
@ -130,7 +135,7 @@ services:
PHOTOPRISM_FFMPEG_SIZE: "1920" # video size limit in pixels (720-7680) (default: 3840)
# PHOTOPRISM_FFMPEG_BITRATE: "64" # video bitrate limit in Mbps (default: 60)
## Run/install on first startup (options: update tensorflow https intel gpu davfs yt-dlp):
PHOTOPRISM_INIT: "https"
PHOTOPRISM_INIT: "https postgresql"
## Computer Vision (https://docs.photoprism.app/getting-started/config-options/#computer-vision):
PHOTOPRISM_VISION_API: "true" # server: enables service API endpoints under /api/v1/vision (requires access token)
PHOTOPRISM_VISION_URI: "" # client: service URI, e.g. http://hostname/api/v1/vision (leave blank to disable)
@ -151,6 +156,23 @@ services:
- "./storage:/photoprism"
- "go-mod:/go/pkg/mod"
## PostgreSQL Database Server
## Docs: https://www.postgresql.org/docs/
postgres:
image: postgres:17-alpine
stop_grace_period: 15s
expose:
- "5432"
ports:
- "5432:5432" # database port (host:container)
volumes:
- "postgresql:/var/lib/postgresql"
- "./scripts/sql/postgresql-init.sql:/docker-entrypoint-initdb.d/init.sql"
environment:
POSTGRES_DB: photoprism
POSTGRES_USER: photoprism
POSTGRES_PASSWORD: photoprism
## MariaDB (Database Server)
## Docs: https://mariadb.com/docs/reference/
## Release Notes: https://mariadb.com/kb/en/changes-improvements-in-mariadb-1011/
@ -441,6 +463,8 @@ services:
volumes:
go-mod:
driver: local
postgresql:
driver: local
mariadb:
driver: local

View file

@ -79,6 +79,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
chromium-driver \
chromium-sandbox \
mariadb-client \
postgresql-client \
sqlite3 \
libc6-dev \
libssl-dev \

View file

@ -76,6 +76,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
gettext \
firefox \
mariadb-client \
postgresql-client \
davfs2 \
chrpath \
libc6-dev \

View file

@ -56,6 +56,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
va-driver-all libva2 iputils-ping dnsutils libmagic-mgc binutils binutils-gold \
&& \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-darktable.sh && \
/scripts/install-yt-dlp.sh && \
/scripts/install-libheif.sh && \

View file

@ -75,6 +75,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
ln -sf /usr/bin/batcat /usr/local/bin/bat && \
/scripts/install-nodejs.sh && \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-tensorflow.sh && \
/scripts/install-onnx.sh && \
/scripts/install-darktable.sh && \

View file

@ -52,6 +52,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
x264 x265 libde265-dev libaom-dev libvpx-dev libwebm-dev libjpeg-dev libmatroska-dev libdvdread-dev \
&& \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-darktable.sh && \
/scripts/install-libheif.sh && \
echo 'alias ll="ls -alh"' >> /etc/skel/.bashrc && \

View file

@ -70,6 +70,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
ln -sf /usr/bin/batcat /usr/local/bin/bat && \
/scripts/install-nodejs.sh && \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-tensorflow.sh && \
/scripts/install-onnx.sh && \
/scripts/install-darktable.sh && \

View file

@ -52,6 +52,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
x264 x265 libde265-dev libaom-dev libvpx-dev libwebm-dev libjpeg-dev libmatroska-dev libdvdread-dev \
&& \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-darktable.sh && \
/scripts/install-libheif.sh && \
echo 'alias ll="ls -alh"' >> /etc/skel/.bashrc && \

View file

@ -70,6 +70,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
ln -sf /usr/bin/batcat /usr/local/bin/bat && \
/scripts/install-nodejs.sh && \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-tensorflow.sh && \
/scripts/install-onnx.sh && \
/scripts/install-darktable.sh && \

View file

@ -54,6 +54,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
iputils-ping dnsutils binutils binutils-gold \
&& \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-darktable.sh && \
/scripts/install-libheif.sh && \
echo 'alias ll="ls -alh"' >> /etc/skel/.bashrc && \

View file

@ -72,6 +72,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
ln -sf /usr/bin/batcat /usr/local/bin/bat && \
/scripts/install-nodejs.sh && \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-tensorflow.sh && \
/scripts/install-onnx.sh && \
/scripts/install-darktable.sh && \

View file

@ -57,6 +57,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
iputils-ping dnsutils \
&& \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-darktable.sh && \
/scripts/install-libheif.sh && \
echo 'alias ll="ls -alh"' >> /etc/skel/.bashrc && \

View file

@ -75,6 +75,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
ln -sf /usr/bin/batcat /usr/local/bin/bat && \
/scripts/install-nodejs.sh && \
/scripts/install-mariadb.sh mariadb-client && \
/scripts/install-postgresql.sh postgresql-client && \
/scripts/install-tensorflow.sh && \
/scripts/install-onnx.sh && \
/scripts/install-darktable.sh && \

View file

@ -106,6 +106,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
sudo \
bash \
mariadb-client \
postgresql-client \
sqlite3 \
tzdata \
libc6 \

View file

@ -105,6 +105,7 @@ RUN echo 'APT::Acquire::Retries "3";' > /etc/apt/apt.conf.d/80retries && \
sudo \
bash \
mariadb-client \
postgresql-client \
sqlite3 \
tzdata \
libc6 \

View file

@ -44,7 +44,7 @@ export default class Page {
} catch {
// ignore the error as the item may not show up
showLogs && this.logMessage("After Click In Catch in waitForSpecficEvent");
console.trace("notify close missed in waitForSpecficEvent " + event)
showLogs && console.trace("notify close missed in waitForSpecficEvent " + event)
} finally {
return
}

16
go.mod
View file

@ -18,14 +18,12 @@ require (
github.com/google/open-location-code/go v0.0.0-20250620134813-83986da0156b
github.com/gorilla/websocket v1.5.3
github.com/gosimple/slug v1.15.0
github.com/jinzhu/gorm v1.9.16
github.com/jinzhu/inflection v1.0.0
github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0 // indirect
github.com/karrick/godirwalk v1.17.0
github.com/klauspost/cpuid/v2 v2.3.0
github.com/leandro-lugaresi/hub v1.1.1
github.com/leonelquinteros/gotext v1.7.2
github.com/lib/pq v1.10.9 // indirect
github.com/lucasb-eyer/go-colorful v1.3.0
github.com/mandykoh/prism v0.35.3
github.com/manifoldco/promptui v0.9.0
@ -94,11 +92,14 @@ require (
golang.org/x/mod v0.32.0
golang.org/x/sys v0.40.0
google.golang.org/protobuf v1.36.11
gorm.io/driver/mysql v1.5.7
gorm.io/driver/postgres v1.5.9
gorm.io/driver/sqlite v1.5.6
gorm.io/gorm v1.25.12
)
require (
filippo.io/edwards25519 v1.1.0 // indirect
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 // indirect
github.com/KyleBanks/depth v1.2.1 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/boombuler/barcode v1.1.0 // indirect
@ -141,6 +142,10 @@ require (
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
github.com/gorilla/securecookie v1.1.2 // indirect
github.com/gosimple/unidecode v1.0.1 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
github.com/jackc/pgx/v5 v5.7.2
github.com/jackc/puddle/v2 v2.2.2 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/jonboulle/clockwork v0.5.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
@ -168,6 +173,11 @@ require (
github.com/ugorji/go/codec v1.3.0 // indirect
github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342 // indirect
github.com/zitadel/logging v0.6.2 // indirect
)
require (
github.com/mattn/go-runewidth v0.0.19 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
github.com/zitadel/schema v1.3.2 // indirect
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
go.opentelemetry.io/otel v1.39.0 // indirect

61
go.sum
View file

@ -17,20 +17,14 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 h1:mFRzDkZVAjdal+s7s0MwaRv9igoPqLRdzOLzw/8Xvq8=
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/IGLOU-EU/go-wildcard v1.0.3 h1:r8T46+8/9V1STciXJomTWRpPEv4nGJATDbJkdU0Nou0=
github.com/IGLOU-EU/go-wildcard v1.0.3/go.mod h1:/qeV4QLmydCbwH0UMQJmXDryrFKJknWi/jjO8IiuQfY=
github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc=
github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE=
github.com/PuerkitoBio/goquery v1.5.1/go.mod h1:GsLWisAFVj4WgDibEWF4pvYnkVQBpKBKeU+7zCJoLcc=
github.com/abema/go-mp4 v1.4.1 h1:YoS4VRqd+pAmddRPLFf8vMk74kuGl6ULSjzhsIqwr6M=
github.com/abema/go-mp4 v1.4.1/go.mod h1:vPl9t5ZK7K0x68jh12/+ECWBCXoWuIDtNgPtU2f04ws=
github.com/alexbrainman/sspi v0.0.0-20250919150558-7d374ff0d59e h1:4dAU9FXIyQktpoUAgOJK3OTFc/xug0PCXYCqU0FgDKI=
github.com/alexbrainman/sspi v0.0.0-20250919150558-7d374ff0d59e/go.mod h1:cEWa1LVoE5KvSD9ONXsZrj0z6KqySlCCNKHlLzbqAt4=
github.com/andybalholm/cascadia v1.1.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bmatcuk/doublestar/v4 v4.9.2 h1:b0mc6WyRSYLjzofB2v/0cuDUZ+MqoGyH3r0dVij35GI=
@ -73,8 +67,6 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davidbyttow/govips/v2 v2.16.0 h1:1nH/Rbx8qZP1hd+oYL9fYQjAnm1+KorX9s07ZGseQmo=
github.com/davidbyttow/govips/v2 v2.16.0/go.mod h1:clH5/IDVmG5eVyc23qYpyi7kmOT0B/1QNTKtci4RkyM=
github.com/denisenkom/go-mssqldb v0.0.0-20191124224453-732737034ffd h1:83Wprp6ROGeiHFAP8WJdI2RoxALQYgdllERc3N5N2DM=
github.com/denisenkom/go-mssqldb v0.0.0-20191124224453-732737034ffd/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU=
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
github.com/djherbis/times v1.6.0 h1:w2ctJ92J8fBvWPxugmXIv7Nz7Q3iDMKNx9v5ocVH20c=
@ -122,8 +114,6 @@ github.com/emersion/go-webdav v0.7.0 h1:cp6aBWXBf8Sjzguka9VJarr4XTkGc2IHxXI1Gq3T
github.com/emersion/go-webdav v0.7.0/go.mod h1:mI8iBx3RAODwX7PJJ7qzsKAKs/vY429YfS2/9wKnDbQ=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5 h1:Yzb9+7DPaBjB8zlTR87/ElzFsnQfuHnVUVqpZZIcV5Y=
github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5/go.mod h1:a2zkGnVExMxdzMo3M0Hi/3sEU+cWnZpSni0O6/Yb/P0=
github.com/esimov/pigo v1.4.6 h1:wpB9FstbqeGP/CZP+nTR52tUJe7XErq8buG+k4xCXlw=
github.com/esimov/pigo v1.4.6/go.mod h1:uqj9Y3+3IRYhFK071rxz1QYq0ePhA6+R9jrUZavi46M=
github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
@ -137,8 +127,6 @@ github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w
github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM=
github.com/gin-gonic/gin v1.11.0 h1:OW/6PLjyusp2PPXtyxKHU0RbX6I/l28FTdDlae5ueWk=
github.com/gin-gonic/gin v1.11.0/go.mod h1:+iq/FyxlGzII0KHiBGjuNn4UNENUlKbGlNmc+W50Dls=
github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 h1:BP4M0CvQ4S3TGls2FvczZtj5Re/2ZzkV9VwqPHH/3Bo=
github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0=
github.com/go-chi/chi/v5 v5.2.3 h1:WQIt9uxdsAbgIYgid+BpYc+liqQZGMHRaUwp0JUcvdE=
github.com/go-chi/chi/v5 v5.2.3/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops=
github.com/go-co-op/gocron/v2 v2.19.0 h1:OKf2y6LXPs/BgBI2fl8PxUpNAI1DA9Mg+hSeGOS38OU=
@ -153,8 +141,6 @@ github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-jose/go-jose/v4 v4.1.3 h1:CVLmWDhDVRa6Mi/IgCgaopNosCaHz7zrMeF9MlZRkrs=
github.com/go-jose/go-jose/v4 v4.1.3/go.mod h1:x4oUasVrzR7071A4TnHLGSPpNOm2a21K9Kf04k1rs08=
github.com/go-ldap/ldap/v3 v3.4.12 h1:1b81mv7MagXZ7+1r7cLTWmyuTqVqdwbtJSjC0DAp9s4=
github.com/go-ldap/ldap/v3 v3.4.12/go.mod h1:+SPAGcTtOfmGsCb3h1RFiq4xpp4N636G75OEace8lNo=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
@ -198,7 +184,7 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.28.0 h1:Q7ibns33JjyW48gHkuFT91qX48KG0ktULL6FgHdG688=
github.com/go-playground/validator/v10 v10.28.0/go.mod h1:GoI6I1SjPBh9p7ykNE/yj3fFYbyDOpwMn5KXd+m2hUU=
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI=
github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo=
github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=
github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM=
@ -210,9 +196,6 @@ github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo=
github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA=
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
github.com/golang/geo v0.0.0-20190916061304-5b978397cfec/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
github.com/golang/geo v0.0.0-20200319012246-673a6f80352d/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
@ -263,33 +246,24 @@ github.com/gosimple/slug v1.15.0 h1:wRZHsRrRcs6b0XnxMUBM6WK1U1Vg5B0R7VkIf1Xzobo=
github.com/gosimple/slug v1.15.0/go.mod h1:UiRaFH+GEilHstLUmcBgWcI42viBN7mAb818JrYOeFQ=
github.com/gosimple/unidecode v1.0.1 h1:hZzFTMMqSswvf0LBJZCZgThIZrpDHFXux9KeGmn6T/o=
github.com/gosimple/unidecode v1.0.1/go.mod h1:CP0Cr1Y1kogOtx0bJblKzsVWrqYaqfNOnHzpgWw4Awc=
github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=
github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8=
github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs=
github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo=
github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM=
github.com/jcmturner/gofork v1.7.6 h1:QH0l3hzAU1tfT3rZCnW5zXl+orbkNMMRGJfdJjHVETg=
github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo=
github.com/jcmturner/goidentity/v6 v6.0.1 h1:VKnZd2oEIMorCTsFBnJWbExfNN7yZr3EhJAxwOkZg6o=
github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg=
github.com/jcmturner/gokrb5/v8 v8.4.4 h1:x1Sv4HaTpepFkXbt2IkL29DXRf8sOfZXo8eRKh687T8=
github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs=
github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY=
github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgx/v5 v5.7.2 h1:mLoDLV6sonKlvjIEsV56SkWNCnuNv531l94GaIzO+XI=
github.com/jackc/pgx/v5 v5.7.2/go.mod h1:ncY89UGWxg82EykZUwSpUKEfccBGGYq1xjrOpsbsfGQ=
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jdeng/goheif v0.0.0-20200323230657-a0d6a8b3e68f/go.mod h1:G7IyA3/eR9IFmUIPdyP3c0l4ZaqEvXAk876WfaQ8plc=
github.com/jeremija/gosubmit v0.2.8 h1:mmSITBz9JxVtu8eqbN+zmmwX7Ij2RidQxhcwRVI4wqA=
github.com/jeremija/gosubmit v0.2.8/go.mod h1:Ui+HS073lCFREXBbdfrJzMB57OI/bdxTiLtrDHHhFPI=
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4=
github.com/jinzhu/gorm v1.9.16 h1:+IyIjPEABKRpsu/F8OvDPy9fyQlgsg2luMV2ZIH5i5o=
github.com/jinzhu/gorm v1.9.16/go.mod h1:G3LB3wezTOWM2ITLzPxEXgSkOXAntiLHS7UdBefADcs=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.0.1/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/jonboulle/clockwork v0.5.0 h1:Hyh9A8u51kptdkR+cqRpT1EebBwTn1oK9YfGYbdFz6I=
@ -321,9 +295,6 @@ github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/leonelquinteros/gotext v1.7.2 h1:bDPndU8nt+/kRo1m4l/1OXiiy2v7Z7dfPQ9+YP7G1Mc=
github.com/leonelquinteros/gotext v1.7.2/go.mod h1:9/haCkm5P7Jay1sxKDGJ5WIg4zkz8oZKw4ekNpALob8=
github.com/lib/pq v1.1.1/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQwVHXptag=
github.com/lucasb-eyer/go-colorful v1.3.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mailru/easyjson v0.9.1 h1:LbtsOm5WAswyWbvTEOqhypdPeZzHavpZx96/n553mR8=
@ -340,7 +311,6 @@ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWE
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw=
github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs=
github.com/mattn/go-sqlite3 v1.14.0/go.mod h1:JIl7NbARA7phWnGvh0LKTyg7S9BA+6gx71ShQilpsus=
github.com/mattn/go-sqlite3 v1.14.32 h1:JD12Ag3oLy1zQA+BNn74xRgaBbdhbNIDYvQUEuuErjs=
github.com/mattn/go-sqlite3 v1.14.32/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
@ -483,11 +453,9 @@ go4.org v0.0.0-20230225012048-214862532bf5/go.mod h1:F57wTi5Lrj6WLyswp5EYV1ncrEb
golang.org/x/arch v0.22.0 h1:c/Zle32i5ttqRXjdLyyHZESLD/bB90DCU1g9l/0YBDI=
golang.org/x/arch v0.22.0/go.mod h1:dNHoOeKiyja7GTvF9NJS1l3Z2yntpQNzgrjh1cU103A=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191205180655-e7c4368fe9dd/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
@ -533,7 +501,6 @@ golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c=
golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU=
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -549,7 +516,6 @@ golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLL
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200320220750-118fecf932d8/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
@ -733,6 +699,15 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/driver/mysql v1.5.7 h1:MndhOPYOfEp2rHKgkZIhJ16eVUIRf2HmzgoPmh7FCWo=
gorm.io/driver/mysql v1.5.7/go.mod h1:sEtPWMiqiN1N1cMXoXmBbd8C6/l+TESwriotuRRpkDM=
gorm.io/driver/postgres v1.5.9 h1:DkegyItji119OlcaLjqN11kHoUgZ/j13E0jkJZgD6A8=
gorm.io/driver/postgres v1.5.9/go.mod h1:DX3GReXH+3FPWGrrgffdvCk3DQ1dwDPdmbenSkweRGI=
gorm.io/driver/sqlite v1.5.6 h1:fO/X46qn5NUEEOZtnjJRWRzZMe8nqJiQ9E+0hi+hKQE=
gorm.io/driver/sqlite v1.5.6/go.mod h1:U+J8craQU6Fzkcvu8oLeAQmi50TkwPEhHDEjQZXDah4=
gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
gorm.io/gorm v1.25.12 h1:I0u8i2hWQItBq1WfE0o2+WuL9+8L21K9e2HHSTE/0f8=
gorm.io/gorm v1.25.12/go.mod h1:xh7N7RHfYlNc5EmcI/El95gXusucDrQnHXe0+CgWcLQ=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=

218
gorm2upgrade.md Normal file
View file

@ -0,0 +1,218 @@
# Gorm V2 Upgrade Documentation
The following documentation covers what needs to be done to use Gorm V2, which is different to Gorm V1.
And what has changed to enable the upgrade from Gorm V1 to Gorm V2.
# Ongoing Development
As new development is done, changes are made to existing columns, or new columns are added to the structs that support PhotoPrism.
These structs are turned into tables in the DBMSs that Gorm supports. At the time of writing PhotoPrism supports SQLite and MariaDB, with requests to support PostgreSQL.
Given the requests to support PostgreSQL the way that the Gorm annotations for the structs are used needed to change.
## type/size annotation
For all future development the type/size Gorm annotation needs to only use the default types that Gorm supports.
Do not use a database specific datatype like VARBINARY, VARCHAR, MEDIUMBLOB.
The following tables give an overview of the database type, to Go type, and the required Gorm annotation. Not all types are listed.
If you want the complete set, check the [go-gorm source](https://github.com/go-gorm/) for DataTypeOf for each DBMS.
### MariaDB translation
| DBMS Type | Go Type | Gorm annotation |
|----------------------|---------|-----------------------|
| SMALLINT | int | type:int;size:16; |
| MEDIUMINT | int | type:int;size:24; |
| INT | int | type:int;size:32; |
| BIGINT | int | |
| SMALLINT UNSIGNED | uint | type:uint;size:16; |
| MEDIUMINT UNSIGNED | uint | type:uint;size:24; |
| INT UNSIGNED | uint | type:uint;size:32; |
| BIGINT UNSIGNED | uint | |
| FLOAT | float32 | |
| DOUBLE | float64 | |
| VARBINARY(125) | string | type:byte;size:125; |
| VARCHAR(60) | string | size:60; |
| BLOB | as required | type:byte;size:65535; |
| MEDIUMBLOB | as required | type:byte;size:66666; |
| LONGBLOB | as required | type:byte;size:16777216; |
| DATETIME | time.Time | |
| DECIMAL(16,2) | float64 | precision:16;scale:2; |
### SQLite translation
| DBMS Type | Go Type | Gorm annotation |
|----------------------|---------|-----------------------|
| INTEGER (1) | int | |
| TEXT (2) | string | |
| BLOB (3) | as required | type:byte; |
| REAL (4) | float64 | |
| NUMERIC (5) | time.Time | |
|----------------------|---------|-----------------------|
| SMALLINT (1) | int | type:int;size:16; |
| MEDIUMINT (1) | int | type:int;size:24; |
| INT (1) | int | type:int;size:32; |
| BIGINT (1) | int | |
| SMALLINT UNSIGNED (1) | uint | type:uint;size:16; |
| MEDIUMINT UNSIGNED (1) | uint | type:uint;size:24; |
| INT UNSIGNED (1) | uint | type:uint;size:32; |
| BIGINT UNSIGNED (1) | uint | |
| FLOAT (4) | float32 | |
| DOUBLE (4) | float64 | |
| VARBINARY(125) (2) | string | type:byte;size:125; |
| VARCHAR(60) (2) | string | size:60; |
| BLOB (3) | as required | type:byte;size:65535; |
| MEDIUMBLOB (3) | as required | type:byte;size:66666; |
| LONGBLOB (3) | as required | type:byte;size:16777216; |
| DATETIME (5) | time.Time | |
| DECIMAL(16,2) (5) | float64 | precision:16;scale:2; |
The number in the brackets is "Affinity", which SQLite uses to translate a foreign DBMS type into its base set of 5 types, shown at the top of the table above.
### PostgreSQL translation
| DBMS Type | Go Type | Gorm annotation |
|----------------------|---------|-----------------------|
| SMALLSERIAL | int | size:16;autoIncrement; |
| SERIAL | int | size:32;autoIncrement; |
| BIGSERIAL | int | autoIncrement; |
| SMALLINT | int | size:16; |
| INTEGER | int | size:32; |
| BIGINT | int | |
| SMALLSERIAL | uint | size:15;autoIncrement; |
| SERIAL | uint | size:31;autoIncrement; |
| BIGSERIAL | uint | autoIncrement; |
| SMALLINT | uint | size:15; |
| INTEGER | uint | size:31; |
| BIGINT | uint | |
| NUMERIC(16,2) (5) | float64 | precision:16;scale:2; |
| DECIMAL | float64 | |
| VARCHAR(60) | string | size:60; |
| TEXT | string | |
| TIMESTAMPTZ(4) | time.Time | precision:4; |
| TIMESTAMPTZ | time.Time | |
| BYTEA | Bytes | |
| BYTEA | String | type:byte;size:125; |
| BYTEA | as required | type:byte;size:66666; |
## Foreign Keys
Gorm V2's implementation has introduced foreign keys at the database level. This will ensure that the data relationship between parent and child records is maintained. But, it also means that you can't create a child record if the parent is not already committed to the database (or added earlier in the same transaction).
An example of this is that you can't call the Create function on a Details struct, until the Create function on the Photo struct has already been done. This is NOT a change to the way that PhotoPrism is already developed.
It is possible to create an instance of a struct that has child structs (eg. Photo and Detail) by including the content of the child struct in the parent struct. Gorm will then take care of the creation of both records when photo.Create() is called.
eg.
```
photo := Photo{
TakenAt: time.Date(2020, 11, 11, 9, 7, 18, 0, time.UTC),
TakenSrc: SrcMeta,
Details: &Details {
Keywords: "nature, frog",
Notes: "notes",
}
}
```
## Queries
The use of 0 to represent FALSE and 1 to represent TRUE in queries shall no longer be done. Use TRUE/FALSE as appropriate in queries.
## Managing tables
Gorm V2 uses the Migrator to provide any changes to table structure. This replaces DropTableIfExists and CreateTable with Migrator().DropTable and Migrator().CreateTable. See internal/commands/auth_reset.go for an example.
## Soft Delete
Gorm V2 has changed the struct to support soft deletion. It now uses a type gorm.DeletedAt which has a Time time.Time and a Valid Boolean to indicate if a record is deleted. The structure in the database has not changed.
Valid = true when a record is soft deleted. The Time will also be populated.
# Changes made to support Gorm V2
The following provides an overview of what changes have been made to PhotoPrism to enable Gorm V2.
## Breaking Changes
There is only 1 known visible change as a result of the implementation of Gorm V2.
That is in the PhotoPrism CLI: where the output previously returned a DeletedDate, the following differences will be visible.
1. Any command that returns a DeletedDate will not return a column for DeletedAt if the record is not deleted.
2. Any command that returns a DeletedDate will return a gorm.DeletedAt structure if the record is deleted.
## Connection Strings
The connection string for SQLite has been changed, with &_foreign_keys=on being added to ensure that foreign keys are enabled within SQLite like they are on MariaDB.
## Migrator Changes
The migration has moved from a map to an ordered list to ensure that the migration is done in an order that supports foreign keys, instead of randomly.
In addition to that, the Truncate function has been updated to execute in foreign key order when removing all records from all tables. This process also resets the initial auto increment value to one.
__Newly added tables need to be added to these lists.__
## Structs
The following changes have been made to all Gorm related PhotoPrism structs.
The definition of a Primary Key has changed from primary_key to primaryKey.
The definition of auto increment has changed from auto_increment to autoIncrement.
The definition of a foreign key's source has changed from foreignkey to foreignKey.
The definition of a foreign key's target field has changed from association_foreignkey to references.
The definition of a many 2 many relationship has changed from association_jointable_foreignkey to a combination of foreignKey, joinForeignKey, References and joinReferences.
The definition of associations has been removed.
The definition of a unique index has changed from unique_index to uniqueIndex.
The definition of the type SMALLINT has changed from type:SMALLINT to type:int;size:16;
The definition of the type VARBINARY has changed from type:VARBINARY(nn) to type:bytes;size:nn.
The definition of the type VARCHAR has changed from type:VARCHAR(nn) to size:nn.
The definition of the field DeletedAt has changed from *time.Time to gorm.DeletedAt.
The definition of PRELOAD has been removed.
The use of the gorm definition type:DATETIME has been removed (not required).
### Album
The column Photos type has changed from PhotoAlbums to []PhotoAlbum.
### User
The column UserShares type has changed from UserShares to []UserShare.
The columns UserDetails and UserSettings are no longer automatically preloaded.
### Cell
The column Place is no longer automatically preloaded.
### Country
The column CountryPhotoID is no longer a required field. A migration script has been created to change the number 0 to a NULL in the database.
### Face
The column EmbeddingJSON has had its Gorm-specific type changed from type:MEDIUMBLOB to type:bytes;size:66666. This is to support PostgreSQL and SQLite which use unsized blob types, whilst the number ensures that MariaDB uses a medium_blob type.
### Marker
The columns EmbeddingsJSON and LandmarksJSON have had their gorm specific types changed from type:MEDIUMBLOB to type:bytes;size:66666. This is to support PostgreSQL and SQLite which use unsized blob types, whilst the number ensures that MariaDB uses a medium_blob type.
### Photo
The columns PhotoLat, PhotoLng and PhotoFNumber have had their gorm specific types removed.
The columns Details, Camera, Lens, Cell and Place have had their explicit associations removed.
The columns Keywords and Albums have had many2many relationships defined.
### PhotoAlbum
The columns Photo and Album have been removed. The gorm function SetupJoinTable is used to populate the foreign key into the model because this table is not using the primary keys of Photo and Album.
### PhotoLabel
The columns Photo and Label have had their Pre Load status removed, and replaced with foreign key definitions.
### Many to Many joins
The structs Photo and Album are connected via PhotoAlbum by SetupJoinTable.
The structs Photo and Keyword are connected via PhotoKeyword by SetupJoinTable.
The structs Label and LabelCategory are connected via Category by SetupJoinTable.
## Queries
With Gorm V1 the assumption that a 0 = FALSE or 1 = TRUE for boolean values had been made. All cases of this have been changed to using TRUE/FALSE as appropriate.

View file

@ -7,11 +7,13 @@ import (
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/pkg/dsn"
"github.com/photoprism/photoprism/pkg/fs"
)
func TestOptions(t *testing.T) {
var configPath = fs.Abs("testdata")
var configPath = fs.Abs("testdata/" + dsn.PhotoPrismTestToFolderName())
_ = os.Mkdir(configPath, os.ModePerm)
var configFile = filepath.Join(configPath, "vision.yml")
t.Run("Save", func(t *testing.T) {
@ -27,6 +29,7 @@ func TestOptions(t *testing.T) {
err := options.Load(filepath.Join(configPath, "invalid.yml"))
assert.Error(t, err)
})
_ = os.RemoveAll(configPath)
}
func TestConfigValues_Load(t *testing.T) {

View file

@ -16,6 +16,7 @@ import (
clusterjwt "github.com/photoprism/photoprism/internal/auth/jwt"
"github.com/photoprism/photoprism/internal/auth/session"
"github.com/photoprism/photoprism/internal/config"
"github.com/photoprism/photoprism/internal/entity"
"github.com/photoprism/photoprism/internal/photoprism/get"
"github.com/photoprism/photoprism/internal/service/cluster"
"github.com/photoprism/photoprism/pkg/authn"
@ -297,8 +298,13 @@ type portalJWTFixture struct {
func newPortalJWTFixture(t *testing.T, suffix string) portalJWTFixture {
t.Helper()
origConf := get.Config()
t.Cleanup(func() { get.SetConfig(origConf) })
t.Cleanup(func() {
c := get.Config()
c.CloseDb()
get.SetConfig(config.TestConfig())
c = get.Config()
entity.SetDbProvider(c) // Make sure that the database has been swapped back
})
nodeConf := config.NewMinimalTestConfigWithDb("auth-any-portal-jwt-"+suffix, t.TempDir())

View file

@ -6,6 +6,7 @@ import (
"os"
"strings"
"testing"
"time"
"github.com/gin-gonic/gin"
"github.com/sirupsen/logrus"
@ -16,6 +17,8 @@ import (
"github.com/photoprism/photoprism/internal/form"
"github.com/photoprism/photoprism/internal/photoprism/get"
"github.com/photoprism/photoprism/internal/server/limiter"
"github.com/photoprism/photoprism/internal/testextras"
"github.com/photoprism/photoprism/pkg/dsn"
"github.com/photoprism/photoprism/pkg/fs"
"github.com/photoprism/photoprism/pkg/http/header"
)
@ -29,6 +32,17 @@ func TestMain(m *testing.M) {
// Remove temporary SQLite files before running the tests.
fs.PurgeTestDbFiles(".", false)
caller := "internal/api/api_test.go/TestMain"
dbc, dbn, err := testextras.AcquireDBMutex(log, caller)
if err != nil {
log.Error("FAIL")
os.Exit(1)
}
defer testextras.UnlockDBMutex(dbc.Db())
_, dsname := dsn.PhotoPrismTestToDriverDSN(dbn)
dsn.SetDSNToEnv(dsname)
// Init test config.
c := config.TestConfig()
get.SetConfig(c)
@ -37,7 +51,11 @@ func TestMain(m *testing.M) {
limiter.Login = limiter.NewLimit(1, 10000)
// Run unit tests.
beforeTimestamp := time.Now().UTC()
code := m.Run()
code = testextras.ValidateDBErrors(c.Db(), log, beforeTimestamp, code)
testextras.ReleaseDBMutex(dbc.Db(), log, caller, code)
if err := c.CloseDb(); err != nil {
log.Errorf("close db: %v", err)
@ -45,7 +63,6 @@ func TestMain(m *testing.M) {
// Remove temporary SQLite files after running the tests.
fs.PurgeTestDbFiles(".", false)
os.Exit(code)
}

View file

@ -8,7 +8,7 @@ import (
"github.com/dustin/go-humanize/english"
"github.com/gin-gonic/gin"
"github.com/jinzhu/gorm"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/auth/acl"
"github.com/photoprism/photoprism/internal/config"
@ -257,8 +257,8 @@ func BatchPhotosPrivate(router *gin.RouterGroup) {
log.Infof("photos: updating private flag for %s", clean.Log(frm.String()))
if err := entity.Db().Model(entity.Photo{}).Where("photo_uid IN (?)", frm.Photos).UpdateColumn("photo_private",
gorm.Expr("CASE WHEN photo_private > 0 THEN 0 ELSE 1 END")).Error; err != nil {
if err := entity.Db().Model(&entity.Photo{}).Where("photo_uid IN (?)", frm.Photos).UpdateColumn("photo_private",
gorm.Expr("CASE WHEN photo_private THEN false ELSE true END")).Error; err != nil {
log.Errorf("private: %s", err)
AbortSaveFailed(c)
return

View file

@ -59,7 +59,7 @@ func TestRemoveFromAlbumCoverCache(t *testing.T) {
cache.Flush()
var album entity.Album
if err := query.UnscopedDb().Where("album_type = ? AND thumb_src = ?", entity.AlbumManual, entity.SrcAuto).First(&album).Error; err != nil {
if err := query.Db().Where("album_type = ? AND thumb_src = ?", entity.AlbumManual, entity.SrcAuto).First(&album).Error; err != nil {
t.Skipf("no auto-managed manual album available: %v", err)
}

View file

@ -7,10 +7,16 @@ import (
"github.com/stretchr/testify/assert"
"github.com/tidwall/gjson"
"github.com/photoprism/photoprism/internal/entity"
"github.com/photoprism/photoprism/internal/service/cluster"
)
func TestClusterMetrics_EmptyCounts(t *testing.T) {
// Remove the fixture record
if !assert.Empty(t, entity.UnscopedDb().Delete(entity.Client{}, "client_uid = ?", entity.ClientFixtures.Get("node").ClientUID).Error) {
return
}
app, router, conf := NewApiTest()
conf.Options().NodeRole = cluster.RolePortal
conf.Options().ClusterCIDR = "192.0.2.0/24"
@ -24,4 +30,7 @@ func TestClusterMetrics_EmptyCounts(t *testing.T) {
body := resp.Body.String()
assert.Equal(t, "192.0.2.0/24", gjson.Get(body, "ClusterCIDR").String())
assert.Equal(t, int64(0), gjson.Get(body, "Nodes.total").Int())
// Recreate the fixture record
entity.Db().Create(entity.ClientFixtures.Get("node"))
}

View file

@ -7,6 +7,7 @@ import (
"net/http/httptest"
"os"
"path/filepath"
"strings"
"testing"
"time"
@ -14,6 +15,7 @@ import (
"github.com/tidwall/gjson"
"github.com/photoprism/photoprism/internal/config"
"github.com/photoprism/photoprism/internal/entity"
"github.com/photoprism/photoprism/internal/service/cluster"
"github.com/photoprism/photoprism/internal/service/cluster/provisioner"
reg "github.com/photoprism/photoprism/internal/service/cluster/registry"
@ -41,8 +43,13 @@ func TestClusterNodesRegister(t *testing.T) {
// Pre-create a node via registry and rotate to get a plaintext secret for tests
regy, err := reg.NewClientRegistryWithConfig(conf)
assert.NoError(t, err)
n := &reg.Node{Node: cluster.Node{UUID: rnd.UUIDv7(), Name: "pp-auth", Role: cluster.RoleApp}}
assert.NoError(t, regy.Put(n))
rCreate := AuthenticatedRequestWithBody(app, http.MethodPost, "/api/v1/cluster/nodes/register", `{"NodeName":"pp-auth", "NodeRole":"`+cluster.RoleApp+`"}`, cluster.ExampleJoinToken)
cleanupRegisterProvisioning(t, conf, rCreate)
assert.Equal(t, http.StatusCreated, rCreate.Code)
assert.Contains(t, rCreate.Body.String(), `"AlreadyProvisioned":false`)
var resp cluster.RegisterResponse
json.Unmarshal(rCreate.Body.Bytes(), &resp)
n := resp.Node
nr, err := regy.RotateSecret(n.UUID)
assert.NoError(t, err)
secret := nr.ClientSecret
@ -109,17 +116,16 @@ func TestClusterNodesRegister(t *testing.T) {
conf.Options().JoinToken = cluster.ExampleJoinToken
ClusterNodesRegister(router)
regy, err := reg.NewClientRegistryWithConfig(conf)
assert.NoError(t, err)
// Pre-create node with a UUID
n := &reg.Node{Node: cluster.Node{UUID: rnd.UUIDv7(), Name: "pp-lock", Role: cluster.RoleApp}}
assert.NoError(t, regy.Put(n))
// Register the node to ensure that the database and registry is there
rCreate := AuthenticatedRequestWithBody(app, http.MethodPost, "/api/v1/cluster/nodes/register", `{"NodeName":"pp-lock", "NodeRole":"`+cluster.RoleApp+`"}`, cluster.ExampleJoinToken)
assert.Equal(t, http.StatusCreated, rCreate.Code)
assert.Contains(t, rCreate.Body.String(), `"AlreadyProvisioned":false`)
// Attempt to change UUID via name without client credentials → 409
newUUID := rnd.UUIDv7()
r := AuthenticatedRequestWithBody(app, http.MethodPost, "/api/v1/cluster/nodes/register", `{"NodeName":"pp-lock","NodeUUID":"`+newUUID+`"}`, cluster.ExampleJoinToken)
assert.Equal(t, http.StatusConflict, r.Code)
cleanupRegisterProvisioning(t, conf, rCreate)
})
t.Run("BadAdvertiseUrlRejected", func(t *testing.T) {
app, router, conf := NewApiTest()
@ -188,7 +194,7 @@ func TestClusterNodesRegister(t *testing.T) {
conf.Options().JoinToken = cluster.ExampleJoinToken
ClusterNodesRegister(router)
// Empty nodeName → 400
// Empty NodeName → 400
r := AuthenticatedRequestWithBody(app, http.MethodPost, "/api/v1/cluster/nodes/register", `{"NodeName":""}`, cluster.ExampleJoinToken)
assert.Equal(t, http.StatusBadRequest, r.Code)
})
@ -204,8 +210,11 @@ func TestClusterNodesRegister(t *testing.T) {
// used by OAuth tests running in the same package.
regy, err := reg.NewClientRegistryWithConfig(conf)
assert.NoError(t, err)
n := &reg.Node{Node: cluster.Node{Name: "pp-node-01", Role: cluster.RoleApp}}
assert.NoError(t, regy.Put(n))
// Register the node to ensure that the database and registry is there
rCreate := AuthenticatedRequestWithBody(app, http.MethodPost, "/api/v1/cluster/nodes/register", `{"NodeName":"pp-node-01", "NodeRole":"`+cluster.RoleApp+`"}`, cluster.ExampleJoinToken)
assert.Equal(t, http.StatusCreated, rCreate.Code)
assert.Contains(t, rCreate.Body.String(), `"AlreadyProvisioned":false`)
cleanupRegisterProvisioning(t, conf, rCreate)
r := AuthenticatedRequestWithBody(app, http.MethodPost, "/api/v1/cluster/nodes/register", `{"NodeName":"pp-node-01","RotateSecret":true}`, cluster.ExampleJoinToken)
assert.Equal(t, http.StatusOK, r.Code)
@ -228,8 +237,9 @@ func TestClusterNodesRegister(t *testing.T) {
// Pre-create node in registry so handler goes through existing-node path.
regy, err := reg.NewClientRegistryWithConfig(conf)
assert.NoError(t, err)
n := &reg.Node{Node: cluster.Node{Name: "pp-node-02", Role: cluster.RoleApp}}
assert.NoError(t, regy.Put(n))
rCreate := AuthenticatedRequestWithBody(app, http.MethodPost, "/api/v1/cluster/nodes/register", `{"NodeName":"pp-node-02", "NodeRole":"`+cluster.RoleApp+`"}`, cluster.ExampleJoinToken)
assert.Equal(t, http.StatusCreated, rCreate.Code)
assert.Contains(t, rCreate.Body.String(), `"AlreadyProvisioned":false`)
// Provisioner is independent; endpoint should respond 200 and persist metadata.
r := AuthenticatedRequestWithBody(app, http.MethodPost, "/api/v1/cluster/nodes/register", `{"NodeName":"pp-node-02","SiteUrl":"https://Photos.Example.COM"}`, cluster.ExampleJoinToken)
@ -240,6 +250,9 @@ func TestClusterNodesRegister(t *testing.T) {
n2, err := regy.FindByName("pp-node-02")
assert.NoError(t, err)
assert.Equal(t, "https://photos.example.com", n2.SiteUrl)
cleanupRegisterProvisioning(t, conf, rCreate)
})
t.Run("AssignNodeUUIDWhenMissing", func(t *testing.T) {
app, router, conf := NewApiTest()
@ -311,9 +324,14 @@ func cleanupRegisterProvisioning(t *testing.T, conf *config.Config, r *httptest.
t.Fatalf("unmarshal register response: %v", err)
}
if !resp.AlreadyProvisioned {
return
}
// Why? This prevents cleanup in most cases, which means that some tests are failing because the item
// is still there after execution of a previous test. Which means that a test that expects it not to be
// there fails.
// Every unit test should be able to be run without depending on the results of a previous unit test,
// so you should clean up fully every time.
// if !resp.AlreadyProvisioned {
// return
// }
name := resp.Database.Name
user := resp.Database.User
@ -336,9 +354,22 @@ func cleanupRegisterProvisioning(t *testing.T, conf *config.Config, r *httptest.
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
if err := provisioner.DropCredentials(ctx, name, user); err != nil {
t.Fatalf("drop credentials for %s/%s: %v", name, user, err)
count1269 := strings.Count(err.Error(), "Error 1269")
countError := strings.Count(err.Error(), "Error")
if countError > count1269 { // Only abort if there was an issue other than Error 1269 (HY000): Can't revoke all privileges for one or more of the requested users
t.Fatalf("drop credentials for %s/%s: %v", name, user, err)
}
}
})
if resp.Node.UUID != "" {
t.Cleanup(func() {
if err := entity.UnscopedDb().Where("node_uuid = ?", resp.Node.UUID).Delete(&entity.Client{}).Error; err != nil {
t.Fatalf("remove client for %s: %v", resp.Node.UUID, err)
}
})
}
}
// TestValidateAdvertiseURL ensures the validator accepts HTTPS everywhere and allows

View file

@ -2,6 +2,8 @@ package api
import (
"encoding/json"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
@ -17,6 +19,13 @@ func TestGetFoldersOriginals(t *testing.T) {
_ = conf.CreateDirectories()
expected, err := fs.Dirs(conf.OriginalsPath(), false, true)
if len(expected) == 0 {
// create something so that the test does some work.
newpath := filepath.Join(conf.OriginalsPath(), "2025/01")
os.MkdirAll(newpath, os.ModePerm)
expected, err = fs.Dirs(conf.OriginalsPath(), false, true)
}
if err != nil {
t.Fatal(err)
}

View file

@ -155,7 +155,7 @@ func PhotoUnstack(router *gin.RouterGroup) {
}
if updateErr := entity.UnscopedDb().Exec(`UPDATE files
SET photo_id = ?, photo_uid = ?, file_name = ?, file_missing = 0
SET photo_id = ?, photo_uid = ?, file_name = ?, file_missing = FALSE
WHERE file_name = ? AND file_root = ?`,
newPhoto.ID, newPhoto.PhotoUID, r.RootRelName(),
relName, relRoot).Error; updateErr != nil {

View file

@ -2,9 +2,12 @@ package api
import (
"net/http"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/pkg/fs"
)
func TestPhotoUnstack(t *testing.T) {
@ -17,12 +20,13 @@ func TestPhotoUnstack(t *testing.T) {
// t.Logf("RESP: %s", r.Body.String())
})
t.Run("UnstackBridge3Jpg", func(t *testing.T) {
app, router, _ := NewApiTest()
app, router, c := NewApiTest()
PhotoUnstack(router)
fs.Copy("./testdata/london_160x160.jpg", filepath.Join(c.Options().OriginalsPath, "London/bridge3.jpg"), true)
fs.Copy("./testdata/face_160x160.jpg", filepath.Join(c.Options().OriginalsPath, "1990/04/bridge2.jpg"), true)
r := PerformRequest(app, "POST", "/api/v1/photos/ps6sg6be2lvl0yh7/files/fs6sg6bwhhbnlqdn/unstack")
// TODO: Have a real file in place for testing the success case. This file does not exist, so it cannot be unstacked.
assert.Equal(t, http.StatusNotFound, r.Code)
// t.Logf("RESP: %s", r.Body.String())
assert.Equal(t, http.StatusOK, r.Code)
//t.Logf("RESP: %s", r.Body.String())
})
t.Run("NotExistingFile", func(t *testing.T) {
app, router, _ := NewApiTest()

View file

@ -27,5 +27,5 @@ func TestMain(m *testing.M) {
}
func newTestConfig(t *testing.T) *cfg.Config {
return cfg.NewMinimalTestConfig(t.TempDir())
return cfg.NewMinimalTestConfig("", t.TempDir())
}

View file

@ -3,11 +3,14 @@ package session
import (
"os"
"testing"
"time"
"github.com/sirupsen/logrus"
"github.com/photoprism/photoprism/internal/config"
"github.com/photoprism/photoprism/internal/event"
"github.com/photoprism/photoprism/internal/testextras"
"github.com/photoprism/photoprism/pkg/dsn"
"github.com/photoprism/photoprism/pkg/fs"
)
@ -16,9 +19,24 @@ func TestMain(m *testing.M) {
log.SetLevel(logrus.TraceLevel)
event.AuditLog = log
caller := "internal/auth/session/session_test.go/TestMain"
dbc, dbn, err := testextras.AcquireDBMutex(log, caller)
if err != nil {
log.Error("FAIL")
os.Exit(1)
}
defer testextras.UnlockDBMutex(dbc.Db())
_, dsname := dsn.PhotoPrismTestToDriverDSN(dbn)
dsn.SetDSNToEnv(dsname)
c := config.TestConfig()
beforeTimestamp := time.Now().UTC()
code := m.Run()
code = testextras.ValidateDBErrors(c.Db(), log, beforeTimestamp, code)
testextras.ReleaseDBMutex(dbc.Db(), log, caller, code)
// Remove temporary SQLite files after running the tests.
if err := c.CloseDb(); err != nil {

View file

@ -15,6 +15,7 @@ import (
)
func TestAuthJWTCommands(t *testing.T) {
defer resetConfigAndOpenDB()
conf := get.Config()
origEdition := conf.Options().Edition

View file

@ -7,6 +7,7 @@ import (
)
func TestAuthListCommand(t *testing.T) {
resetConfigAndOpenDB()
t.Run("All", func(t *testing.T) {
// Run command with test context.
output, err := RunWithTestContext(AuthListCommand, []string{"ls"})
@ -71,7 +72,7 @@ func TestAuthListCommand(t *testing.T) {
// Check command output for plausibility.
// t.Logf(output)
assert.Empty(t, output)
assert.Contains(t, output, "Incorrect Usage: flag provided but not defined: -xyz")
assert.Error(t, err)
})
}

View file

@ -30,7 +30,7 @@ func authRemoveAction(ctx *cli.Context) error {
return cli.ShowSubcommandHelp(ctx)
}
if cliMode == NONINTERACTIVE {
if RunNonInteractively(false) {
// proceed without prompt
if m, err := query.Session(id); err != nil {
return errors.New("session not found")

View file

@ -1,9 +1,14 @@
package commands
import (
"bytes"
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/internal/config"
"github.com/photoprism/photoprism/internal/entity"
)
func TestAuthRemoveCommand(t *testing.T) {
@ -13,17 +18,68 @@ func TestAuthRemoveCommand(t *testing.T) {
// t.Logf(output0)
assert.NoError(t, err)
assert.NotEmpty(t, output0)
assert.Contains(t, output0, "sessgh6123yt")
// Setup and capture output
buffer := bytes.Buffer{}
log.SetOutput(&buffer)
output, err := RunWithTestContext(AuthRemoveCommand, []string{"rm", "sessgh6123yt"})
// Reset logger
log.SetOutput(os.Stdout)
// t.Logf(output)
assert.NoError(t, err)
assert.Empty(t, output)
assert.Contains(t, buffer.String(), "session 'sessgh6123yt' was not removed")
output1, err := RunWithTestContext(AuthShowCommand, []string{"show", "sessgh6123yt"})
// t.Logf(output1)
assert.NoError(t, err)
assert.NotEmpty(t, output1)
assert.Contains(t, output1, "sessgh6123yt")
})
t.Run("noninteractive", func(t *testing.T) {
output0, err := RunWithTestContext(AuthShowCommand, []string{"show", "sessgh6123yt"})
// t.Log(output0)
assert.NoError(t, err)
assert.NotEmpty(t, output0)
assert.Contains(t, output0, "sessgh6123yt")
_ = os.Setenv(config.EnvVar("cli"), "noninteractive")
defer os.Unsetenv(config.EnvVar("cli"))
// Setup and capture output
buffer := bytes.Buffer{}
log.SetOutput(&buffer)
output, err := RunWithTestContext(AuthRemoveCommand, []string{"rm", "sessgh6123yt"})
// Reset logger
log.SetOutput(os.Stdout)
// t.Log(output)
// t.Log(buffer.String())
assert.NoError(t, err)
assert.Empty(t, output)
assert.Contains(t, buffer.String(), "session 'sessgh6123yt' has been removed")
output1, err := RunWithTestContext(AuthShowCommand, []string{"show", "sessgh6123yt"})
// t.Log(output1)
assert.Error(t, err)
assert.Empty(t, output1)
assert.Contains(t, err.Error(), "session sessgh6123yt not found: record not found")
// Put the deleted session back
c := reopenConnection()
s := entity.SessionFixtures.Get("client_analytics")
if err := c.Db().Create(&s).Error; err != nil {
assert.NoError(t, err)
}
output2, err := RunWithTestContext(AuthShowCommand, []string{"show", "sessgh6123yt"})
assert.NoError(t, err)
assert.NotEmpty(t, output2)
assert.Contains(t, output2, "sessgh6123yt")
})
}

View file

@ -58,12 +58,12 @@ func authResetAction(ctx *cli.Context) error {
db := conf.Db()
// Drop existing sessions table.
if err := db.DropTableIfExists(entity.Session{}).Error; err != nil {
if err := db.Migrator().DropTable(entity.Session{}); err != nil {
return err
}
// Re-create auth_sessions.
if err := db.CreateTable(entity.Session{}).Error; err != nil {
if err := db.Migrator().CreateTable(entity.Session{}); err != nil {
return err
}

View file

@ -60,7 +60,7 @@ func TestClientsListCommand(t *testing.T) {
// Check command output for plausibility.
// t.Logf(output)
assert.Empty(t, output)
assert.Contains(t, output, "Incorrect Usage: flag provided but not defined: -xyz")
assert.Error(t, err)
})
}

View file

@ -53,12 +53,12 @@ func clientsResetAction(ctx *cli.Context) error {
db := conf.Db()
// Drop existing auth_clients table.
if err := db.DropTableIfExists(entity.Client{}).Error; err != nil {
if err := db.Migrator().DropTable(entity.Client{}); err != nil {
return err
}
// Re-create auth_clients.
if err := db.CreateTable(entity.Client{}).Error; err != nil {
if err := db.Migrator().CreateTable(entity.Client{}); err != nil {
return err
}

View file

@ -1,9 +1,13 @@
package commands
import (
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/internal/config"
"github.com/photoprism/photoprism/internal/entity"
)
func TestClientsResetCommand(t *testing.T) {
@ -34,4 +38,48 @@ func TestClientsResetCommand(t *testing.T) {
assert.Contains(t, output1, "alice")
assert.Contains(t, output1, "metrics")
})
t.Run("Confirmed", func(t *testing.T) {
_ = os.Setenv(config.EnvVar("cli"), "noninteractive")
defer os.Unsetenv(config.EnvVar("cli"))
// Run command with test context.
output0, err := RunWithTestContext(ClientsListCommand, []string{"ls"})
// Check command output for plausibility.
// t.Logf(output)
assert.NoError(t, err)
assert.Contains(t, output0, "alice")
assert.Contains(t, output0, "metrics")
// Run command with test context.
output, err := RunWithTestContext(ClientsResetCommand, []string{"reset"})
// Check command output for plausibility.
//t.Logf(output)
assert.NoError(t, err)
assert.Empty(t, output)
// Run command with test context.
output1, err := RunWithTestContext(ClientsListCommand, []string{"ls"})
// Check command output for plausibility.
// t.Logf(output)
assert.NoError(t, err)
assert.NotContains(t, output1, "alice")
assert.NotContains(t, output1, "metrics")
// Put the clients back
c := reopenConnection()
entity.SetDbProvider(c)
entity.CreateClientFixtures()
// Run command with test context.
output2, err := RunWithTestContext(ClientsListCommand, []string{"ls"})
// Check command output for plausibility.
// t.Logf(output)
assert.NoError(t, err)
assert.Contains(t, output2, "alice")
assert.Contains(t, output2, "metrics")
})
}

View file

@ -42,11 +42,10 @@ import (
const NONINTERACTIVE = "noninteractive"
var log = event.Log
var cliMode = strings.ToLower(os.Getenv(config.EnvVar("cli")))
// RunNonInteractively checks if command should run non-interactively.
func RunNonInteractively(confirmed bool) bool {
return confirmed || cliMode == NONINTERACTIVE
return confirmed || strings.ToLower(os.Getenv(config.EnvVar("cli"))) == NONINTERACTIVE
}
// PhotoPrism contains the photoprism CLI (sub-)commands.

View file

@ -1,20 +1,27 @@
package commands
import (
"bytes"
"flag"
"os"
"testing"
"time"
"github.com/sirupsen/logrus"
"github.com/urfave/cli/v2"
"github.com/photoprism/photoprism/internal/config"
"github.com/photoprism/photoprism/internal/entity"
"github.com/photoprism/photoprism/internal/event"
"github.com/photoprism/photoprism/internal/photoprism/get"
"github.com/photoprism/photoprism/internal/testextras"
"github.com/photoprism/photoprism/pkg/capture"
"github.com/photoprism/photoprism/pkg/dsn"
"github.com/photoprism/photoprism/pkg/fs"
)
var savedPath string
// TODO: Several CLI commands defer conf.Shutdown(), which closes the shared
// database connection. To avoid flakiness, RunWithTestContext re-initializes
// and re-registers the DB provider before each command invocation. If you see
@ -31,10 +38,22 @@ func TestMain(m *testing.M) {
// Remove temporary SQLite files before running the tests.
fs.PurgeTestDbFiles(".", false)
caller := "internal/commands/commands_test.go/TestMain"
dbc, dbn, err := testextras.AcquireDBMutex(log, caller)
if err != nil {
log.Error("FAIL")
os.Exit(1)
}
defer testextras.UnlockDBMutex(dbc.Db())
_, dsname := dsn.PhotoPrismTestToDriverDSN(dbn)
dsn.SetDSNToEnv(dsname)
tempDir, err := os.MkdirTemp("", "commands-test")
if err != nil {
panic(err)
}
savedPath = tempDir
c := config.NewMinimalTestConfigWithDb("commands", tempDir)
get.SetConfig(c)
@ -48,7 +67,11 @@ func TestMain(m *testing.M) {
}
// Run unit tests.
beforeTimestamp := time.Now().UTC()
code := m.Run()
code = testextras.ValidateDBErrors(c.Db(), log, beforeTimestamp, code)
testextras.ReleaseDBMutex(dbc.Db(), log, caller, code)
if err = c.CloseDb(); err != nil {
log.Errorf("close db: %v", err)
@ -75,7 +98,7 @@ func NewTestContext(args []string) *cli.Context {
app.Commands = PhotoPrism
app.HelpName = app.Name
app.CustomAppHelpTemplate = ""
app.HideHelp = true
app.HideHelp = false
app.HideHelpCommand = true
app.Action = func(*cli.Context) error { return nil }
app.EnableBashCompletion = false
@ -91,23 +114,62 @@ func NewTestContext(args []string) *cli.Context {
LogErr(flagSet.Parse(args))
// Create and return new test context.
return cli.NewContext(app, flagSet, nil)
return cli.NewContext(app, flagSet, cli.NewContext(app, flagSet, nil))
}
// RunWithTestContext executes a command with a test context and returns its output.
func RunWithTestContext(cmd *cli.Command, args []string) (output string, err error) {
// Create test context with flags and arguments.
ctx := NewTestContext(args)
return RunWithProvidedTestContext(NewTestContext(args), cmd, args)
}
// TODO: Help output cannot currently be generated in test mode due to
// a nil pointer panic in the "github.com/urfave/cli/v2" package.
cmd.HideHelp = true
// Ensure DB connection is open for each command run (some commands call Shutdown).
if c := get.Config(); c != nil {
c.RegisterDb() // (re)register provider
// NewTestContextWithParse creates a new CLI test context with the flags and arguments provided.
//
// Unlike NewTestContext, the app-level flags in appArgs are parsed into their own
// flag set and attached as the parent context, so commands that read global
// options (e.g. database flags) see the values passed by the test. appArgs[0] is
// the program name and is skipped, mirroring os.Args; cmdArgs are parsed as the
// command's own arguments.
func NewTestContextWithParse(appArgs []string, cmdArgs []string) *cli.Context {
	// Build the throwaway test application.
	testApp := cli.NewApp()
	testApp.Name = "photoprism"
	testApp.HelpName = testApp.Name
	testApp.Usage = "PhotoPrism®"
	testApp.Description = ""
	testApp.Version = "test"
	testApp.Copyright = "(c) 2018-2025 PhotoPrism UG. All rights reserved."
	testApp.Flags = config.Flags.Cli()
	testApp.Commands = PhotoPrism
	testApp.CustomAppHelpTemplate = ""
	testApp.HideHelp = false
	testApp.HideHelpCommand = true
	testApp.EnableBashCompletion = false
	testApp.Action = func(*cli.Context) error { return nil }
	testApp.Metadata = map[string]interface{}{
		"Name":    "PhotoPrism",
		"About":   "PhotoPrism®",
		"Edition": "ce",
		"Version": "test",
	}

	// Parse the app-level (global) flags, skipping the program name.
	globalFlags := flag.NewFlagSet("photoprism", flag.ContinueOnError)
	for _, f := range testApp.Flags {
		f.Apply(globalFlags)
	}
	LogErr(globalFlags.Parse(appArgs[1:]))

	// Parse the command-level arguments into a separate flag set.
	cmdFlags := flag.NewFlagSet("test", flag.ContinueOnError)
	LogErr(cmdFlags.Parse(cmdArgs))

	// The parent context carries the parsed global flags.
	return cli.NewContext(testApp, cmdFlags, cli.NewContext(testApp, globalFlags, nil))
}
func RunWithProvidedTestContext(ctx *cli.Context, cmd *cli.Command, args []string) (output string, err error) {
// Ensure DB connection is open for each command run (some commands call Shutdown).
_ = reopenConnection()
// Redirect the output from cli to buffer for transfer to output for testing
var catureOutput bytes.Buffer
oldWriter := ctx.App.Writer
ctx.App.Writer = &catureOutput
// Run command via cli.Command.Run but neutralize os.Exit so ExitCoder
// errors don't terminate the test binary.
output = capture.Output(func() {
@ -116,12 +178,64 @@ func RunWithTestContext(cmd *cli.Command, args []string) (output string, err err
defer func() { cli.OsExiter = origExiter }()
err = cmd.Run(ctx, args...)
})
ctx.App.Writer = oldWriter
output += catureOutput.String()
// Re-open the database after the command completed so follow-up checks
// (potentially issued by the test itself) have an active connection.
if c := get.Config(); c != nil {
c.RegisterDb()
}
// Re-open the database after the command completed so follow-up checks
// (potentially issued by the test itself) have an active connection.
_ = reopenConnection()
return output, err
}
// resetConfigAndDB swaps in a freshly generated minimal config and registers it
// everywhere the commands package looks one up, possibly replacing the database.
// For Postgres and MariaDB it calls Migrate and TestFixtures; for SQLite it may
// do so only when the database does not exist yet, which can only happen when
// PHOTOPRISM_TEST_DSN_NAME="sqlite" is used.
func resetConfigAndDB() *config.Config {
	conf := config.NewMinimalTestConfigWithDb("commands", savedPath)

	// Register the new config with the service locator and the entity layer.
	get.SetConfig(conf)
	entity.SetDbProvider(conf)

	// Make sure CLI commands initialize against this config as well.
	InitConfig = func(ctx *cli.Context) (*config.Config, error) {
		return conf, conf.Init()
	}

	return conf
}
// resetConfigAndOpenDB swaps in a freshly generated minimal config and opens the
// configured database. It does not call Migrate and TestFixtures if the database
// already has records in auth_users and photos.
func resetConfigAndOpenDB() *config.Config {
	conf := config.NewMinimalTestConfig("commands", savedPath)

	// With the plain "sqlite" DSN (not "sqlitefile"), NewMinimalTestConfig removes
	// the database, so restore it from the cache first.
	config.RestoreDBFromCache(conf)

	if err := conf.Init(); err != nil {
		log.Fatalf("config: %s (init)", err.Error())
	}

	// Register the new config with the service locator and the entity layer.
	get.SetConfig(conf)
	entity.SetDbProvider(conf)

	// Make sure CLI commands initialize against this config as well.
	InitConfig = func(ctx *cli.Context) (*config.Config, error) {
		return conf, conf.Init()
	}

	return conf
}
// reopenConnection gets the currently configured connection and opens it if it
// is closed. It returns the current config to allow queries in tests if needed,
// or nil (after logging a warning) when no config has been set.
func reopenConnection() *config.Config {
	c := get.Config()
	if c == nil {
		log.Warn("reopenConnection: config is nil")
		return nil
	}

	if !c.IsDbOpen() {
		// Re-open and re-register the database connection.
		c.RegisterDb()
	} else {
		entity.SetDbProvider(c) // entity can get out of sync with c, so make sure it's correct.
	}

	// Make sure CLI commands initialize against this config as well.
	InitConfig = func(ctx *cli.Context) (*config.Config, error) {
		return c, c.Init()
	}

	return c
}

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,63 @@
package commands
import (
"bytes"
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/internal/entity"
"github.com/photoprism/photoprism/pkg/dsn"
)
// TestResetCommand verifies that "reset --index --yes" drops and restores the
// index tables, leaving the photos table empty afterwards.
func TestResetCommand(t *testing.T) {
	// This test empties the database, so restore a known-good state for later tests.
	defer resetConfigAndDB()

	t.Run("ResetIndex", func(t *testing.T) {
		conf := resetConfigAndOpenDB()

		// The index must contain photos before the reset, or the test proves nothing.
		var photoCount int64
		if err := conf.Db().Model(&entity.Photo{}).Count(&photoCount).Error; err != nil {
			assert.NoError(t, err)
			return
		}
		assert.Greater(t, photoCount, int64(0))

		// Point the command at the test database. For SQLite, the config's own
		// DSN is used instead of the shared test DSN.
		drv, driverDSN := dsn.PhotoPrismTestToDriverDSN(0)
		if drv == "sqlite" {
			driverDSN = conf.DatabaseDSN()
		}
		appArgs := []string{"photoprism",
			"--database-driver", drv,
			"--database-dsn", driverDSN}
		cmdArgs := []string{"reset", "--index", "--yes"}
		ctx := NewTestContextWithParse(appArgs, cmdArgs)

		// Capture log output so the reset progress messages can be checked.
		var logBuf bytes.Buffer
		log.SetOutput(&logBuf)
		output, err := RunWithProvidedTestContext(ctx, ResetCommand, cmdArgs)
		// Reset logger.
		log.SetOutput(os.Stdout)

		// Check command output for plausibility.
		// t.Logf("buffer = %s", logBuf.String())
		assert.NoError(t, err)
		assert.Empty(t, output)
		assert.Contains(t, logBuf.String(), "dropping existing tables")
		assert.Contains(t, logBuf.String(), "restoring default schema")

		// After the reset, the photos table must be empty again.
		conf = reopenConnection()
		if err := conf.Db().Model(&entity.Photo{}).Count(&photoCount).Error; err != nil {
			assert.NoError(t, err)
			return
		}
		assert.Equal(t, int64(0), photoCount)
	})
}

View file

@ -0,0 +1,6 @@
-- Recreates the dedicated "migrate" test database and user from scratch so the
-- migration tests always start with an empty database (MariaDB/MySQL syntax).
DROP DATABASE IF EXISTS migrate;
CREATE DATABASE IF NOT EXISTS migrate;
-- Test-only credentials: user "migrate" with password 'migrate', allowed from any host.
CREATE USER IF NOT EXISTS migrate@'%' IDENTIFIED BY 'migrate';
GRANT ALL PRIVILEGES ON migrate.* TO migrate@'%';
-- Make the grant changes take effect immediately.
FLUSH PRIVILEGES;

View file

@ -0,0 +1,29 @@
--
-- PostgreSQL database dump
--
-- Dumped from database version 17.4
-- Dumped by pg_dump version 17.4 (Ubuntu 17.4-1)
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET transaction_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
DROP DATABASE IF EXISTS migrate WITH (FORCE);
--
-- TOC entry 3924 (class 1262 OID 25875)
-- Name: migrate; Type: DATABASE; Schema: -; Owner: migrate
--
CREATE DATABASE migrate WITH TEMPLATE = template0 ENCODING = 'UTF8' LOCALE_PROVIDER = libc LOCALE = 'en_US.utf8';
ALTER DATABASE migrate OWNER TO migrate;

2192
internal/commands/testdata/transfer_mysql vendored Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

View file

@ -7,6 +7,7 @@ import (
)
func TestUsersAddCommand(t *testing.T) {
resetConfigAndOpenDB()
t.Run("AddUserThatAlreadyExists", func(t *testing.T) {
// Run command with test context.
output, err := RunWithTestContext(UsersAddCommand, []string{"add", "--name=Alice", "--email=jane@test.de", "--password=test1234", "--role=admin", "alice"})

View file

@ -1,7 +1,9 @@
package commands
import (
"strings"
"testing"
"unicode"
"github.com/stretchr/testify/assert"
)
@ -14,6 +16,14 @@ func TestUsersLegacyCommand(t *testing.T) {
// Check command output for plausibility.
// t.Logf(output)
assert.NoError(t, err)
assert.Contains(t, output, "│ ID │ UID │ Name │ User │ Email │ Admin │ Created At │")
// remove spaces as this test will fail if there are records in the table due to dynamic sizing of headings
var result strings.Builder
result.Grow(len(output))
for _, char := range output {
if !unicode.IsSpace(char) {
result.WriteRune(char)
}
}
assert.Contains(t, result.String(), "│ID│UID│Name│User│Email│Admin│CreatedAt│")
})
}

View file

@ -75,7 +75,7 @@ func usersListAction(ctx *cli.Context) error {
}
if ctx.Bool("deleted") {
rows[i] = append(rows[i], report.DateTime(user.DeletedAt))
rows[i] = append(rows[i], report.DateTime(&user.DeletedAt.Time))
}
}

View file

@ -7,6 +7,7 @@ import (
)
func TestUsersListCommand(t *testing.T) {
resetConfigAndOpenDB()
t.Run("All", func(t *testing.T) {
// Run command with test context.
output, err := RunWithTestContext(UsersListCommand, []string{"ls", "--login", "--created", "--deleted", "-n", "100", "--md"})
@ -94,7 +95,7 @@ func TestUsersListCommand(t *testing.T) {
// Check command output for plausibility.
// t.Logf(output)
assert.Empty(t, output)
assert.Contains(t, output, "Incorrect Usage: flag provided but not defined: -xyz")
assert.Error(t, err)
})
}

View file

@ -5,6 +5,7 @@ import (
"github.com/manifoldco/promptui"
"github.com/urfave/cli/v2"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/config"
"github.com/photoprism/photoprism/internal/entity"
@ -60,7 +61,7 @@ func usersModAction(ctx *cli.Context) error {
return fmt.Errorf("user already exists")
}
m.DeletedAt = nil
m.DeletedAt = gorm.DeletedAt{}
log.Infof("user %s will be restored", m.String())
}

View file

@ -57,37 +57,37 @@ func usersResetAction(ctx *cli.Context) error {
db := conf.Db()
// Drop existing user management tables.
if err := db.DropTableIfExists(entity.User{}, entity.UserDetails{}, entity.UserSettings{}, entity.UserShare{}, entity.Passcode{}, entity.Session{}).Error; err != nil {
if err := db.Migrator().DropTable(entity.User{}, entity.UserDetails{}, entity.UserSettings{}, entity.UserShare{}, entity.Passcode{}, entity.Session{}); err != nil {
return err
}
// Re-create auth_users.
if err := db.CreateTable(entity.User{}).Error; err != nil {
if err := db.Migrator().CreateTable(entity.User{}); err != nil {
return err
}
// Re-create auth_users_details.
if err := db.CreateTable(entity.UserDetails{}).Error; err != nil {
if err := db.Migrator().CreateTable(entity.UserDetails{}); err != nil {
return err
}
// Re-create auth_users_settings.
if err := db.CreateTable(entity.UserSettings{}).Error; err != nil {
if err := db.Migrator().CreateTable(entity.UserSettings{}); err != nil {
return err
}
// Re-create auth_users_shares.
if err := db.CreateTable(entity.UserShare{}).Error; err != nil {
if err := db.Migrator().CreateTable(entity.UserShare{}); err != nil {
return err
}
// Re-create passcodes.
if err := db.CreateTable(entity.Passcode{}).Error; err != nil {
if err := db.Migrator().CreateTable(entity.Passcode{}); err != nil {
return err
}
// Re-create auth_sessions.
if err := db.CreateTable(entity.Session{}).Error; err != nil {
if err := db.Migrator().CreateTable(entity.Session{}); err != nil {
return err
}

View file

@ -1,12 +1,21 @@
package commands
import (
"bytes"
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/internal/entity"
"github.com/photoprism/photoprism/pkg/dsn"
)
func TestUsersResetCommand(t *testing.T) {
c := resetConfigAndOpenDB()
// reset as this test removes all users
defer resetConfigAndDB()
t.Run("NotConfirmed", func(t *testing.T) {
// Run command with test context.
output0, err := RunWithTestContext(UsersListCommand, []string{"ls"})
@ -34,4 +43,46 @@ func TestUsersResetCommand(t *testing.T) {
assert.Contains(t, output1, "alice")
assert.Contains(t, output1, "bob")
})
t.Run("Reset", func(t *testing.T) {
// c := resetConfigAndDB()
count := int64(0)
if err := c.Db().Model(&entity.User{}).Count(&count).Error; err != nil {
assert.NoError(t, err)
return
}
assert.Greater(t, count, int64(3)) // Make sure we have a populated database
dbDrv, dbDSN := dsn.PhotoPrismTestToDriverDSN(0)
// Run command with test context.
appArgs := []string{"photoprism",
"--database-driver", dbDrv,
"--database-dsn", dbDSN}
cmdArgs := []string{"reset", "--yes"}
ctx := NewTestContextWithParse(appArgs, cmdArgs)
// Setup and capture output
buffer := bytes.Buffer{}
log.SetOutput(&buffer)
output, err := RunWithProvidedTestContext(ctx, UsersResetCommand, cmdArgs)
// Reset logger
log.SetOutput(os.Stdout)
// Check command output for plausibility.
// t.Logf("buffer = %s", buffer.String())
assert.NoError(t, err)
assert.Empty(t, output)
assert.Contains(t, buffer.String(), "the user database has been recreated and is now in a clean state")
c = reopenConnection()
count = int64(1)
if err := c.Db().Model(&entity.User{}).Count(&count).Error; err != nil {
assert.NoError(t, err)
return
}
assert.Equal(t, int64(0), count)
})
}

View file

@ -7,6 +7,7 @@ import (
)
func TestUsersShowCommand(t *testing.T) {
resetConfigAndOpenDB()
t.Run("Alice", func(t *testing.T) {
// Run command with test context.
output, err := RunWithTestContext(UsersShowCommand, []string{"show", "alice"})

View file

@ -25,6 +25,7 @@ func TestUsersCommand(t *testing.T) {
assert.Contains(t, output2, "John")
assert.Contains(t, output2, "admin")
assert.Contains(t, output2, "john@test.de")
assert.NotContains(t, output2, "DeletedAt")
// Modify John
// Run command with test context.
@ -43,7 +44,7 @@ func TestUsersCommand(t *testing.T) {
assert.Contains(t, output4, "Johnny")
assert.Contains(t, output4, "admin")
assert.Contains(t, output4, "johnnny@test.de")
assert.Contains(t, output4, "DeletedAt │ <nil>")
assert.NotContains(t, output4, "DeletedAt")
// Remove John
// Run command with test context.
@ -62,7 +63,7 @@ func TestUsersCommand(t *testing.T) {
assert.Contains(t, output6, "Johnny")
assert.Contains(t, output6, "admin")
assert.Contains(t, output6, "johnnny@test.de")
assert.Contains(t, output6, "│ DeletedAt │ time.Date")
assert.NotContains(t, output6, "│ DeletedAt │ <nil>")
assert.Contains(t, output6, "│ DeletedAt │ gorm.DeletedAt{Time:time.Date")
assert.NotContains(t, output6, "│ DeletedAt │ gorm.DeletedAt{Time:time.Date(1, time.January, 1, 0, 0, 0, 0, time.UTC), Valid:false}")
})
}

View file

@ -576,33 +576,33 @@ func (c *Config) ClientUser(withSettings bool) *ClientConfig {
if hidePrivate {
c.Db().
Table("photos").
Select("SUM(photo_type = 'animated' AND photo_quality > -1 AND photo_private = 0) AS animated, " +
"SUM(photo_type = 'video' AND photo_quality > -1 AND photo_private = 0) AS videos, " +
"SUM(photo_type = 'live' AND photo_quality > -1 AND photo_private = 0) AS live, " +
"SUM(photo_type = 'audio' AND photo_quality > -1 AND photo_private = 0) AS audio, " +
"SUM(photo_type = 'document' AND photo_quality > -1 AND photo_private = 0) AS documents, " +
"SUM(photo_quality = -1) AS hidden, " +
"SUM(photo_type NOT IN ('animated','video','live','audio','document') AND photo_quality > -1 AND photo_private = 0) AS photos, " +
"SUM(photo_quality BETWEEN 0 AND 2) AS review, " +
"SUM(photo_favorite = 1 AND photo_private = 0 AND photo_quality > -1) AS favorites, " +
"SUM(photo_private = 1 AND photo_quality > -1) AS private").
Where("photos.id NOT IN (SELECT photo_id FROM files WHERE file_primary = 1 AND (file_missing = 1 OR file_error <> ''))").
Select("COUNT(CASE WHEN photo_type = 'animated' AND photo_quality > -1 AND photo_private = FALSE THEN 1 END) AS animated, " +
"COUNT(CASE WHEN photo_type = 'video' AND photo_quality > -1 AND photo_private = FALSE THEN 1 END) AS videos, " +
"COUNT(CASE WHEN photo_type = 'live' AND photo_quality > -1 AND photo_private = FALSE THEN 1 END) AS live, " +
"COUNT(CASE WHEN photo_type = 'audio' AND photo_quality > -1 AND photo_private = FALSE THEN 1 END) AS audio, " +
"COUNT(CASE WHEN photo_type = 'document' AND photo_quality > -1 AND photo_private = FALSE THEN 1 END) AS documents, " +
"COUNT(CASE WHEN photo_quality = -1 THEN 1 END) AS hidden, " +
"COUNT(CASE WHEN photo_type NOT IN ('animated','video','live','audio','document') AND photo_quality > -1 AND photo_private = FALSE THEN 1 END) AS photos, " +
"COUNT(CASE WHEN photo_quality BETWEEN 0 AND 2 THEN 1 END) AS review, " +
"COUNT(CASE WHEN photo_favorite = TRUE AND photo_private = FALSE AND photo_quality > -1 THEN 1 END) AS favorites, " +
"COUNT(CASE WHEN photo_private = TRUE AND photo_quality > -1 THEN 1 END) AS private").
Where("photos.id NOT IN (SELECT photo_id FROM files WHERE file_primary = TRUE AND (file_missing = TRUE OR file_error <> ''))").
Where("deleted_at IS NULL").
Take(&cfg.Count)
} else {
c.Db().
Table("photos").
Select("SUM(photo_type = 'animated' AND photo_quality > -1) AS animated, " +
"SUM(photo_type = 'video' AND photo_quality > -1) AS videos, " +
"SUM(photo_type = 'live' AND photo_quality > -1) AS live, " +
"SUM(photo_type = 'audio' AND photo_quality > -1) AS audio, " +
"SUM(photo_type = 'document' AND photo_quality > -1) AS documents, " +
"SUM(photo_quality = -1) AS hidden, " +
"SUM(photo_type NOT IN ('animated','video','live','audio','document') AND photo_quality > -1) AS photos, " +
"SUM(photo_quality BETWEEN 0 AND 2) AS review, " +
"SUM(photo_favorite = 1 AND photo_quality > -1) AS favorites, " +
Select("COUNT(CASE WHEN photo_type = 'animated' AND photo_quality > -1 THEN 1 END) AS animated, " +
"COUNT(CASE WHEN photo_type = 'video' AND photo_quality > -1 THEN 1 END) AS videos, " +
"COUNT(CASE WHEN photo_type = 'live' AND photo_quality > -1 THEN 1 END) AS live, " +
"COUNT(CASE WHEN photo_type = 'audio' AND photo_quality > -1 THEN 1 END) AS audio, " +
"COUNT(CASE WHEN photo_type = 'document' AND photo_quality > -1 THEN 1 END) AS documents, " +
"COUNT(CASE WHEN photo_quality = -1 THEN 1 END) AS hidden, " +
"COUNT(CASE WHEN photo_type NOT IN ('animated','video','live','audio','document') AND photo_quality > -1 THEN 1 END) AS photos, " +
"COUNT(CASE WHEN photo_quality BETWEEN 0 AND 2 THEN 1 END) AS review, " +
"COUNT(CASE WHEN photo_favorite = TRUE AND photo_quality > -1 THEN 1 END) AS favorites, " +
"0 AS private").
Where("photos.id NOT IN (SELECT photo_id FROM files WHERE file_primary = 1 AND (file_missing = 1 OR file_error <> ''))").
Where("photos.id NOT IN (SELECT photo_id FROM files WHERE file_primary = TRUE AND (file_missing = TRUE OR file_error <> ''))").
Where("deleted_at IS NULL").
Take(&cfg.Count)
}
@ -611,7 +611,7 @@ func (c *Config) ClientUser(withSettings bool) *ClientConfig {
if c.Settings().Features.Archive {
c.Db().
Table("photos").
Select("SUM(photo_quality > -1) AS archived").
Select("COUNT(CASE WHEN photo_quality > -1 THEN 1 END) AS archived").
Where("deleted_at IS NOT NULL").
Take(&cfg.Count)
}
@ -630,34 +630,34 @@ func (c *Config) ClientUser(withSettings bool) *ClientConfig {
Select("MAX(photo_count) AS label_max_photos, COUNT(*) AS labels").
Where("photo_count > 0").
Where("deleted_at IS NULL").
Where("(labels.label_priority >= 0 AND labels.photo_count > 1 OR labels.label_favorite = 1)").
Where("(labels.label_priority >= 0 AND labels.photo_count > 1 OR labels.label_favorite = TRUE)").
Take(&cfg.Count)
if hidePrivate {
c.Db().
Table("albums").
Select("SUM(album_type = ?) AS albums, "+
"SUM(album_type = ?) AS moments, "+
"SUM(album_type = ?) AS months, "+
"SUM(album_type = ?) AS states, "+
"SUM(album_type = ?) AS folders, "+
"SUM(album_type = ? AND album_private = 1) AS private_albums, "+
"SUM(album_type = ? AND album_private = 1) AS private_moments, "+
"SUM(album_type = ? AND album_private = 1) AS private_months, "+
"SUM(album_type = ? AND album_private = 1) AS private_states, "+
"SUM(album_type = ? AND album_private = 1) AS private_folders",
Select("COUNT(CASE WHEN album_type = ? THEN 1 END) AS albums, "+
"COUNT(CASE WHEN album_type = ? THEN 1 END) AS moments, "+
"COUNT(CASE WHEN album_type = ? THEN 1 END) AS months, "+
"COUNT(CASE WHEN album_type = ? THEN 1 END) AS states, "+
"COUNT(CASE WHEN album_type = ? THEN 1 END) AS folders, "+
"COUNT(CASE WHEN album_type = ? AND album_private = TRUE THEN 1 END) AS private_albums, "+
"COUNT(CASE WHEN album_type = ? AND album_private = TRUE THEN 1 END) AS private_moments, "+
"COUNT(CASE WHEN album_type = ? AND album_private = TRUE THEN 1 END) AS private_months, "+
"COUNT(CASE WHEN album_type = ? AND album_private = TRUE THEN 1 END) AS private_states, "+
"COUNT(CASE WHEN album_type = ? AND album_private = TRUE THEN 1 END) AS private_folders",
entity.AlbumManual, entity.AlbumMoment, entity.AlbumMonth, entity.AlbumState, entity.AlbumFolder,
entity.AlbumManual, entity.AlbumMoment, entity.AlbumMonth, entity.AlbumState, entity.AlbumFolder).
Where("deleted_at IS NULL AND (albums.album_type <> 'folder' OR albums.album_path IN (SELECT photos.photo_path FROM photos WHERE photos.photo_private = 0 AND photos.deleted_at IS NULL))").
Where("deleted_at IS NULL AND (albums.album_type <> 'folder' OR albums.album_path IN (SELECT photos.photo_path FROM photos WHERE photos.photo_private = FALSE AND photos.deleted_at IS NULL))").
Take(&cfg.Count)
} else {
c.Db().
Table("albums").
Select("SUM(album_type = ?) AS albums, "+
"SUM(album_type = ?) AS moments, "+
"SUM(album_type = ?) AS months, "+
"SUM(album_type = ?) AS states, "+
"SUM(album_type = ?) AS folders",
Select("COUNT(CASE WHEN album_type = ? THEN 1 END) AS albums, "+
"COUNT(CASE WHEN album_type = ? THEN 1 END) AS moments, "+
"COUNT(CASE WHEN album_type = ? THEN 1 END) AS months, "+
"COUNT(CASE WHEN album_type = ? THEN 1 END) AS states, "+
"COUNT(CASE WHEN album_type = ? THEN 1 END) AS folders",
entity.AlbumManual, entity.AlbumMoment, entity.AlbumMonth, entity.AlbumState, entity.AlbumFolder).
Where("deleted_at IS NULL AND (albums.album_type <> 'folder' OR albums.album_path IN (SELECT photos.photo_path FROM photos WHERE photos.deleted_at IS NULL))").
Take(&cfg.Count)
@ -666,7 +666,7 @@ func (c *Config) ClientUser(withSettings bool) *ClientConfig {
c.Db().
Table("files").
Select("COUNT(*) AS files").
Where("file_missing = 0 AND file_root = ? AND deleted_at IS NULL", entity.RootOriginals).
Where("file_missing = FALSE AND file_root = ? AND deleted_at IS NULL", entity.RootOriginals).
Take(&cfg.Count)
c.Db().
@ -676,7 +676,7 @@ func (c *Config) ClientUser(withSettings bool) *ClientConfig {
c.Db().
Table("places").
Select("SUM(photo_count > 0) AS places").
Select("COUNT(CASE WHEN photo_count > 0 THEN 1 END) AS places").
Where("id <> 'zz'").
Take(&cfg.Count)
@ -700,7 +700,7 @@ func (c *Config) ClientUser(withSettings bool) *ClientConfig {
Find(&cfg.Lenses)
c.Db().
Where("deleted_at IS NULL AND album_favorite = 1").
Where("deleted_at IS NULL AND album_favorite = TRUE").
Limit(20).Order("album_title").
Find(&cfg.Albums)

View file

@ -39,14 +39,13 @@ import (
"time"
"github.com/dustin/go-humanize"
"github.com/jinzhu/gorm"
_ "github.com/jinzhu/gorm/dialects/mysql" // register mysql dialect
_ "github.com/jinzhu/gorm/dialects/sqlite" // register sqlite dialect
"github.com/jackc/pgx/v5/pgxpool"
"github.com/klauspost/cpuid/v2"
gc "github.com/patrickmn/go-cache"
"github.com/pbnjay/memory"
"github.com/sirupsen/logrus"
"github.com/urfave/cli/v2"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/ai/face"
"github.com/photoprism/photoprism/internal/ai/vision"
@ -54,6 +53,7 @@ import (
"github.com/photoprism/photoprism/internal/config/customize"
"github.com/photoprism/photoprism/internal/config/ttl"
"github.com/photoprism/photoprism/internal/entity"
"github.com/photoprism/photoprism/internal/event"
"github.com/photoprism/photoprism/internal/mutex"
"github.com/photoprism/photoprism/internal/photoprism/dl"
"github.com/photoprism/photoprism/internal/service/hub"
@ -73,6 +73,7 @@ type Config struct {
options *Options
settings *customize.Settings
db *gorm.DB
pool *pgxpool.Pool
dbVersion string
hub *hub.Config
hubCancel context.CancelFunc
@ -664,6 +665,9 @@ func (c *Config) Shutdown() {
// Shutdown thumbnail library.
thumb.Shutdown()
// Close the subscriptions
event.SharedHub().Close()
// Close database connection.
if err := c.CloseDb(); err != nil {
log.Errorf("could not close database connection: %s", err)
@ -881,3 +885,43 @@ func (c *Config) Hub() *hub.Config {
return c.hub
}
// SwapDBAndTransfer exchanges the primary database options with the
// DBTransfer options so that the next connection targets the transfer
// database. It returns an error if the database has already been
// initialised or if no transfer configuration was provided.
func (c *Config) SwapDBAndTransfer() error {
	if c.db != nil {
		return fmt.Errorf("config: database must not be initialised")
	}

	// Refuse to swap when every transfer option is empty, as that would
	// replace the active database settings with a blank configuration.
	if c.options.DBTransferDriver == "" &&
		c.options.DBTransferDSN == "" &&
		c.options.DBTransferName == "" &&
		c.options.DBTransferServer == "" {
		return fmt.Errorf("config: transfer config must be provided")
	}

	// Swap each field pair using parallel assignment (no temp variable needed).
	c.options.DBTransferDriver, c.options.DatabaseDriver = c.options.DatabaseDriver, c.options.DBTransferDriver
	c.options.DBTransferDSN, c.options.DatabaseDSN = c.options.DatabaseDSN, c.options.DBTransferDSN
	c.options.DBTransferName, c.options.DatabaseName = c.options.DatabaseName, c.options.DBTransferName
	c.options.DBTransferPassword, c.options.DatabasePassword = c.options.DatabasePassword, c.options.DBTransferPassword
	c.options.DBTransferServer, c.options.DatabaseServer = c.options.DatabaseServer, c.options.DBTransferServer
	c.options.DBTransferUser, c.options.DatabaseUser = c.options.DatabaseUser, c.options.DBTransferUser

	return nil
}

View file

@ -4,16 +4,20 @@ import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/pkg/dsn"
)
func TestConfig_BackupPath(t *testing.T) {
	c := NewConfig(CliTestContext())

	// The backup path is scoped to the per-driver test storage folder.
	expected := "/storage/testdata/" + dsn.PhotoPrismTestToFolderName() + "/backup"
	assert.Contains(t, c.BackupPath(""), expected)
}
func TestConfig_BackupBasePath(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Contains(t, c.BackupBasePath(), "/storage/testdata/backup")
expected := "/storage/testdata/" + dsn.PhotoPrismTestToFolderName() + "/backup"
assert.Contains(t, c.BackupBasePath(), expected)
path := c.options.BackupPath
c.options.BackupPath = "./"
assert.Contains(t, c.BackupBasePath(), "/photoprism/internal/config")
@ -59,7 +63,8 @@ func TestConfig_BackupDatabasePath(t *testing.T) {
// Ensure DB defaults (SQLite) so path resolves to sqlite backup path
c.options.DatabaseDriver = ""
c.options.DatabaseDSN = ""
assert.Contains(t, c.BackupDatabasePath(), "/storage/testdata/backup/sqlite")
expected := "/storage/testdata/" + dsn.PhotoPrismTestToFolderName() + "/backup/sqlite"
assert.Contains(t, c.BackupDatabasePath(), expected)
}
func TestConfig_BackupAlbums(t *testing.T) {

View file

@ -6,14 +6,17 @@ import (
"os"
"path/filepath"
"runtime"
"runtime/debug"
"strconv"
"strings"
"time"
"github.com/jinzhu/gorm"
_ "github.com/jinzhu/gorm/dialects/mysql" // register mysql dialect
_ "github.com/jinzhu/gorm/dialects/sqlite"
"golang.org/x/mod/semver"
"gorm.io/driver/mysql"
"gorm.io/driver/postgres"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"gorm.io/gorm/logger"
"github.com/photoprism/photoprism/internal/entity"
"github.com/photoprism/photoprism/internal/entity/migrate"
@ -26,7 +29,6 @@ import (
)
// SQL Databases.
// TODO: PostgreSQL support requires upgrading GORM, so generic column data types can be used.
const (
Auto = "auto"
MySQL = dsn.DriverMySQL
@ -35,6 +37,12 @@ const (
SQLite3 = dsn.DriverSQLite3
)
// drivers maps each supported database driver name to the GORM dialector
// constructor used to open a connection for that driver.
var drivers = map[string]func(string) gorm.Dialector{
	MySQL:    mysql.Open,
	SQLite3:  sqlite.Open,
	Postgres: postgres.Open,
}
// DatabaseDriver returns the database driver name.
func (c *Config) DatabaseDriver() string {
c.normalizeDatabaseDSN()
@ -42,8 +50,10 @@ func (c *Config) DatabaseDriver() string {
switch strings.ToLower(c.options.DatabaseDriver) {
case MySQL, MariaDB:
c.options.DatabaseDriver = MySQL
case SQLite3, "sqlite", "test", "file", "":
case SQLite3, "sqlite3", "test", "file", "":
c.options.DatabaseDriver = SQLite3
case Postgres:
c.options.DatabaseDriver = Postgres
case "tidb":
log.Warnf("config: database driver 'tidb' is deprecated, using sqlite")
c.options.DatabaseDriver = SQLite3
@ -62,8 +72,10 @@ func (c *Config) DatabaseDriverName() string {
switch c.DatabaseDriver() {
case MySQL, MariaDB:
return "MariaDB"
case SQLite3, "sqlite", "test", "file", "":
case SQLite3, "sqlite3", "test", "file", "":
return "SQLite"
case Postgres:
return "PostgreSQL"
case "tidb":
return "TiDB"
default:
@ -137,14 +149,13 @@ func (c *Config) DatabaseDSN() string {
)
case Postgres:
return fmt.Sprintf(
"user=%s password=%s dbname=%s host=%s port=%d connect_timeout=%d %s",
"postgresql://%s:%s@%s/%s?connect_timeout=%d&%s",
c.DatabaseUser(),
c.DatabasePassword(),
c.DatabaseServer(),
c.DatabaseName(),
c.DatabaseHost(),
c.DatabasePort(),
c.DatabaseTimeout(),
dsn.Params[dsn.DriverPostgres],
dsn.Params[dsn.DriverPostgreSQL],
)
case SQLite3:
return filepath.Join(c.StoragePath(), fmt.Sprintf("index.db?%s", dsn.Params[dsn.DriverSQLite3]))
@ -227,8 +238,9 @@ func (c *Config) DatabaseServer() string {
func (c *Config) DatabaseHost() string {
c.ParseDatabaseDSN()
if c.DatabaseDriver() == SQLite3 {
return ""
if c.DatabaseDriver() == SQLite3 || c.NoDatabaseDSN() {
d := dsn.DSN{Driver: c.DatabaseDriver(), Server: c.DatabaseServer(), DSN: ""}
return d.Host()
}
d := dsn.Parse(c.DatabaseDSN())
@ -239,8 +251,9 @@ func (c *Config) DatabaseHost() string {
func (c *Config) DatabasePort() int {
c.ParseDatabaseDSN()
if c.DatabaseDriver() == SQLite3 {
return 0
if c.DatabaseDriver() == SQLite3 || c.NoDatabaseDSN() {
d := dsn.DSN{Driver: c.DatabaseDriver(), Server: c.DatabaseServer(), DSN: ""}
return d.Port()
}
d := dsn.Parse(c.DatabaseDSN())
@ -417,6 +430,7 @@ func (c *Config) DatabaseConnsIdle() int {
// Db returns the db connection.
func (c *Config) Db() *gorm.DB {
if c.db == nil {
log.Debugf(fmt.Sprintf("Stack Trace: %s", debug.Stack()))
log.Fatal("config: database not connected")
}
@ -426,17 +440,47 @@ func (c *Config) Db() *gorm.DB {
// CloseDb closes the db connection (if any).
func (c *Config) CloseDb() error {
if c.db != nil {
if err := c.db.Close(); err == nil {
c.db = nil
sqldb, dberr := c.db.DB()
if dberr == nil {
log.Debug("config: closing database")
if err := sqldb.Close(); err != nil {
return err
}
entity.SetDbProvider(nil)
c.db = nil
} else {
return err
return dberr
}
if c.pool != nil {
log.Debug("config: closing postgres pool")
c.pool.Close()
c.pool = nil
}
}
return nil
}
// IsDbOpen reports whether a database connection has been established and
// currently responds to a ping.
func (c *Config) IsDbOpen() bool {
	// No GORM handle means the database was never connected (or was closed).
	if c.db == nil {
		log.Debug("isdbopen: c.db == nil")
		return false
	}

	sqlDB, err := c.db.DB()

	if err != nil {
		log.Errorf("isdbopen: c.db.DB err = %+v", err)
		return false
	}

	// Ping verifies the underlying connection is still alive.
	if sqlErr := sqlDB.Ping(); sqlErr != nil {
		log.Errorf("isdbopen: Ping err = %+v", sqlErr)
		return false
	}

	return true
}
// SetDbOptions sets the database collation to unicode if supported.
func (c *Config) SetDbOptions() {
switch c.DatabaseDriver() {
@ -491,6 +535,13 @@ func (c *Config) MigrateDb(runFailed bool, ids []string) {
// InitTestDb drops all tables in the currently configured database and re-creates them.
func (c *Config) InitTestDb() {
// Make sure that the migrations and versions tables are already there, as once prevents these from being handled correctly in tests.
if (!c.db.Migrator().HasTable(&migrate.Migration{})) {
c.db.Migrator().AutoMigrate(&migrate.Migration{})
}
if (!c.db.Migrator().HasTable(&migrate.Version{})) {
c.db.Migrator().AutoMigrate(&migrate.Version{})
}
entity.ResetTestFixtures()
if c.AdminPassword() == "" {
@ -544,6 +595,20 @@ func (c *Config) checkDb(db *gorm.DB) error {
case !c.IsDatabaseVersion("v10.5.12"):
return fmt.Errorf("config: MariaDB %s is not supported, see https://docs.photoprism.app/getting-started/#databases", c.dbVersion)
}
case Postgres:
var versions []string
err := db.Raw("SELECT VERSION() AS Value").Pluck("value", &versions).Error
// Version query not supported.
if err != nil {
log.Tracef("config: failed to detect database version (%s)", err)
return nil
}
c.dbVersion = clean.Version(versions[0])
if c.dbVersion == "" {
log.Warnf("config: unknown database server version")
}
case SQLite3:
type Res struct {
Value string `gorm:"column:Value;"`
@ -569,6 +634,26 @@ func (c *Config) checkDb(db *gorm.DB) error {
return nil
}
// gormConfig returns the shared GORM configuration: an error-level SQL logger
// with parameterized (redacted) query logging, and a NowFunc that stores
// created/updated timestamps in UTC.
func gormConfig() *gorm.Config {
	return &gorm.Config{
		Logger: logger.New(
			log, // This should be dummy.NewLogger(), to match GORM1. Set to log before release...
			logger.Config{
				SlowThreshold:             time.Second,  // Slow SQL threshold
				LogLevel:                  logger.Error, // Log level <-- This should be Silent to match GORM1, set to Error before release...
				IgnoreRecordNotFoundError: true,         // Ignore ErrRecordNotFound error for logger
				ParameterizedQueries:      true,         // Don't include params in the SQL log
				Colorful:                  false,        // Disable color
			},
		),

		// Set UTC as the default for created and updated timestamps.
		NowFunc: func() time.Time {
			return time.Now().UTC()
		},
	}
}
// connectDb establishes a database connection.
func (c *Config) connectDb() error {
// Make sure this is not running twice.
@ -582,65 +667,89 @@ func (c *Config) connectDb() error {
// Get database driver and data source name.
dbDriver := c.DatabaseDriver()
dbDsn := c.DatabaseDSN()
dbDSN := c.DatabaseDSN()
if dbDriver == "" {
return errors.New("config: database driver not specified")
}
if dbDsn == "" {
if dbDSN == "" {
return errors.New("config: database DSN not specified")
}
// Open database connection.
db, err := gorm.Open(dbDriver, dbDsn)
if err != nil || db == nil {
log.Infof("config: waiting for the database to become available")
if c.IsDbOpen() {
log.Info("config: database is already open")
} else {
for i := 1; i <= 12; i++ {
db, err = gorm.Open(dbDriver, dbDsn)
// Open database connection.
var db *gorm.DB
var err error
if dbDriver == Postgres {
postgresDB, pgxPool := entity.OpenPostgreSQL(dbDSN)
c.pool = pgxPool
db, err = gorm.Open(postgres.New(postgres.Config{Conn: postgresDB}), gormConfig())
} else {
c.pool = nil
db, err = gorm.Open(drivers[dbDriver](dbDSN), gormConfig())
}
if err != nil || db == nil {
log.Infof("config: waiting for the database to become available")
if db != nil && err == nil {
break
for i := 1; i <= 12; i++ {
if dbDriver == Postgres {
postgresDB, pgxPool := entity.OpenPostgreSQL(dbDSN)
c.pool = pgxPool
db, err = gorm.Open(postgres.New(postgres.Config{Conn: postgresDB}), gormConfig())
} else {
c.pool = nil
db, err = gorm.Open(drivers[dbDriver](dbDSN), gormConfig())
}
if db != nil && err == nil {
break
}
time.Sleep(5 * time.Second)
}
time.Sleep(5 * time.Second)
if err != nil || db == nil {
return err
}
}
if err != nil || db == nil {
return err
// Set database connection parameters.
if dbDriver != Postgres {
sqlDB, err := db.DB()
if err != nil {
return err
}
sqlDB.SetMaxOpenConns(c.DatabaseConns())
sqlDB.SetMaxIdleConns(c.DatabaseConnsIdle())
sqlDB.SetConnMaxLifetime(time.Hour)
}
}
// Configure database logging.
db.LogMode(false)
db.SetLogger(log)
// Set database connection parameters.
db.DB().SetMaxOpenConns(c.DatabaseConns())
db.DB().SetMaxIdleConns(c.DatabaseConnsIdle())
db.DB().SetConnMaxLifetime(time.Hour)
// Check database server version.
if err = c.checkDb(db); err != nil {
if c.Unsafe() {
log.Error(err)
} else {
return err
// Check database server version.
if err = c.checkDb(db); err != nil {
if c.Unsafe() {
log.Error(err)
} else {
return err
}
}
}
if dbVersion := c.DatabaseVersion(); dbVersion != "" {
log.Debugf("database: opened connection to %s %s", c.DatabaseDriverName(), dbVersion)
}
if dbVersion := c.DatabaseVersion(); dbVersion != "" {
log.Debugf("database: opened connection to %s %s", c.DatabaseDriverName(), dbVersion)
}
// Ok.
c.db = db
// Ok.
c.db = db
}
return nil
}
// ImportSQL imports a file to the currently configured database.
// All lines, including comments, must be terminated with a ;\n
func (c *Config) ImportSQL(filename string) {
contents, err := os.ReadFile(filename) //nolint:gosec // import path is provided by trusted caller
@ -658,8 +767,9 @@ func (c *Config) ImportSQL(filename string) {
continue
}
var result struct{}
q.Raw(stmt).Scan(&result)
if err := q.Exec(stmt).Error; err != nil {
log.Error(err)
return
}
}
}

View file

@ -1,12 +1,14 @@
package config
import (
"bytes"
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/internal/service/cluster"
"github.com/photoprism/photoprism/pkg/dsn"
)
// resetDatabaseOptions clears all DB-related option fields so tests start from defaults even if
@ -57,22 +59,48 @@ func TestConfig_DatabaseVersion(t *testing.T) {
func TestConfig_DatabaseSsl(t *testing.T) {
	c := TestConfig()

	// SSL is only expected for the MariaDB test configuration; an
	// unconditional assert.False here would contradict that branch.
	driver := c.DatabaseDriverName()
	switch driver {
	case "SQLite":
		assert.False(t, c.DatabaseSsl())
	case "MariaDB":
		assert.True(t, c.DatabaseSsl())
	case "PostgreSQL":
		assert.False(t, c.DatabaseSsl())
	default:
		assert.Empty(t, driver)
		assert.Fail(t, "driver not recognised")
	}
}
func TestConfig_normalizeDatabaseDSN(t *testing.T) {
	// Each subtest creates its own config; an outer, unused "c" would be a
	// compile error ("declared and not used").
	t.Run("MariaDB", func(t *testing.T) {
		c := NewConfig(CliTestContext())
		c.options.Deprecated.DatabaseDsn = "foo:b@r@tcp(honeypot:1234)/baz?charset=utf8mb4,utf8&parseTime=true"
		c.options.DatabaseDriver = MySQL

		assert.Equal(t, "honeypot:1234", c.DatabaseServer())
		assert.Equal(t, "honeypot", c.DatabaseHost())
		assert.Equal(t, 1234, c.DatabasePort())
		assert.Equal(t, "baz", c.DatabaseName())
		assert.Equal(t, "foo", c.DatabaseUser())
		assert.Equal(t, "b@r", c.DatabasePassword())
	})
	t.Run("Postgres", func(t *testing.T) {
		c := NewConfig(CliTestContext())
		c.options.Deprecated.DatabaseDsn = "postgresql://foo:b@r@honeypot:1234/baz?TimeZone=UTC&connect_timeout=15&lock_timeout=5000&sslmode=disable"
		c.options.DatabaseDriver = Postgres

		assert.Equal(t, "honeypot:1234", c.DatabaseServer())
		assert.Equal(t, "honeypot", c.DatabaseHost())
		assert.Equal(t, 1234, c.DatabasePort())
		assert.Equal(t, "baz", c.DatabaseName())
		assert.Equal(t, "foo", c.DatabaseUser())
		assert.Equal(t, "b@r", c.DatabasePassword())
	})
}
func TestConfig_ParseDatabaseDSN(t *testing.T) {
@ -174,7 +202,8 @@ func TestConfig_DatabasePortString(t *testing.T) {
func TestConfig_DatabaseName(t *testing.T) {
	c := NewConfig(CliTestContext())
	resetDatabaseOptions(c)

	// With defaults (SQLite), the database name is the per-driver test
	// folder's index.db path including its connection parameters.
	expected := ProjectRoot + "/storage/testdata/" + dsn.PhotoPrismTestToFolderName() + "/index.db?_busy_timeout=5000&_foreign_keys=on"
	assert.Equal(t, expected, c.DatabaseName())
}
func TestConfig_DatabaseUser(t *testing.T) {
@ -220,24 +249,25 @@ func TestDatabaseProvisionPrefix(t *testing.T) {
func TestShouldAutoRotateDatabase(t *testing.T) {
	t.Run("PortalAlwaysFalse", func(t *testing.T) {
		// Portal nodes never auto-rotate, regardless of driver.
		conf := NewMinimalTestConfig("config", t.TempDir())
		conf.Options().NodeRole = cluster.RolePortal
		conf.Options().DatabaseDriver = MySQL
		assert.False(t, conf.ShouldAutoRotateDatabase())
	})
	t.Run("NonMySQLDriverFalse", func(t *testing.T) {
		conf := NewMinimalTestConfig("config", t.TempDir())
		conf.Options().DatabaseDriver = SQLite3
		assert.False(t, conf.ShouldAutoRotateDatabase())
	})
	t.Run("MySQLMissingFieldsTrue", func(t *testing.T) {
		// MySQL with missing credentials/DSN should trigger auto-rotation.
		conf := NewMinimalTestConfig("config", t.TempDir())
		conf.Options().DatabaseDriver = MySQL
		conf.Options().DatabaseName = "photoprism"
		conf.Options().DatabaseUser = ""
		conf.Options().DatabasePassword = ""
		conf.Options().DatabaseDSN = ""
		assert.True(t, conf.ShouldAutoRotateDatabase())
	})
}
@ -251,13 +281,13 @@ func TestConfig_DatabaseDSN(t *testing.T) {
c.options.DatabaseDriver = "MariaDB"
assert.Equal(t, "photoprism:@tcp(localhost)/photoprism?charset=utf8mb4,utf8&collation=utf8mb4_unicode_ci&parseTime=true&timeout=15s", c.DatabaseDSN())
c.options.DatabaseDriver = "tidb"
assert.Equal(t, ProjectRoot+"/storage/testdata/index.db?_busy_timeout=5000", c.DatabaseDSN())
assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/index.db?_busy_timeout=5000&_foreign_keys=on", c.DatabaseDSN())
c.options.DatabaseDriver = "Postgres"
assert.Equal(t, ProjectRoot+"/storage/testdata/index.db?_busy_timeout=5000", c.DatabaseDSN())
assert.Equal(t, "postgresql://photoprism:@localhost/photoprism?connect_timeout=15&sslmode=disable&TimeZone=UTC&lock_timeout=5000", c.DatabaseDSN())
c.options.DatabaseDriver = "SQLite"
assert.Equal(t, ProjectRoot+"/storage/testdata/index.db?_busy_timeout=5000", c.DatabaseDSN())
assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/index.db?_busy_timeout=5000&_foreign_keys=on", c.DatabaseDSN())
c.options.DatabaseDriver = ""
assert.Equal(t, ProjectRoot+"/storage/testdata/index.db?_busy_timeout=5000", c.DatabaseDSN())
assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/index.db?_busy_timeout=5000&_foreign_keys=on", c.DatabaseDSN())
t.Run("CustomServer", func(t *testing.T) {
conf := NewConfig(CliTestContext())
@ -337,8 +367,8 @@ func TestConfig_DatabaseFile(t *testing.T) {
driver := c.DatabaseDriver()
assert.Equal(t, SQLite3, driver)
c.options.DatabaseDSN = ""
assert.Equal(t, ProjectRoot+"/storage/testdata/index.db", c.DatabaseFile())
assert.Equal(t, ProjectRoot+"/storage/testdata/index.db?_busy_timeout=5000", c.DatabaseDSN())
assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/index.db", c.DatabaseFile())
assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/index.db?_busy_timeout=5000&_foreign_keys=on", c.DatabaseDSN())
}
func TestConfig_DatabaseTimeout(t *testing.T) {
@ -378,6 +408,27 @@ func TestConfig_DatabaseConnsIdle(t *testing.T) {
assert.Equal(t, 28, c.DatabaseConnsIdle())
}
func TestImportSQL(t *testing.T) {
	c := NewConfig(CliTestContext())
	c.options.DatabaseDriver, c.options.DatabaseDSN = dsn.PhotoPrismTestToDriverDSN(0)

	if err := c.connectDb(); err != nil {
		assert.Empty(t, err)
		return
	}

	// Capture log output so SQL errors emitted by ImportSQL can be detected.
	// Restore the logger with defer so it is reset even if ImportSQL panics.
	buffer := bytes.Buffer{}
	log.SetOutput(&buffer)
	defer log.SetOutput(os.Stdout)

	c.ImportSQL("./testdata/importtest.sql")

	assert.NotContains(t, buffer.String(), "level=error")
}
func TestConfig_checkDb(t *testing.T) {
c := NewConfig(CliTestContext())

View file

@ -722,6 +722,16 @@ func (c *Config) MariadbDumpBin() string {
return FindBin("", "mariadb-dump", "mysqldump")
}
// PostgreSQLRestoreBin returns the PostgreSQL restore executable file name.
func (c *Config) PostgreSQLRestoreBin() string {
	return FindBin("", "pg_restore")
}
// PostgreSQLDumpBin returns the PostgreSQL backup executable file name.
func (c *Config) PostgreSQLDumpBin() string {
	// Resolved via FindBin with the single candidate name "pg_dump".
	return FindBin("", "pg_dump")
}
// SqliteBin returns the sqlite executable file name.
func (c *Config) SqliteBin() string {
return FindBin("", "sqlite3")

View file

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/photoprism/photoprism/pkg/dsn"
"github.com/photoprism/photoprism/pkg/fs"
"github.com/photoprism/photoprism/pkg/rnd"
)
@ -29,11 +30,11 @@ func TestConfig_FindBin(t *testing.T) {
func TestConfig_SidecarPath(t *testing.T) {
	c := NewConfig(CliTestContext())

	// Sidecar path defaults to the per-driver test storage folder.
	assert.Contains(t, c.SidecarPath(), "testdata/"+dsn.PhotoPrismTestToFolderName()+"/sidecar")
	c.options.SidecarPath = ".photoprism"
	assert.Equal(t, ".photoprism", c.SidecarPath())
	c.options.SidecarPath = ""
	assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/sidecar", c.SidecarPath())
}
func TestConfig_SidecarYaml(t *testing.T) {
@ -138,7 +139,7 @@ func TestConfig_TempPath(t *testing.T) {
t.Logf("c.options.TempPath: '%s'", c.options.TempPath)
t.Logf("c.tempPath(): '%s'", d0)
assert.Equal(t, ProjectRoot+"/storage/testdata/temp", c.tempPath())
assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/temp", c.tempPath())
c.options.TempPath = ""
@ -195,9 +196,9 @@ func TestConfig_CmdLibPath(t *testing.T) {
func TestConfig_CachePath2(t *testing.T) {
	c := NewConfig(CliTestContext())

	// Cache path falls back to the same default when unset.
	expected := ProjectRoot + "/storage/testdata/" + dsn.PhotoPrismTestToFolderName() + "/cache"
	assert.Equal(t, expected, c.CachePath())
	c.options.CachePath = ""
	assert.Equal(t, expected, c.CachePath())
}
func TestConfig_SettingsYaml(t *testing.T) {
@ -240,15 +241,15 @@ func TestConfig_HubConfigFile(t *testing.T) {
func TestConfig_StoragePath(t *testing.T) {
	c := NewConfig(CliTestContext())

	assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName(), c.StoragePath())

	// Without an explicit storage path, it is derived from the originals path.
	c.options.StoragePath = ""
	assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/originals/.photoprism/storage", c.StoragePath())
}
func TestConfig_TestdataPath(t *testing.T) {
	c := NewConfig(CliTestContext())

	assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/testdata", c.TestdataPath())
}
func TestConfig_AlbumsPath(t *testing.T) {
@ -259,13 +260,13 @@ func TestConfig_AlbumsPath(t *testing.T) {
// If this test fails, please manually move “albums” to the “backup” folder
// in the “storage/testdata” directory within your development environment:
// https://github.com/photoprism/photoprism/discussions/4520
assert.Equal(t, ProjectRoot+"/storage/testdata/backup/albums", c.BackupAlbumsPath())
assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/backup/albums", c.BackupAlbumsPath())
}
func TestConfig_OriginalsAlbumsPath(t *testing.T) {
	c := NewConfig(CliTestContext())

	assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/originals/albums", c.OriginalsAlbumsPath())
}
func TestConfig_CreateDirectories(t *testing.T) {
@ -460,21 +461,21 @@ func TestConfig_CreateDirectories2(t *testing.T) {
func TestConfig_PIDFilename2(t *testing.T) {
	c := NewConfig(CliTestContext())

	assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/photoprism.pid", c.PIDFilename())

	// An explicitly configured PID file overrides the default.
	c.options.PIDFilename = ProjectRoot + "/internal/config/testdata/test.pid"
	assert.Equal(t, ProjectRoot+"/internal/config/testdata/test.pid", c.PIDFilename())
}
func TestConfig_LogFilename2(t *testing.T) {
	c := NewConfig(CliTestContext())

	assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/photoprism.log", c.LogFilename())

	// An explicitly configured log file overrides the default.
	c.options.LogFilename = ProjectRoot + "/internal/config/testdata/test.log"
	assert.Equal(t, ProjectRoot+"/internal/config/testdata/test.log", c.LogFilename())
}
func TestConfig_OriginalsPath2(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Equal(t, ProjectRoot+"/storage/testdata/originals", c.OriginalsPath())
assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/originals", c.OriginalsPath())
c.options.OriginalsPath = ""
if s := c.OriginalsPath(); s != "" && s != "/photoprism/originals" {
t.Errorf("unexpected originals path: %s", s)

View file

@ -11,6 +11,8 @@ import (
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/internal/service/hub"
"github.com/photoprism/photoprism/internal/testextras"
"github.com/photoprism/photoprism/pkg/dsn"
"github.com/photoprism/photoprism/pkg/fs"
)
@ -27,9 +29,24 @@ func TestMain(m *testing.M) {
log = logrus.StandardLogger()
log.SetLevel(logrus.TraceLevel)
caller := "internal/config/config_test.go/TestMain"
dbc, dbn, err := testextras.AcquireDBMutex(log, caller)
if err != nil {
log.Error("FAIL")
os.Exit(1)
}
defer testextras.UnlockDBMutex(dbc.Db())
_, dsname := dsn.PhotoPrismTestToDriverDSN(dbn)
dsn.SetDSNToEnv(dsname)
c := TestConfig()
beforeTimestamp := time.Now().UTC()
code := m.Run()
code = testextras.ValidateDBErrors(c.Db(), log, beforeTimestamp, code)
testextras.ReleaseDBMutex(dbc.Db(), log, caller, code)
// Remove temporary SQLite files after running the tests.
if err := c.CloseDb(); err != nil {
@ -153,14 +170,14 @@ func TestConfig_OptionsYaml(t *testing.T) {
func TestConfig_PIDFilename(t *testing.T) {
	c := NewConfig(CliTestContext())

	expected := "/storage/testdata/" + dsn.PhotoPrismTestToFolderName() + "/photoprism.pid"
	assert.Contains(t, c.PIDFilename(), expected)
}
func TestConfig_LogFilename(t *testing.T) {
	c := NewConfig(CliTestContext())

	assert.Contains(t, c.LogFilename(), "/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/photoprism.log")
}
func TestConfig_DetachServer(t *testing.T) {
@ -175,17 +192,17 @@ func TestConfig_OriginalsPath(t *testing.T) {
result := c.OriginalsPath()
assert.True(t, strings.HasPrefix(result, "/"))
assert.True(t, strings.HasSuffix(result, "/storage/testdata/originals"))
assert.True(t, strings.HasSuffix(result, "/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/originals"))
}
func TestConfig_ImportPath(t *testing.T) {
c := NewConfig(CliTestContext())
c.AssertTestData(t)
assert.Equal(t, ProjectRoot+"/storage/testdata/import", c.ImportPath())
assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/import", c.ImportPath())
result := c.ImportPath()
assert.True(t, strings.HasPrefix(result, "/"))
assert.True(t, strings.HasSuffix(result, "/storage/testdata/import"))
assert.True(t, strings.HasSuffix(result, "/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/import"))
c.options.ImportPath = ""
if s := c.ImportPath(); s != "" && s != "/photoprism/import" {
@ -198,14 +215,14 @@ func TestConfig_ImportPath(t *testing.T) {
func TestConfig_CachePath(t *testing.T) {
c := NewConfig(CliTestContext())
assert.True(t, strings.HasSuffix(c.CachePath(), "storage/testdata/cache"))
assert.True(t, strings.HasSuffix(c.CachePath(), "storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/cache"))
}
func TestConfig_MediaCachePath(t *testing.T) {
c := NewConfig(CliTestContext())
assert.True(t, strings.HasPrefix(c.MediaCachePath(), "/"))
assert.True(t, strings.HasSuffix(c.MediaCachePath(), "storage/testdata/cache/media"))
assert.True(t, strings.HasSuffix(c.MediaCachePath(), "storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/cache/media"))
}
func TestConfig_MediaFileCachePath(t *testing.T) {
@ -220,7 +237,7 @@ func TestConfig_ThumbCachePath(t *testing.T) {
c := NewConfig(CliTestContext())
assert.True(t, strings.HasPrefix(c.ThumbCachePath(), "/"))
assert.True(t, strings.HasSuffix(c.ThumbCachePath(), "storage/testdata/cache/thumbnails"))
assert.True(t, strings.HasSuffix(c.ThumbCachePath(), "storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/cache/thumbnails"))
}
func TestConfig_AdminUser(t *testing.T) {
@ -309,11 +326,12 @@ func TestConfig_StaticImgFile(t *testing.T) {
func TestConfig_ThemePath(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Equal(t, ProjectRoot+"/storage/testdata/config/theme", c.ThemePath())
expected := ProjectRoot + "/storage/testdata/" + dsn.PhotoPrismTestToFolderName() + "/config/theme"
assert.Equal(t, expected, c.ThemePath())
c.SetThemePath("testdata/static/img/wallpaper")
assert.Equal(t, ProjectRoot+"/internal/config/testdata/static/img/wallpaper", c.ThemePath())
c.SetThemePath("")
assert.Equal(t, ProjectRoot+"/storage/testdata/config/theme", c.ThemePath())
assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/config/theme", c.ThemePath())
}
func TestConfig_IndexWorkers(t *testing.T) {

View file

@ -9,13 +9,14 @@ import (
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/internal/ai/vision"
"github.com/photoprism/photoprism/pkg/dsn"
"github.com/photoprism/photoprism/pkg/fs"
)
func TestConfig_VisionYaml(t *testing.T) {
t.Run("Default", func(t *testing.T) {
c := NewConfig(CliTestContext())
assert.Equal(t, ProjectRoot+"/storage/testdata/config/vision.yml", c.VisionYaml())
assert.Equal(t, ProjectRoot+"/storage/testdata/"+dsn.PhotoPrismTestToFolderName()+"/config/vision.yml", c.VisionYaml())
})
t.Run("PreferYamlExtension", func(t *testing.T) {
c := NewConfig(CliTestContext())

View file

@ -5,6 +5,8 @@ import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/pkg/dsn"
)
func TestNewSettings(t *testing.T) {
@ -87,13 +89,15 @@ func TestSettings_Save(t *testing.T) {
assert.Equal(t, "onyx", s.UI.Theme)
assert.Equal(t, "de", s.UI.Language)
if err := s.Save("testdata/settings_tmp.yml"); err != nil {
_ = os.Mkdir("testdata/"+dsn.PhotoPrismTestToFolderName(), os.ModePerm)
if err := s.Save("testdata/" + dsn.PhotoPrismTestToFolderName() + "/settings_tmp.yml"); err != nil {
t.Fatal(err)
}
if err := os.Remove("testdata/settings_tmp.yml"); err != nil {
if err := os.Remove("testdata/" + dsn.PhotoPrismTestToFolderName() + "/settings_tmp.yml"); err != nil {
t.Fatal(err)
}
_ = os.Remove("testdata/" + dsn.PhotoPrismTestToFolderName())
})
}

View file

@ -0,0 +1,126 @@
package config
import (
"strings"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/internal/entity"
"github.com/photoprism/photoprism/internal/event"
"github.com/photoprism/photoprism/internal/mutex"
)
// TestError verifies that events logged via the event package are persisted
// to the errors table by the background error worker, and that the worker
// copes with cancellation, a closed database, and a full config shutdown.
//
// NOTE(review): these subtests rely on fixed sleeps to synchronize with the
// background goroutine, which can be flaky on slow or loaded machines.
func TestError(t *testing.T) {
	// AllOk: the happy path — an event.Error call ends up as a row in the
	// errors table while the worker is running.
	t.Run("AllOk", func(t *testing.T) {
		c := NewTestConfig("config")
		c.Init()
		entity.SetDbProvider(c)
		entity.LogWarningsAndErrors()
		time.Sleep(time.Millisecond * 300) // Some time to allow background go routine to actually start
		assert.True(t, mutex.ErrorWorker.Running())
		msg := "All Ok This Should Go To The Database"
		event.Error(msg)
		time.Sleep(time.Millisecond * 500) // Some time to ensure the database write has happened.
		result := &entity.Error{}
		// Messages are stored lowercased, hence the strings.ToLower in the lookup.
		if err := entity.Db().Where("error_level = ? and error_message = ?", "error", strings.ToLower(msg)).First(&result).Error; err != nil {
			assert.Empty(t, err)
			return
		}
		assert.NotEmpty(t, result)
		t.Logf("result = %+v", result)
		event.Error(msg + "1")
		event.Error(msg + "2")
		event.Error(msg + "3")
		time.Sleep(time.Millisecond * 500) // Some time to ensure the database write has happened.
		mutex.ErrorWorker.Cancel()
		// This event is emitted after the worker was cancelled; it should not crash.
		event.Error(msg + "4")
		time.Sleep(time.Millisecond * 500) // Some time to ensure the cancel is processed
	})
	// CloseDB: closing the database while the worker is running must not
	// crash; warnings logged before the close should still be retrievable
	// after reconnecting.
	t.Run("CloseDB", func(t *testing.T) {
		c := NewTestConfig("config")
		c.Init()
		entity.SetDbProvider(c)
		entity.LogWarningsAndErrors()
		time.Sleep(time.Millisecond * 300) // Some time to allow background go routine to actually start
		assert.True(t, mutex.ErrorWorker.Running())
		msg := "CloseDB This Should Go To The Database"
		event.Warn(msg)
		time.Sleep(time.Millisecond * 500) // Some time to ensure the database write has happened.
		event.Warn(msg + "1")
		event.Warn(msg + "2")
		event.Warn(msg + "3")
		//backup := c.db
		//c.db = nil // This causes Fatal as expected
		if err := c.CloseDb(); err != nil {
			assert.Empty(t, err)
			return
		}
		// Emitted while the database is closed; must be handled gracefully.
		event.Warn(msg + "4")
		time.Sleep(time.Millisecond * 500) // Some time to ensure the database write has happened.
		result := &entity.Error{}
		// Reconnect the database
		c.connectDb()
		entity.SetDbProvider(c)
		if err := entity.Db().Where("error_level = ?", "warning").First(&result).Error; err != nil {
			assert.Empty(t, err)
			return
		}
		assert.NotEmpty(t, result)
		t.Logf("result = %+v", result)
		mutex.ErrorWorker.Cancel()
	})
	// Shutdown: a full config shutdown must not crash the worker, and
	// previously logged errors should survive a subsequent re-init.
	t.Run("Shutdown", func(t *testing.T) {
		c := NewTestConfig("config")
		c.Init()
		entity.SetDbProvider(c)
		entity.LogWarningsAndErrors()
		time.Sleep(time.Millisecond * 300) // Some time to allow background go routine to actually start
		assert.True(t, mutex.ErrorWorker.Running())
		msg := "Shutdown This Should Go To The Database"
		event.Error(msg)
		time.Sleep(time.Millisecond * 500) // Some time to ensure the database write has happened.
		event.Error(msg + "1")
		event.Error(msg + "2")
		event.Error(msg + "3")
		c.Shutdown()
		// Emitted after shutdown; must be handled gracefully.
		event.Error(msg + "4")
		time.Sleep(time.Millisecond * 500) // Some time to ensure the database write has happened.
		result := &entity.Error{}
		// Reconnect the database
		if !c.IsDbOpen() {
			_ = c.Init() // safe to call; re-opens DB if needed
			c.RegisterDb() // (re)register provider
		} else {
			t.Log("commands: DB is still open")
		}
		if err := entity.Db().Where("error_level = ?", "error").First(&result).Error; err != nil {
			assert.Empty(t, err)
			return
		}
		assert.NotEmpty(t, result)
		t.Logf("result = %+v", result)
		mutex.ErrorWorker.Cancel()
	})
}

View file

@ -886,14 +886,14 @@ var Flags = CliFlags{
Flag: &cli.StringFlag{
Name: "database-driver",
Aliases: []string{"db"},
Usage: "database `DRIVER` (sqlite, mysql)",
Usage: "database `DRIVER` (sqlite, mysql, postgres)",
Value: "sqlite",
EnvVars: EnvVars("DATABASE_DRIVER"),
}}, {
Flag: &cli.StringFlag{
Name: "database-dsn",
Aliases: []string{"dsn"},
Usage: "database connection `DSN` (sqlite file, optional for mysql)",
Usage: "database connection `DSN` (sqlite file, optional for mysql and postgres)",
EnvVars: EnvVars("DATABASE_DSN"),
}}, {
Flag: &cli.StringFlag{
@ -1311,5 +1311,44 @@ var Flags = CliFlags{
Value: "",
EnvVars: EnvVars("LOG_FILENAME"),
TakesFile: true,
}}, {
Flag: &cli.StringFlag{
Name: "transfer-driver",
Aliases: []string{"tfr-db"},
Usage: "database `DRIVER` (sqlite, mysql, postgres)",
Value: "sqlite",
EnvVars: EnvVars("TRANSFER_DRIVER"),
}}, {
Flag: &cli.StringFlag{
Name: "transfer-dsn",
Aliases: []string{"tfr-dsn"},
Usage: "database connection `DSN` (sqlite file, optional for mysql and postgres)",
EnvVars: EnvVars("TRANSFER_DSN"),
}}, {
Flag: &cli.StringFlag{
Name: "transfer-name",
Aliases: []string{"tfr-db-name"},
Value: "photoprism",
Usage: "database schema `NAME`",
EnvVars: EnvVars("TRANSFER_NAME"),
}}, {
Flag: &cli.StringFlag{
Name: "transfer-server",
Aliases: []string{"tfr-db-server"},
Usage: "database `HOST` incl. port e.g. \"mariadb:3306\" (or socket path)",
EnvVars: EnvVars("TRANSFER_SERVER"),
}}, {
Flag: &cli.StringFlag{
Name: "transfer-user",
Aliases: []string{"tfr-db-user"},
Value: "photoprism",
Usage: "database user `NAME`",
EnvVars: EnvVars("TRANSFER_USER"),
}}, {
Flag: &cli.StringFlag{
Name: "transfer-password",
Aliases: []string{"tfr-db-pass"},
Usage: "database user `PASSWORD`",
EnvVars: EnvVars("TRANSFER_PASSWORD"),
}},
}

View file

@ -255,6 +255,12 @@ type Options struct {
PIDFilename string `yaml:"PIDFilename" json:"-" flag:"pid-filename"`
LogFilename string `yaml:"LogFilename" json:"-" flag:"log-filename"`
DetachServer bool `yaml:"DetachServer" json:"-" flag:"detach-server"`
DBTransferDriver string `yaml:"DBTransferDriver" json:"-" flag:"transfer-driver"`
DBTransferDSN string `yaml:"DBTransferDSN" json:"-" flag:"transfer-dsn"`
DBTransferName string `yaml:"DBTransferName" json:"-" flag:"transfer-name"`
DBTransferServer string `yaml:"DBTransferServer" json:"-" flag:"transfer-server"`
DBTransferUser string `yaml:"DBTransferUser" json:"-" flag:"transfer-user"`
DBTransferPassword string `yaml:"DBTransferPassword" json:"-" flag:"transfer-password"`
Deprecated struct {
DatabaseDsn string `yaml:"DatabaseDsn,omitempty" json:"-" flag:"-"`
} `yaml:",inline,omitempty" json:"-" flag:"-"`

View file

@ -14,9 +14,6 @@ import (
gc "github.com/patrickmn/go-cache"
"github.com/urfave/cli/v2"
_ "github.com/jinzhu/gorm/dialects/mysql" // register mysql dialect
_ "github.com/jinzhu/gorm/dialects/sqlite"
"github.com/photoprism/photoprism/internal/config/customize"
"github.com/photoprism/photoprism/internal/service/hub"
"github.com/photoprism/photoprism/internal/thumb"
@ -62,7 +59,7 @@ func NewTestOptions(dbName string) *Options {
storagePath = fs.Abs("../../storage")
}
dataPath := filepath.Join(storagePath, fs.TestdataDir)
dataPath := filepath.Join(storagePath, fs.TestdataDir, dsn.PhotoPrismTestToFolderName())
return NewTestOptionsForPath(dbName, dataPath)
}
@ -79,7 +76,8 @@ func NewTestOptionsForPath(dbName, dataPath string) *Options {
storagePath = fs.Abs("../../storage")
}
dataPath = filepath.Join(storagePath, fs.TestdataDir)
// enforce folder separation for testdata folders to prevent parallel tests of DBMS' clashing
dataPath = filepath.Join(storagePath, fs.TestdataDir, dsn.PhotoPrismTestToFolderName())
}
// Enable test mode in dependencies.
@ -122,8 +120,7 @@ func NewTestOptionsForPath(dbName, dataPath string) *Options {
// Example PHOTOPRISM_TEST_DSN for MariaDB / MySQL:
// - "photoprism:photoprism@tcp(mariadb:4001)/photoprism?parseTime=true"
dbName = PkgNameRegexp.ReplaceAllString(dbName, "")
testDriver := os.Getenv("PHOTOPRISM_TEST_DRIVER")
testDsn := os.Getenv("PHOTOPRISM_TEST_DSN")
testDriver, testDsn := dsn.PhotoPrismTestToDriverDSN(0)
// Set default test database driver.
if testDriver == "test" || testDriver == "sqlite" || testDriver == "" || testDsn == "" {
@ -138,6 +135,7 @@ func NewTestOptionsForPath(dbName, dataPath string) *Options {
} else if err := os.Remove(testDsn); err != nil {
log.Errorf("sqlite: failed to remove existing test database %s (%s)", clean.Log(testDsn), err)
}
testDsn = testDsn + "?_foreign_keys=on&_busy_timeout=5000"
} else if testDsn == "" || testDsn == dsn.SQLiteTestDB {
testDsn = dsn.SQLiteTestDB
if !fs.FileExists(testDsn) {
@ -145,6 +143,7 @@ func NewTestOptionsForPath(dbName, dataPath string) *Options {
} else if err := os.Remove(testDsn); err != nil {
log.Errorf("sqlite: failed to remove existing test database %s (%s)", clean.Log(testDsn), err)
}
testDsn = testDsn + "?_foreign_keys=on&_busy_timeout=5000"
}
}
@ -225,11 +224,32 @@ func TestConfig() *Config {
return testConfig
}
// RestoreDBFromCache will restore an SQLite database from a cache.
// Only works if the target database does not exist.
//
// Returns true only when the cached database bytes were actually written to
// disk; when the cache is empty, the driver is not SQLite, the target file
// already exists, or the write fails, it returns false.
func RestoreDBFromCache(c *Config) (cachedDB bool) {
	cachedDB = false
	// Try to restore test db from cache.
	if len(testDbCache) > 0 && c.DatabaseDriver() == SQLite3 && !fs.FileExists(c.DatabaseFile()) {
		if err := os.WriteFile(c.DatabaseFile(), testDbCache, fs.ModeFile); err != nil {
			// A failed write is only a warning: the caller falls back to
			// initializing the test database from scratch.
			log.Warnf("config: %s (restore test database)", err)
		} else {
			log.Infof("config: restored %s from cache", c.DatabaseFile())
			cachedDB = true
		}
		// Open the database
		c.RegisterDb()
	} else {
		log.Infof("config: cache was not used for %s", c.DatabaseFile())
	}
	return cachedDB
}
// NewMinimalTestConfig creates a lightweight test Config (no DB, minimal filesystem).
//
// Not suitable for tests requiring a database or pre-created storage directories.
func NewMinimalTestConfig(dataPath string) *Config {
return NewIsolatedTestConfig("", dataPath, false)
func NewMinimalTestConfig(dbName, dataPath string) *Config {
return NewIsolatedTestConfig(dbName, dataPath, false)
}
var testDbCache []byte
@ -241,30 +261,19 @@ var testDbMutex sync.Mutex
func NewMinimalTestConfigWithDb(dbName, dataPath string) *Config {
c := NewIsolatedTestConfig(dbName, dataPath, true)
cachedDb := false
// Try to restore test db from cache.
if len(testDbCache) > 0 && c.DatabaseDriver() == SQLite3 && !fs.FileExists(c.DatabaseDSN()) {
if err := os.WriteFile(c.DatabaseDSN(), testDbCache, fs.ModeFile); err != nil {
log.Warnf("config: %s (restore test database)", err)
} else {
cachedDb = true
}
}
cachedDb := RestoreDBFromCache(c)
if err := c.Init(); err != nil {
log.Fatalf("config: %s (init)", err.Error())
}
c.RegisterDb()
if cachedDb {
return c
}
c.InitTestDb()
if testDbCache == nil && c.DatabaseDriver() == SQLite3 && fs.FileExistsNotEmpty(c.DatabaseDSN()) {
if testDbCache == nil && c.DatabaseDriver() == SQLite3 && fs.FileExistsNotEmpty(c.DatabaseFile()) {
testDbMutex.Lock()
defer testDbMutex.Unlock()
@ -272,9 +281,10 @@ func NewMinimalTestConfigWithDb(dbName, dataPath string) *Config {
return c
}
if testDb, readErr := os.ReadFile(c.DatabaseDSN()); readErr != nil {
if testDb, readErr := os.ReadFile(c.DatabaseFile()); readErr != nil {
log.Warnf("config: could not cache test database (%s)", readErr)
} else {
log.Infof("config: test database %s has been cached", c.DatabaseFile())
testDbCache = testDb
}
}

View file

@ -3,9 +3,9 @@ package config
import (
"testing"
"github.com/jinzhu/gorm"
"github.com/stretchr/testify/assert"
"github.com/urfave/cli/v2"
"gorm.io/gorm"
"github.com/photoprism/photoprism/pkg/fs"
)

View file

@ -0,0 +1,5 @@
# This is an import test file;
# Comments should be ignored;
# Comments have to END in a semicolon;
CREATE TABLE importtest (col1 integer);
DROP TABLE importtest;

View file

@ -0,0 +1,845 @@
//go:build ignore
// +build ignore
package main
import (
"bufio"
"bytes"
"flag"
"fmt"
"math"
"math/rand/v2"
"os"
"os/exec"
"strings"
"sync"
"time"
"github.com/sirupsen/logrus"
"gorm.io/driver/mysql"
"gorm.io/driver/postgres"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"gorm.io/gorm/logger"
"github.com/jackc/pgx/v5/pgxpool"
"github.com/photoprism/photoprism/internal/ai/classify"
"github.com/photoprism/photoprism/internal/entity"
"github.com/photoprism/photoprism/internal/entity/migrate"
"github.com/photoprism/photoprism/internal/event"
"github.com/photoprism/photoprism/pkg/fs"
"github.com/photoprism/photoprism/pkg/media"
"github.com/photoprism/photoprism/pkg/rnd"
"github.com/photoprism/photoprism/pkg/txt"
)
// drivers maps a supported driver name to its gorm dialector constructor.
// Postgres is intentionally absent: DbConn.Open handles it separately via
// entity.OpenPostgreSQL, so it never goes through this map.
var drivers = map[string]func(string) gorm.Dialector{
	MySQL:   mysql.Open,
	SQLite3: sqlite.Open,
}
var log = event.Log
// Log reports err through the package logger, tagged with the model and
// action that produced it; a nil err is silently ignored.
func Log(model, action string, err error) {
	if err == nil {
		return
	}

	log.Errorf("%s: %s (%s)", model, err, action)
}
// UTC returns the current Coordinated Universal Time (UTC).
func UTC() time.Time {
return time.Now().UTC()
}
// Now returns the current time in UTC, truncated to seconds.
func Now() time.Time {
return UTC().Truncate(time.Second)
}
// Db returns the default *gorm.DB connection.
//
// Returns nil when no connection provider has been registered yet via
// SetDbProvider; callers must be prepared for a nil result.
func Db() *gorm.DB {
	if dbConn == nil {
		return nil
	}

	return dbConn.Db()
}
// UnscopedDb returns an unscoped *gorm.DB connection
// that returns all records including deleted records.
//
// NOTE(review): unlike Db, this does not guard against a missing provider —
// if Db() returns nil, the Unscoped call will panic.
func UnscopedDb() *gorm.DB {
	return Db().Unscoped()
}
// Supported test databases.
const (
	MySQL           = "mysql"                 // driver name for MariaDB/MySQL
	SQLite3         = "sqlite"                // driver name for SQLite
	SQLiteTestDB    = ".test.db"              // default on-disk SQLite test database file
	SQLiteMemoryDSN = ":memory:?cache=shared" // DSN for a shared in-memory SQLite database
)
// dbConn is the global gorm.DB connection provider.
// It is nil until SetDbProvider is called.
var dbConn Gorm

// Gorm is a gorm.DB connection provider interface.
type Gorm interface {
	Db() *gorm.DB
}
// DbConn is a gorm.DB connection provider.
// The connection is opened lazily on the first Db call.
type DbConn struct {
	Driver string // database driver name ("sqlite", "mysql", or entity.Postgres)
	Dsn    string // data source name used to open the connection

	once sync.Once     // guards Open so the connection is established only once
	db   *gorm.DB      // lazily opened connection; nil until Open succeeds
	pool *pgxpool.Pool // pgx connection pool, only set for the Postgres driver
}
// Db returns the gorm db connection.
//
// The connection is opened exactly once on first use; if the connection
// could not be established, the process is terminated via log.Fatal.
func (g *DbConn) Db() *gorm.DB {
	g.once.Do(g.Open)

	if g.db == nil {
		log.Fatal("migrate: database not connected")
	}

	return g.db
}
// Open creates a new gorm db connection.
//
// A failed first attempt is retried up to 12 times with a 5 second pause
// between attempts (roughly one minute in total); if no attempt succeeds,
// the process is terminated via log.Fatal. For non-Postgres drivers the
// connection pool limits are applied after a successful connect.
func (g *DbConn) Open() {
	log.Infof("Opening DB connection with driver %s", g.Driver)

	// dial performs a single connection attempt for the configured driver.
	// Postgres is opened through entity.OpenPostgreSQL so the pgx pool can
	// be retained; all other drivers go through the drivers dialector map.
	dial := func() (*gorm.DB, error) {
		if g.Driver == entity.Postgres {
			postgresDB, pgxPool := entity.OpenPostgreSQL(g.Dsn)
			g.pool = pgxPool
			return gorm.Open(postgres.New(postgres.Config{Conn: postgresDB}), gormConfig())
		}
		return gorm.Open(drivers[g.Driver](g.Dsn), gormConfig())
	}

	db, err := dial()

	// Retry with a fixed delay, e.g. while a database container is still starting up.
	for i := 1; (err != nil || db == nil) && i <= 12; i++ {
		fmt.Printf("gorm.Open(%s, %s) %d\n", g.Driver, g.Dsn, i)
		time.Sleep(5 * time.Second)
		db, err = dial()
	}

	if err != nil || db == nil {
		fmt.Println(err)
		log.Fatal(err)
	}

	log.Info("DB connection established successfully")

	if g.Driver != entity.Postgres {
		// Fail hard if the generic database handle cannot be obtained instead
		// of dereferencing a possibly-nil *sql.DB.
		sqlDB, dbErr := db.DB()
		if dbErr != nil {
			log.Fatal(dbErr)
		}
		sqlDB.SetMaxIdleConns(4)   // in config_db it uses c.DatabaseConnsIdle(), but we don't have the c here.
		sqlDB.SetMaxOpenConns(256) // in config_db it uses c.DatabaseConns(), but we don't have the c here.
	}

	g.db = db
}
// Close closes the gorm db connection.
//
// It is a no-op when no connection is open. Errors while obtaining or
// closing the underlying *sql.DB terminate the process via log.Fatal,
// matching the error handling style of Open.
func (g *DbConn) Close() {
	if g.db == nil {
		return
	}

	// Check the error instead of dereferencing a possibly-nil *sql.DB.
	sqlDB, err := g.db.DB()
	if err != nil {
		log.Fatal(err)
	}

	if err = sqlDB.Close(); err != nil {
		log.Fatal(err)
	}

	g.db = nil
}
// gormConfig returns the gorm configuration shared by all connection
// attempts: an error-level logger with slow-query reporting and UTC
// timestamps for created/updated columns.
func gormConfig() *gorm.Config {
	gormLogger := logger.New(log, logger.Config{
		SlowThreshold:             time.Second,  // queries slower than this are reported
		LogLevel:                  logger.Error, // only log errors
		IgnoreRecordNotFoundError: true,         // ErrRecordNotFound is expected, don't log it
		ParameterizedQueries:      true,         // keep parameter values out of the SQL log
		Colorful:                  false,        // plain output
	})

	return &gorm.Config{
		Logger: gormLogger,
		// Set UTC as the default for created and updated timestamps.
		NowFunc: UTC,
	}
}
// IsDialect reports whether the given sql dialect is currently in use.
func IsDialect(name string) bool {
	return Db().Dialector.Name() == name
}
// DbDialect returns the name of the sql dialect in use.
func DbDialect() string {
	conn := Db()
	return conn.Dialector.Name()
}
// SetDbProvider sets the Gorm database connection provider.
// It replaces any previously registered provider.
func SetDbProvider(conn Gorm) {
	dbConn = conn
}
// HasDbProvider returns true if a db provider exists.
// Note that this only checks registration, not that the connection works.
func HasDbProvider() bool {
	return dbConn != nil
}
var characterRunes = []rune("ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
func randomSHA1() string {
result := make([]rune, 32)
for i := range result {
result[i] = characterRunes[rand.IntN(len(characterRunes))]
}
return string(result)
}
func main() {
var (
numberOfPhotos int
driver string
dsn string
dropdb bool
sqlitescript bool
)
log = logrus.StandardLogger()
log.SetLevel(logrus.TraceLevel)
event.AuditLog = log
flag.IntVar(&numberOfPhotos, "numberOfPhotos", 0, "Number of photos to generate")
flag.StringVar(&driver, "driver", "sqlite", "GORM driver to use. Choose from sqlite, mysql and postgres")
flag.StringVar(&dsn, "dsn", "testdb.db", "DSN to access the database")
flag.BoolVar(&dropdb, "dropdb", false, "Drop/Delete the database")
flag.BoolVar(&sqlitescript, "sqlitescript", true, "Create an SQLite database from script")
flag.Parse()
if numberOfPhotos < 1 {
flag.PrintDefaults()
log.Errorf("Number of photos is not enough %d", numberOfPhotos)
os.Exit(1)
}
if _, ok := drivers[driver]; ok == false {
flag.PrintDefaults()
log.Errorf("driver %v is not valid", driver)
os.Exit(1)
}
if len(dsn) < 3 {
flag.PrintDefaults()
log.Errorf("dsn %v is to short", dsn)
os.Exit(1)
}
// Set default test database driver.
if driver == "test" || driver == "sqlite" || driver == "" || dsn == "" {
driver = SQLite3
}
// Set default database DSN.
if driver == SQLite3 {
if dsn == "" {
dsn = SQLiteMemoryDSN
}
}
allowDelete := dropdb
if driver == MySQL && allowDelete {
basedsn := dsn[0 : strings.Index(dsn, "/")+1]
basedbname := dsn[strings.Index(dsn, "/")+1 : strings.Index(dsn, "?")]
log.Infof("Connecting to %v", basedsn)
database, err := gorm.Open(mysql.Open(basedsn), &gorm.Config{})
if err != nil {
log.Errorf("Unable to connect to MariaDB %v", err)
}
log.Infof("Dropping database %v if it exists", basedbname)
if res := database.Exec("DROP DATABASE IF EXISTS " + basedbname + ";"); res.Error != nil {
log.Errorf("Unable to drop database %v", res.Error)
os.Exit(1)
}
log.Infof("Creating database %v if it doesnt exist", basedbname)
if res := database.Exec("CREATE DATABASE IF NOT EXISTS " + basedbname + ";"); res.Error != nil {
log.Errorf("Unable to create database %v", res.Error)
os.Exit(1)
}
}
if driver == SQLite3 && dsn != SQLiteMemoryDSN && allowDelete {
filename := dsn
if strings.Index(dsn, "?") > 0 {
if strings.Index(dsn, ":") > 0 {
filename = dsn[strings.Index(dsn, ":")+1 : strings.Index(dsn, "?")]
} else {
filename = dsn[0:strings.Index(dsn, "?")]
}
}
log.Infof("Removing file %v", filename)
os.Remove(filename)
}
log.Infof("Connecting to driver %v with dsn %v", driver, dsn)
// Create gorm.DB connection provider.
db := &DbConn{
Driver: driver,
Dsn: dsn,
}
defer db.Close()
SetDbProvider(db)
// Disable journal to speed up.
if driver == SQLite3 {
Db().Exec("PRAGMA journal_mode=OFF")
}
start := time.Now()
log.Info("Create PhotoPrism tables if they don't exist")
// Run migration if the photos table doesn't exist.
// Otherwise assume that we have a valid structured database.
photoCounter := int64(0)
if err := Db().Model(&entity.Photo{}).Count(&photoCounter).Error; err != nil {
// Handle SQLite differently as it does table recreates on initial migrate, so we need to be able to simulate that.
if driver == SQLite3 && sqlitescript {
filename := dsn
if strings.Index(dsn, "?") > 0 {
if strings.Index(dsn, ":") > 0 {
filename = dsn[strings.Index(dsn, ":")+1 : strings.Index(dsn, "?")]
} else {
filename = dsn[0:strings.Index(dsn, "?")]
}
}
var cmd *exec.Cmd
bashCmd := fmt.Sprintf("cat ./sqlite3.sql | sqlite3 %s", filename)
cmd = exec.Command("bash", "-c", bashCmd)
// Write to stdout or file.
var f *os.File
log.Infof("restore: creating database tables from script")
f = os.Stdout
var stderr bytes.Buffer
cmd.Stderr = &stderr
cmd.Stdout = f
// Log exact command for debugging in trace mode.
log.Debug(cmd.String())
// Run restore command.
if cmdErr := cmd.Run(); cmdErr != nil {
if errStr := strings.TrimSpace(stderr.String()); errStr != "" {
log.Error(errStr)
os.Exit(1)
}
}
} else {
entity.Entities.Migrate(Db(), migrate.Opt(true, false, nil))
if err := entity.Entities.WaitForMigration(Db()); err != nil {
log.Errorf("migrate: %s [%s]", err, time.Since(start))
}
}
} else {
log.Errorf("The photos table already exists in driver %v dsn %v.\nAborting...", driver, dsn)
os.Exit(1)
}
entity.SetDbProvider(dbConn)
log.Info("Create default fixtures")
entity.CreateDefaultFixtures()
// Load the database with data.
// Create all the labels and keywords that have specific handling in internal/ai/classify/rules.go
log.Info("Create labels and keywords")
keywords := make(map[string]uint)
labels := make(map[string]uint)
keywordRandoms := make(map[int]uint)
labelRandoms := make(map[int]uint)
keywordPos, labelPos := 0, 0
for label, rule := range classify.Rules {
keyword := entity.Keyword{
Keyword: label,
Skip: false,
}
Db().Create(&keyword)
keywords[label] = keyword.ID
keywordRandoms[keywordPos] = keyword.ID
keywordPos++
if rule.Label != "" {
if _, found := keywords[rule.Label]; found == false {
keyword = entity.Keyword{
Keyword: rule.Label,
Skip: false,
}
Db().Create(&keyword)
keywords[rule.Label] = keyword.ID
keywordRandoms[keywordPos] = keyword.ID
keywordPos++
}
for _, category := range rule.Categories {
if _, found := labels[category]; found == false {
labelDb := entity.Label{
LabelSlug: strings.ToLower(category),
CustomSlug: strings.ToLower(category),
LabelName: strings.ToLower(category),
LabelPriority: 0,
LabelFavorite: false,
LabelDescription: "",
LabelNotes: "",
PhotoCount: 0,
LabelCategories: []*entity.Label{},
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
DeletedAt: gorm.DeletedAt{},
New: false,
}
Db().Create(&labelDb)
labels[category] = labelDb.ID
labelRandoms[labelPos] = labelDb.ID
labelPos++
}
}
if _, found := labels[rule.Label]; found == false {
labelDb := entity.Label{
LabelSlug: strings.ToLower(rule.Label),
CustomSlug: strings.ToLower(rule.Label),
LabelName: strings.ToLower(rule.Label),
LabelPriority: 0,
LabelFavorite: false,
LabelDescription: "",
LabelNotes: "",
PhotoCount: 0,
LabelCategories: []*entity.Label{},
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
DeletedAt: gorm.DeletedAt{},
New: false,
}
Db().Create(&labelDb)
labels[rule.Label] = labelDb.ID
labelRandoms[labelPos] = labelDb.ID
labelPos++
for _, category := range rule.Categories {
categoryDb := entity.Category{
LabelID: labelDb.ID,
CategoryID: labels[category],
}
Db().Create(&categoryDb)
}
}
}
}
// Create every possible camera and some lenses. Yeah the data is garbage but it's test data anyway.
log.Info("Create cameras and lenses")
lensList := [6]string{"Wide Angle", "Fisheye", "Ultra Wide Angle", "Macro", "Super Zoom", "F80"}
cameras := make(map[string]uint)
lenses := make(map[string]uint)
cameraRandoms := make(map[int]uint)
lensRandoms := make(map[int]uint)
cameraPos, lensPos := 0, 0
for _, make := range entity.CameraMakes {
for _, model := range entity.CameraModels {
camera := entity.NewCamera(make, model)
if _, found := cameras[camera.CameraSlug]; found == false {
Db().Create(camera)
cameras[camera.CameraSlug] = camera.ID
cameraRandoms[cameraPos] = camera.ID
cameraPos++
}
}
for _, model := range lensList {
lens := entity.NewLens(make, model)
if _, found := lenses[lens.LensSlug]; found == false {
Db().Create(lens)
lenses[lens.LensSlug] = lens.ID
lensRandoms[lensPos] = lens.ID
lensPos++
}
}
}
// Load up Countries and Places.
log.Info("Create countries and places")
countries := make(map[int]string)
countryPos := 0
places := make(map[int]string)
placePos := 0
PlaceUID := byte('P')
file, _ := os.Open("../../pkg/txt/resources/countries.txt")
defer file.Close()
scanner := bufio.NewScanner(file)
scanner.Split(bufio.ScanLines)
for scanner.Scan() {
parts := strings.Split(scanner.Text(), ":")
if len(parts) < 2 {
continue
}
country := entity.NewCountry(strings.ToLower(parts[0]), strings.ToLower(parts[1]))
counter := int64(0)
Db().Model(&entity.Country{}).Where("id = ?", country.ID).Count(&counter)
if counter == 0 {
Db().Create(country)
countries[countryPos] = strings.ToLower(parts[0])
countryPos++
}
}
for word := range txt.StopWords {
placeUID := rnd.GenerateUID(PlaceUID)
country := countries[rand.IntN(len(countries))]
place := entity.Place{
ID: placeUID,
PlaceLabel: word,
PlaceDistrict: word,
PlaceCity: word,
PlaceState: word,
PlaceCountry: country,
PlaceKeywords: "",
PlaceFavorite: false,
PhotoCount: 0,
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
}
Db().Create(&place)
places[placePos] = placeUID
placePos++
}
// Create some Subjects
log.Info("Create subjects")
subjects := make(map[int]entity.Subject)
subjectPos := 0
for i := 1; i <= 100; i++ {
subject := entity.Subject{
SubjUID: rnd.GenerateUID('j'),
SubjType: entity.SubjPerson,
SubjSrc: entity.SrcImage,
SubjSlug: fmt.Sprintf("person-%03d", i),
SubjName: fmt.Sprintf("Person %03d", i),
SubjFavorite: false,
SubjPrivate: false,
SubjExcluded: false,
FileCount: 0,
PhotoCount: 0,
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
DeletedAt: gorm.DeletedAt{},
}
Db().Create(&subject)
subjects[subjectPos] = subject
subjectPos++
}
log.Info("Start creating photos")
for i := 1; i <= numberOfPhotos; i++ {
if _, frac := math.Modf(float64(i) / 100.0); frac == 0 {
log.Infof("Generating photo number %v", i)
}
month := rand.IntN(11) + 1
day := rand.IntN(28) + 1
year := rand.IntN(45) + 1980
takenAt := time.Date(year, time.Month(month), day, rand.IntN(24), rand.IntN(60), rand.IntN(60), rand.IntN(1000), time.UTC)
labelCount := rand.IntN(5)
// Create the cell for the Photo's location
placeId := places[rand.IntN(len(places))]
lat := (rand.Float64() * 180.0) - 90.0
lng := (rand.Float64() * 360.0) - 180.0
cell := entity.NewCell(lat, lng)
cell.PlaceID = placeId
Db().FirstOrCreate(cell)
folder := entity.Folder{}
if res := Db().Model(&entity.Folder{}).Where("path = ?", fmt.Sprintf("%04d", year)).First(&folder); res.RowsAffected == 0 {
folder = entity.NewFolder("/", fmt.Sprintf("%04d", year), time.Now().UTC())
folder.Create()
}
folder = entity.Folder{}
if res := Db().Model(&entity.Folder{}).Where("path = ?", fmt.Sprintf("%04d/%02d", year, month)).First(&folder); res.RowsAffected == 0 {
folder = entity.NewFolder("/", fmt.Sprintf("%04d/%02d", year, month), time.Now().UTC())
folder.Create()
}
photo := entity.Photo{
// ID
//
// UUID
TakenAt: takenAt,
TakenAtLocal: takenAt,
TakenSrc: entity.SrcMeta,
PhotoUID: rnd.GenerateUID(entity.PhotoUID),
PhotoType: "image",
TypeSrc: entity.SrcAuto,
PhotoTitle: "Performance Test Load",
TitleSrc: entity.SrcImage,
PhotoDescription: "",
DescriptionSrc: entity.SrcAuto,
PhotoPath: fmt.Sprintf("%04d/%02d", year, month),
PhotoName: fmt.Sprintf("PIC%08d", i),
OriginalName: fmt.Sprintf("PIC%08d", i),
PhotoStack: 0,
PhotoFavorite: false,
PhotoPrivate: false,
PhotoScan: false,
PhotoPanorama: false,
TimeZone: "America/Mexico_City",
PlaceID: placeId,
PlaceSrc: entity.SrcMeta,
CellID: cell.ID,
CellAccuracy: 0,
PhotoAltitude: 5,
PhotoLat: lat,
PhotoLng: lng,
PhotoCountry: countries[rand.IntN(len(countries))],
PhotoYear: year,
PhotoMonth: month,
PhotoDay: day,
PhotoIso: 400,
PhotoExposure: "1/60",
PhotoFNumber: 8,
PhotoFocalLength: 2,
PhotoQuality: 3,
PhotoFaces: 0,
PhotoResolution: 0,
// PhotoDuration : 0,
PhotoColor: 12,
CameraID: cameraRandoms[rand.IntN(len(cameraRandoms))],
CameraSerial: "",
CameraSrc: "",
LensID: lensRandoms[rand.IntN(len(lensRandoms))],
// Details :,
// Camera
// Lens
// Cell
// Place
Keywords: []entity.Keyword{},
Albums: []entity.Album{},
Files: []entity.File{},
Labels: []entity.PhotoLabel{},
// CreatedBy
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
EditedAt: nil,
PublishedAt: nil,
CheckedAt: nil,
EstimatedAt: nil,
DeletedAt: gorm.DeletedAt{},
}
Db().Create(&photo)
// Allocate the labels for this photo
for i := 0; i < labelCount; i++ {
photoLabel := entity.NewPhotoLabel(photo.ID, labelRandoms[rand.IntN(len(labelRandoms))], 0, entity.SrcMeta)
Db().FirstOrCreate(photoLabel)
}
// Allocate the keywords for this photo
keywordCount := rand.IntN(5)
keywordStr := ""
for i := 0; i < keywordCount; i++ {
photoKeyword := entity.PhotoKeyword{PhotoID: photo.ID, KeywordID: keywordRandoms[rand.IntN(len(keywordRandoms))]}
keyword := entity.Keyword{}
Db().Model(&entity.Keyword{}).Where("id = ?", photoKeyword.KeywordID).First(&keyword)
Db().FirstOrCreate(&photoKeyword)
if len(keywordStr) > 0 {
keywordStr = fmt.Sprintf("%s,%s", keywordStr, keyword.Keyword)
} else {
keywordStr = keyword.Keyword
}
}
// Create File
file := entity.File{
// ID
// Photo
PhotoID: photo.ID,
PhotoUID: photo.PhotoUID,
PhotoTakenAt: photo.TakenAt,
// TimeIndex
// MediaID
// MediaUTC
InstanceID: "",
FileUID: rnd.GenerateUID(entity.FileUID),
FileName: fmt.Sprintf("%04d/%02d/PIC%08d.jpg", year, month, i),
FileRoot: entity.RootSidecar,
OriginalName: "",
FileHash: rnd.GenerateUID(entity.FileUID),
FileSize: rand.Int64N(1000000),
FileCodec: "",
FileType: string(fs.ImageJpeg),
MediaType: string(media.Image),
FileMime: "image/jpg",
FilePrimary: true,
FileSidecar: false,
FileMissing: false,
FilePortrait: true,
FileVideo: false,
FileDuration: 0,
// FileFPS
// FileFrames
FileWidth: 1200,
FileHeight: 1600,
FileOrientation: 6,
FileOrientationSrc: entity.SrcMeta,
FileProjection: "",
FileAspectRatio: 0.75,
// FileHDR : false,
// FileWatermark
// FileColorProfile
FileMainColor: "magenta",
FileColors: "226611CC1",
FileLuminance: "ABCDEF123",
FileDiff: 456,
FileChroma: 15,
// FileSoftware
// FileError
ModTime: time.Now().Unix(),
CreatedAt: time.Now().UTC(),
CreatedIn: 935962,
UpdatedAt: time.Now().UTC(),
UpdatedIn: 935962,
// PublishedAt
DeletedAt: gorm.DeletedAt{},
Share: []entity.FileShare{},
Sync: []entity.FileSync{},
		// Markers are created separately below, after the file row exists.
}
Db().Create(&file)
// Add Markers
markersToCreate := rand.IntN(5)
for i := 0; i < markersToCreate; i++ {
subject := subjects[rand.IntN(len(subjects))]
marker := entity.Marker{
MarkerUID: rnd.GenerateUID('m'),
FileUID: file.FileUID,
MarkerType: entity.MarkerFace,
MarkerName: subject.SubjName,
MarkerReview: false,
MarkerInvalid: false,
SubjUID: subject.SubjUID,
SubjSrc: subject.SubjSrc,
X: rand.Float32() * 1024.0,
Y: rand.Float32() * 2048.0,
W: rand.Float32() * 10.0,
H: rand.Float32() * 20.0,
Q: 10,
Size: 100,
Score: 10,
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
}
Db().Create(&marker)
face := entity.Face{
ID: randomSHA1(),
FaceSrc: entity.SrcImage,
FaceKind: 1,
FaceHidden: false,
SubjUID: subject.SubjUID,
Samples: 5,
SampleRadius: 0.35,
Collisions: 5,
CollisionRadius: 0.5,
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
}
Db().Create(&face)
}
// Add to Album
albumSlug := fmt.Sprintf("my-photos-from-%04d", year)
album := entity.Album{}
if res := Db().Model(&entity.Album{}).Where("album_slug = ?", albumSlug).First(&album); res.RowsAffected == 0 {
album = entity.Album{
AlbumUID: rnd.GenerateUID(entity.AlbumUID),
AlbumSlug: albumSlug,
AlbumPath: "",
AlbumType: entity.AlbumManual,
AlbumTitle: fmt.Sprintf("My Photos From %04d", year),
AlbumLocation: "",
AlbumCategory: "",
AlbumCaption: "",
AlbumDescription: "A wonderful year",
AlbumNotes: "",
AlbumFilter: "",
AlbumOrder: "oldest",
AlbumTemplate: "",
AlbumCountry: entity.UnknownID,
AlbumYear: year,
AlbumMonth: 0,
AlbumDay: 0,
AlbumFavorite: false,
AlbumPrivate: false,
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
DeletedAt: gorm.DeletedAt{},
}
Db().Create(&album)
}
photoAlbum := entity.PhotoAlbum{
PhotoUID: photo.PhotoUID,
AlbumUID: album.AlbumUID,
Order: 0,
Hidden: false,
Missing: false,
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
}
Db().Create(photoAlbum)
details := entity.Details{
PhotoID: photo.ID,
Keywords: keywordStr,
KeywordsSrc: entity.SrcMeta,
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
}
Db().Create(details)
}
entity.File{}.RegenerateIndex()
entity.UpdateCounts()
log.Infof("Database Creation completed in %s", time.Since(start))
code := 0
os.Exit(code)
}

View file

@ -0,0 +1,909 @@
/*M!999999\- enable the sandbox mode */
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*M!100616 SET @OLD_NOTE_VERBOSITY=@@NOTE_VERBOSITY, NOTE_VERBOSITY=0 */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `albums` (
`id` int(10) unsigned NOT NULL,
`album_uid` varbinary(42) DEFAULT NULL,
`parent_uid` varbinary(42) DEFAULT '',
`album_slug` varbinary(160) DEFAULT NULL,
`album_path` varchar(1024) DEFAULT NULL,
`album_type` varbinary(8) DEFAULT 'album',
`album_title` varchar(160) DEFAULT NULL,
`album_location` varchar(160) DEFAULT NULL,
`album_category` varchar(100) DEFAULT NULL,
`album_caption` varchar(1024) DEFAULT NULL,
`album_description` varchar(2048) DEFAULT NULL,
`album_notes` varchar(1024) DEFAULT NULL,
`album_filter` varbinary(2048) DEFAULT '',
`album_order` varbinary(32) DEFAULT NULL,
`album_template` varbinary(255) DEFAULT NULL,
`album_state` varchar(100) DEFAULT NULL,
`album_country` varbinary(2) DEFAULT 'zz',
`album_year` int(11) DEFAULT NULL,
`album_month` int(11) DEFAULT NULL,
`album_day` int(11) DEFAULT NULL,
`album_favorite` tinyint(1) DEFAULT NULL,
`album_private` tinyint(1) DEFAULT NULL,
`thumb` varbinary(128) DEFAULT '',
`thumb_src` varbinary(8) DEFAULT '',
`created_by` varbinary(42) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`published_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uix_albums_album_uid` (`album_uid`),
KEY `idx_albums_album_state` (`album_state`),
KEY `idx_albums_ymd` (`album_day`),
KEY `idx_albums_thumb` (`thumb`),
KEY `idx_albums_deleted_at` (`deleted_at`),
KEY `idx_albums_album_slug` (`album_slug`),
KEY `idx_albums_album_title` (`album_title`),
KEY `idx_albums_album_category` (`album_category`),
KEY `idx_albums_country_year_month` (`album_country`,`album_year`,`album_month`),
KEY `idx_albums_created_by` (`created_by`),
KEY `idx_albums_published_at` (`published_at`),
KEY `idx_albums_album_path` (`album_path`(768)),
KEY `idx_albums_album_filter` (`album_filter`(512))
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `albums_users` (
`uid` varbinary(42) NOT NULL,
`user_uid` varbinary(42) NOT NULL,
`team_uid` varbinary(42) DEFAULT NULL,
`perm` int(10) unsigned DEFAULT NULL,
PRIMARY KEY (`uid`,`user_uid`),
KEY `idx_albums_users_user_uid` (`user_uid`),
KEY `idx_albums_users_team_uid` (`team_uid`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `auth_clients` (
`client_uid` varbinary(42) NOT NULL,
`user_uid` varbinary(42) DEFAULT '',
`user_name` varchar(200) DEFAULT NULL,
`client_name` varchar(200) DEFAULT NULL,
`client_role` varchar(64) DEFAULT '',
`client_type` varbinary(16) DEFAULT NULL,
`client_url` varbinary(255) DEFAULT '',
`callback_url` varbinary(255) DEFAULT '',
`auth_provider` varbinary(128) DEFAULT '',
`auth_method` varbinary(128) DEFAULT '',
`auth_scope` varchar(1024) DEFAULT '',
`auth_expires` bigint(20) DEFAULT NULL,
`auth_tokens` bigint(20) DEFAULT NULL,
`auth_enabled` tinyint(1) DEFAULT NULL,
`last_active` bigint(20) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`client_uid`),
KEY `idx_auth_clients_user_uid` (`user_uid`),
KEY `idx_auth_clients_user_name` (`user_name`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `auth_sessions` (
`id` varbinary(2048) NOT NULL,
`user_uid` varbinary(42) DEFAULT '',
`user_name` varchar(200) DEFAULT NULL,
`client_uid` varbinary(42) DEFAULT '',
`client_name` varchar(200) DEFAULT '',
`client_ip` varchar(64) DEFAULT NULL,
`auth_provider` varbinary(128) DEFAULT '',
`auth_method` varbinary(128) DEFAULT '',
`auth_issuer` varbinary(255) DEFAULT '',
`auth_id` varbinary(255) DEFAULT '',
`auth_scope` varchar(1024) DEFAULT '',
`grant_type` varbinary(64) DEFAULT '',
`last_active` bigint(20) DEFAULT NULL,
`sess_expires` bigint(20) DEFAULT NULL,
`sess_timeout` bigint(20) DEFAULT NULL,
`preview_token` varbinary(64) DEFAULT '',
`download_token` varbinary(64) DEFAULT '',
`access_token` varbinary(4096) DEFAULT '',
`refresh_token` varbinary(2048) DEFAULT NULL,
`id_token` varbinary(2048) DEFAULT NULL,
`user_agent` varchar(512) DEFAULT NULL,
`data_json` varbinary(4096) DEFAULT NULL,
`ref_id` varbinary(16) DEFAULT '',
`login_ip` varchar(64) DEFAULT NULL,
`login_at` datetime DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx_auth_sessions_user_uid` (`user_uid`),
KEY `idx_auth_sessions_user_name` (`user_name`),
KEY `idx_auth_sessions_client_uid` (`client_uid`),
KEY `idx_auth_sessions_client_ip` (`client_ip`),
KEY `idx_auth_sessions_auth_id` (`auth_id`),
KEY `idx_auth_sessions_sess_expires` (`sess_expires`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `auth_users` (
`id` int(11) NOT NULL,
`user_uuid` varbinary(64) DEFAULT NULL,
`user_uid` varbinary(42) DEFAULT NULL,
`auth_provider` varbinary(128) DEFAULT '',
`auth_method` varbinary(128) DEFAULT '',
`auth_issuer` varbinary(255) DEFAULT '',
`auth_id` varbinary(255) DEFAULT '',
`user_name` varchar(200) DEFAULT NULL,
`display_name` varchar(200) DEFAULT NULL,
`user_email` varchar(255) DEFAULT NULL,
`backup_email` varchar(255) DEFAULT NULL,
`user_role` varchar(64) DEFAULT '',
`user_attr` varchar(1024) DEFAULT NULL,
`super_admin` tinyint(1) DEFAULT NULL,
`can_login` tinyint(1) DEFAULT NULL,
`login_at` datetime DEFAULT NULL,
`expires_at` datetime DEFAULT NULL,
`webdav` tinyint(1) DEFAULT NULL,
`base_path` varbinary(1024) DEFAULT NULL,
`upload_path` varbinary(1024) DEFAULT NULL,
`can_invite` tinyint(1) DEFAULT NULL,
`invite_token` varbinary(64) DEFAULT NULL,
`invited_by` varchar(64) DEFAULT NULL,
`verify_token` varbinary(64) DEFAULT NULL,
`verified_at` datetime DEFAULT NULL,
`consent_at` datetime DEFAULT NULL,
`born_at` datetime DEFAULT NULL,
`reset_token` varbinary(64) DEFAULT NULL,
`preview_token` varbinary(64) DEFAULT NULL,
`download_token` varbinary(64) DEFAULT NULL,
`thumb` varbinary(128) DEFAULT '',
`thumb_src` varbinary(8) DEFAULT '',
`ref_id` varbinary(16) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uix_auth_users_user_uid` (`user_uid`),
KEY `idx_auth_users_user_email` (`user_email`),
KEY `idx_auth_users_invite_token` (`invite_token`),
KEY `idx_auth_users_born_at` (`born_at`),
KEY `idx_auth_users_thumb` (`thumb`),
KEY `idx_auth_users_user_uuid` (`user_uuid`),
KEY `idx_auth_users_auth_id` (`auth_id`),
KEY `idx_auth_users_user_name` (`user_name`),
KEY `idx_auth_users_expires_at` (`expires_at`),
KEY `idx_auth_users_deleted_at` (`deleted_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `auth_users_details` (
`user_uid` varbinary(42) NOT NULL,
`subj_uid` varbinary(42) DEFAULT NULL,
`subj_src` varbinary(8) DEFAULT '',
`place_id` varbinary(42) DEFAULT 'zz',
`place_src` varbinary(8) DEFAULT NULL,
`cell_id` varbinary(42) DEFAULT 'zz',
`birth_year` int(11) DEFAULT -1,
`birth_month` int(11) DEFAULT -1,
`birth_day` int(11) DEFAULT -1,
`name_title` varchar(32) DEFAULT NULL,
`given_name` varchar(64) DEFAULT NULL,
`middle_name` varchar(64) DEFAULT NULL,
`family_name` varchar(64) DEFAULT NULL,
`name_suffix` varchar(32) DEFAULT NULL,
`nick_name` varchar(64) DEFAULT NULL,
`name_src` varbinary(8) DEFAULT NULL,
`user_gender` varchar(16) DEFAULT NULL,
`user_about` varchar(512) DEFAULT NULL,
`user_bio` varchar(2048) DEFAULT NULL,
`user_location` varchar(512) DEFAULT NULL,
`user_country` varbinary(2) DEFAULT 'zz',
`user_phone` varchar(32) DEFAULT NULL,
`site_url` varbinary(512) DEFAULT NULL,
`profile_url` varbinary(512) DEFAULT NULL,
`feed_url` varbinary(512) DEFAULT NULL,
`avatar_url` varbinary(512) DEFAULT NULL,
`org_title` varchar(64) DEFAULT NULL,
`org_name` varchar(128) DEFAULT NULL,
`org_email` varchar(255) DEFAULT NULL,
`org_phone` varchar(32) DEFAULT NULL,
`org_url` varbinary(512) DEFAULT NULL,
`id_url` varbinary(512) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`user_uid`),
KEY `idx_auth_users_details_org_email` (`org_email`),
KEY `idx_auth_users_details_subj_uid` (`subj_uid`),
KEY `idx_auth_users_details_place_id` (`place_id`),
KEY `idx_auth_users_details_cell_id` (`cell_id`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `auth_users_settings` (
`user_uid` varbinary(42) NOT NULL,
`ui_theme` varbinary(32) DEFAULT NULL,
`ui_language` varbinary(32) DEFAULT NULL,
`ui_time_zone` varbinary(64) DEFAULT NULL,
`maps_style` varbinary(32) DEFAULT NULL,
`maps_animate` int(11) DEFAULT 0,
`index_path` varbinary(1024) DEFAULT NULL,
`index_rescan` int(11) DEFAULT 0,
`import_path` varbinary(1024) DEFAULT NULL,
`import_move` int(11) DEFAULT 0,
`download_originals` int(11) DEFAULT 0,
`download_media_raw` int(11) DEFAULT 0,
`download_media_sidecar` int(11) DEFAULT 0,
`upload_path` varbinary(1024) DEFAULT NULL,
`default_page` varbinary(128) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`user_uid`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `auth_users_shares` (
`user_uid` varbinary(42) NOT NULL,
`share_uid` varbinary(42) NOT NULL,
`link_uid` varbinary(42) DEFAULT NULL,
`expires_at` datetime DEFAULT NULL,
`comment` varchar(512) DEFAULT NULL,
`perm` int(10) unsigned DEFAULT NULL,
`ref_id` varbinary(16) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`user_uid`,`share_uid`),
KEY `idx_auth_users_shares_share_uid` (`share_uid`),
KEY `idx_auth_users_shares_expires_at` (`expires_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `cameras` (
`id` int(10) unsigned NOT NULL,
`camera_slug` varbinary(160) DEFAULT NULL,
`camera_name` varchar(160) DEFAULT NULL,
`camera_make` varchar(160) DEFAULT NULL,
`camera_model` varchar(160) DEFAULT NULL,
`camera_type` varchar(100) DEFAULT NULL,
`camera_description` varchar(2048) DEFAULT NULL,
`camera_notes` varchar(1024) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uix_cameras_camera_slug` (`camera_slug`),
KEY `idx_cameras_deleted_at` (`deleted_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `categories` (
`label_id` int(10) unsigned NOT NULL,
`category_id` int(10) unsigned NOT NULL,
PRIMARY KEY (`label_id`,`category_id`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `cells` (
`id` varbinary(42) NOT NULL,
`cell_name` varchar(200) DEFAULT NULL,
`cell_street` varchar(100) DEFAULT NULL,
`cell_postcode` varchar(50) DEFAULT NULL,
`cell_category` varchar(50) DEFAULT NULL,
`place_id` varbinary(42) DEFAULT 'zz',
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `countries` (
`id` varbinary(2) NOT NULL,
`country_slug` varbinary(160) DEFAULT NULL,
`country_name` varchar(160) DEFAULT NULL,
`country_description` varchar(2048) DEFAULT NULL,
`country_notes` varchar(1024) DEFAULT NULL,
`country_photo_id` int(10) unsigned DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uix_countries_country_slug` (`country_slug`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `details` (
`photo_id` int(10) unsigned NOT NULL,
`keywords` varchar(2048) DEFAULT NULL,
`keywords_src` varbinary(8) DEFAULT NULL,
`notes` varchar(2048) DEFAULT NULL,
`notes_src` varbinary(8) DEFAULT NULL,
`subject` varchar(1024) DEFAULT NULL,
`subject_src` varbinary(8) DEFAULT NULL,
`artist` varchar(1024) DEFAULT NULL,
`artist_src` varbinary(8) DEFAULT NULL,
`copyright` varchar(1024) DEFAULT NULL,
`copyright_src` varbinary(8) DEFAULT NULL,
`license` varchar(1024) DEFAULT NULL,
`license_src` varbinary(8) DEFAULT NULL,
`software` varchar(1024) DEFAULT NULL,
`software_src` varbinary(8) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`photo_id`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `duplicates` (
`file_name` varbinary(755) NOT NULL,
`file_root` varbinary(16) NOT NULL DEFAULT '/',
`file_hash` varbinary(128) DEFAULT '',
`file_size` bigint(20) DEFAULT NULL,
`mod_time` bigint(20) DEFAULT NULL,
PRIMARY KEY (`file_name`,`file_root`),
KEY `idx_duplicates_file_hash` (`file_hash`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `errors` (
`id` int(10) unsigned NOT NULL,
`error_time` datetime DEFAULT NULL,
`error_level` varbinary(32) DEFAULT NULL,
`error_message` varbinary(2048) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx_errors_error_time` (`error_time`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `faces` (
`id` varbinary(64) NOT NULL,
`face_src` varbinary(8) DEFAULT NULL,
`face_kind` int(11) DEFAULT NULL,
`face_hidden` tinyint(1) DEFAULT NULL,
`subj_uid` varbinary(42) DEFAULT '',
`samples` int(11) DEFAULT NULL,
`sample_radius` double DEFAULT NULL,
`collisions` int(11) DEFAULT NULL,
`collision_radius` double DEFAULT NULL,
`embedding_json` mediumblob DEFAULT NULL,
`matched_at` datetime DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx_faces_subj_uid` (`subj_uid`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `files` (
`id` int(10) unsigned NOT NULL,
`photo_id` int(10) unsigned DEFAULT NULL,
`photo_uid` varbinary(42) DEFAULT NULL,
`photo_taken_at` datetime DEFAULT NULL,
`time_index` varbinary(64) DEFAULT NULL,
`media_id` varbinary(32) DEFAULT NULL,
`media_utc` bigint(20) DEFAULT NULL,
`instance_id` varbinary(64) DEFAULT NULL,
`file_uid` varbinary(42) DEFAULT NULL,
`file_name` varbinary(1024) DEFAULT NULL,
`file_root` varbinary(16) DEFAULT '/',
`original_name` varbinary(755) DEFAULT NULL,
`file_hash` varbinary(128) DEFAULT NULL,
`file_size` bigint(20) DEFAULT NULL,
`file_codec` varbinary(32) DEFAULT NULL,
`file_type` varbinary(16) DEFAULT NULL,
`media_type` varbinary(16) DEFAULT NULL,
`file_mime` varbinary(64) DEFAULT NULL,
`file_primary` tinyint(1) DEFAULT NULL,
`file_sidecar` tinyint(1) DEFAULT NULL,
`file_missing` tinyint(1) DEFAULT NULL,
`file_portrait` tinyint(1) DEFAULT NULL,
`file_video` tinyint(1) DEFAULT NULL,
`file_duration` bigint(20) DEFAULT NULL,
`file_fps` double DEFAULT NULL,
`file_frames` int(11) DEFAULT NULL,
`file_width` int(11) DEFAULT NULL,
`file_height` int(11) DEFAULT NULL,
`file_orientation` int(11) DEFAULT NULL,
`file_orientation_src` varbinary(8) DEFAULT '',
`file_projection` varbinary(64) DEFAULT NULL,
`file_aspect_ratio` float DEFAULT NULL,
`file_hdr` tinyint(1) DEFAULT NULL,
`file_watermark` tinyint(1) DEFAULT NULL,
`file_color_profile` varbinary(64) DEFAULT NULL,
`file_main_color` varbinary(16) DEFAULT NULL,
`file_colors` varbinary(18) DEFAULT NULL,
`File_luminance` varbinary(18) DEFAULT NULL,
`file_diff` int(11) DEFAULT -1,
`file_chroma` smallint(6) DEFAULT -1,
`file_software` varchar(64) DEFAULT NULL,
`file_error` varbinary(512) DEFAULT NULL,
`mod_time` bigint(20) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`created_in` bigint(20) DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`updated_in` bigint(20) DEFAULT NULL,
`published_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uix_files_file_uid` (`file_uid`),
UNIQUE KEY `idx_files_name_root` (`file_name`,`file_root`),
UNIQUE KEY `idx_files_search_media` (`media_id`),
UNIQUE KEY `idx_files_search_timeline` (`time_index`),
KEY `idx_files_photo_id` (`photo_id`,`file_primary`),
KEY `idx_files_photo_taken_at` (`photo_taken_at`),
KEY `idx_files_file_error` (`file_error`),
KEY `idx_files_published_at` (`published_at`),
KEY `idx_files_deleted_at` (`deleted_at`),
KEY `idx_files_photo_uid` (`photo_uid`),
KEY `idx_files_media_utc` (`media_utc`),
KEY `idx_files_instance_id` (`instance_id`),
KEY `idx_files_file_hash` (`file_hash`),
KEY `idx_files_missing_root` (`file_missing`,`file_root`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `files_share` (
`file_id` int(10) unsigned NOT NULL,
`service_id` int(10) unsigned NOT NULL,
`remote_name` varbinary(255) NOT NULL,
`status` varbinary(16) DEFAULT NULL,
`error` varbinary(512) DEFAULT NULL,
`errors` int(11) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`file_id`,`service_id`,`remote_name`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `files_sync` (
`remote_name` varbinary(255) NOT NULL,
`service_id` int(10) unsigned NOT NULL,
`file_id` int(10) unsigned DEFAULT NULL,
`remote_date` datetime DEFAULT NULL,
`remote_size` bigint(20) DEFAULT NULL,
`status` varbinary(16) DEFAULT NULL,
`error` varbinary(512) DEFAULT NULL,
`errors` int(11) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`remote_name`,`service_id`),
KEY `idx_files_sync_file_id` (`file_id`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `folders` (
`path` varbinary(1024) DEFAULT NULL,
`root` varbinary(16) DEFAULT '',
`folder_uid` varbinary(42) NOT NULL,
`folder_type` varbinary(16) DEFAULT NULL,
`folder_title` varchar(200) DEFAULT NULL,
`folder_category` varchar(100) DEFAULT NULL,
`folder_description` varchar(2048) DEFAULT NULL,
`folder_order` varbinary(32) DEFAULT NULL,
`folder_country` varbinary(2) DEFAULT 'zz',
`folder_year` int(11) DEFAULT NULL,
`folder_month` int(11) DEFAULT NULL,
`folder_day` int(11) DEFAULT NULL,
`folder_favorite` tinyint(1) DEFAULT NULL,
`folder_private` tinyint(1) DEFAULT NULL,
`folder_ignore` tinyint(1) DEFAULT NULL,
`folder_watch` tinyint(1) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`modified_at` datetime DEFAULT NULL,
`published_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`folder_uid`),
UNIQUE KEY `idx_folders_path_root` (`path`,`root`),
KEY `idx_folders_folder_category` (`folder_category`),
KEY `idx_folders_country_year_month` (`folder_country`,`folder_year`,`folder_month`),
KEY `idx_folders_published_at` (`published_at`),
KEY `idx_folders_deleted_at` (`deleted_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `keywords` (
`id` int(10) unsigned NOT NULL,
`keyword` varchar(64) DEFAULT NULL,
`skip` tinyint(1) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx_keywords_keyword` (`keyword`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `labels` (
`id` int(10) unsigned NOT NULL,
`label_uid` varbinary(42) DEFAULT NULL,
`label_slug` varbinary(160) DEFAULT NULL,
`custom_slug` varbinary(160) DEFAULT NULL,
`label_name` varchar(160) DEFAULT NULL,
`label_priority` int(11) DEFAULT NULL,
`label_favorite` tinyint(1) DEFAULT NULL,
`label_description` varchar(2048) DEFAULT NULL,
`label_notes` varchar(1024) DEFAULT NULL,
`photo_count` int(11) DEFAULT 1,
`thumb` varbinary(128) DEFAULT '',
`thumb_src` varbinary(8) DEFAULT '',
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`published_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uix_labels_label_uid` (`label_uid`),
UNIQUE KEY `uix_labels_label_slug` (`label_slug`),
KEY `idx_labels_thumb` (`thumb`),
KEY `idx_labels_published_at` (`published_at`),
KEY `idx_labels_deleted_at` (`deleted_at`),
KEY `idx_labels_custom_slug` (`custom_slug`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `lenses` (
`id` int(10) unsigned NOT NULL,
`lens_slug` varbinary(160) DEFAULT NULL,
`lens_name` varchar(160) DEFAULT NULL,
`lens_make` varchar(160) DEFAULT NULL,
`lens_model` varchar(160) DEFAULT NULL,
`lens_type` varchar(100) DEFAULT NULL,
`lens_description` varchar(2048) DEFAULT NULL,
`lens_notes` varchar(1024) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uix_lenses_lens_slug` (`lens_slug`),
KEY `idx_lenses_deleted_at` (`deleted_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `links` (
`link_uid` varbinary(42) NOT NULL,
`share_uid` varbinary(42) DEFAULT NULL,
`share_slug` varbinary(160) DEFAULT NULL,
`link_token` varbinary(160) DEFAULT NULL,
`link_expires` int(11) DEFAULT NULL,
`link_views` int(10) unsigned DEFAULT NULL,
`max_views` int(10) unsigned DEFAULT NULL,
`has_password` tinyint(1) DEFAULT NULL,
`comment` varchar(512) DEFAULT NULL,
`perm` int(10) unsigned DEFAULT NULL,
`ref_id` varbinary(16) DEFAULT NULL,
`created_by` varbinary(42) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`modified_at` datetime DEFAULT NULL,
PRIMARY KEY (`link_uid`),
UNIQUE KEY `idx_links_uid_token` (`share_uid`,`link_token`),
KEY `idx_links_share_slug` (`share_slug`),
KEY `idx_links_created_by` (`created_by`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `markers` (
`marker_uid` varbinary(42) NOT NULL,
`file_uid` varbinary(42) DEFAULT '',
`marker_type` varbinary(8) DEFAULT '',
`marker_src` varbinary(8) DEFAULT '',
`marker_name` varchar(160) DEFAULT NULL,
`marker_review` tinyint(1) DEFAULT NULL,
`marker_invalid` tinyint(1) DEFAULT NULL,
`subj_uid` varbinary(42) DEFAULT NULL,
`subj_src` varbinary(8) DEFAULT '',
`face_id` varbinary(64) DEFAULT NULL,
`face_dist` double DEFAULT -1,
`embeddings_json` mediumblob DEFAULT NULL,
`landmarks_json` mediumblob DEFAULT NULL,
`x` float DEFAULT NULL,
`y` float DEFAULT NULL,
`w` float DEFAULT NULL,
`h` float DEFAULT NULL,
`q` int(11) DEFAULT NULL,
`size` int(11) DEFAULT -1,
`score` smallint(6) DEFAULT NULL,
`thumb` varbinary(128) DEFAULT '',
`matched_at` datetime DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`marker_uid`),
KEY `idx_markers_file_uid` (`file_uid`),
KEY `idx_markers_subj_uid_src` (`subj_uid`,`subj_src`),
KEY `idx_markers_face_id` (`face_id`),
KEY `idx_markers_thumb` (`thumb`),
KEY `idx_markers_matched_at` (`matched_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `migrations` (
`id` varchar(16) NOT NULL,
`dialect` varchar(16) DEFAULT NULL,
`stage` varchar(16) DEFAULT NULL,
`error` varchar(255) DEFAULT NULL,
`source` varchar(16) DEFAULT NULL,
`started_at` datetime DEFAULT NULL,
`finished_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `passcodes` (
`uid` varbinary(255) NOT NULL,
`key_type` varchar(64) NOT NULL DEFAULT '',
`key_url` varchar(2048) DEFAULT '',
`recovery_code` varchar(255) DEFAULT '',
`verified_at` datetime DEFAULT NULL,
`activated_at` datetime DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`uid`,`key_type`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `passwords` (
`uid` varbinary(255) NOT NULL,
`hash` varbinary(255) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`uid`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `photos` (
`id` int(10) unsigned NOT NULL,
`uuid` varbinary(64) DEFAULT NULL,
`taken_at` datetime DEFAULT NULL,
`taken_at_local` datetime DEFAULT NULL,
`taken_src` varbinary(8) DEFAULT NULL,
`photo_uid` varbinary(42) DEFAULT NULL,
`photo_type` varbinary(8) DEFAULT 'image',
`type_src` varbinary(8) DEFAULT NULL,
`photo_title` varchar(200) DEFAULT NULL,
`title_src` varbinary(8) DEFAULT NULL,
`photo_caption` varchar(4096) DEFAULT NULL,
`caption_src` varbinary(8) DEFAULT NULL,
`photo_path` varbinary(1024) DEFAULT NULL,
`photo_name` varbinary(255) DEFAULT NULL,
`original_name` varbinary(755) DEFAULT NULL,
`photo_stack` tinyint(4) DEFAULT NULL,
`photo_favorite` tinyint(1) DEFAULT NULL,
`photo_private` tinyint(1) DEFAULT NULL,
`photo_scan` tinyint(1) DEFAULT NULL,
`photo_panorama` tinyint(1) DEFAULT NULL,
`time_zone` varbinary(64) DEFAULT NULL,
`place_id` varbinary(42) DEFAULT 'zz',
`place_src` varbinary(8) DEFAULT NULL,
`cell_id` varbinary(42) DEFAULT 'zz',
`cell_accuracy` int(11) DEFAULT NULL,
`photo_altitude` int(11) DEFAULT NULL,
`photo_lat` double DEFAULT NULL,
`photo_lng` double DEFAULT NULL,
`photo_country` varbinary(2) DEFAULT 'zz',
`photo_year` int(11) DEFAULT NULL,
`photo_month` int(11) DEFAULT NULL,
`photo_day` int(11) DEFAULT NULL,
`photo_iso` int(11) DEFAULT NULL,
`photo_exposure` varbinary(64) DEFAULT NULL,
`photo_f_number` float DEFAULT NULL,
`photo_focal_length` int(11) DEFAULT NULL,
`photo_quality` smallint(6) DEFAULT NULL,
`photo_faces` int(11) DEFAULT NULL,
`photo_resolution` smallint(6) DEFAULT NULL,
`photo_duration` bigint(20) DEFAULT NULL,
`photo_color` smallint(6) DEFAULT -1,
`camera_id` int(10) unsigned DEFAULT 1,
`camera_serial` varbinary(160) DEFAULT NULL,
`camera_src` varbinary(8) DEFAULT NULL,
`lens_id` int(10) unsigned DEFAULT 1,
`created_by` varbinary(42) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`edited_at` datetime DEFAULT NULL,
`published_at` datetime DEFAULT NULL,
`checked_at` datetime DEFAULT NULL,
`estimated_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uix_photos_photo_uid` (`photo_uid`),
KEY `idx_photos_created_by` (`created_by`),
KEY `idx_photos_cell_id` (`cell_id`),
KEY `idx_photos_camera_lens` (`camera_id`,`lens_id`),
KEY `idx_photos_checked_at` (`checked_at`),
KEY `idx_photos_photo_lng` (`photo_lng`),
KEY `idx_photos_published_at` (`published_at`),
KEY `idx_photos_deleted_at` (`deleted_at`),
KEY `idx_photos_uuid` (`uuid`),
KEY `idx_photos_photo_lat` (`photo_lat`),
KEY `idx_photos_place_id` (`place_id`),
KEY `idx_photos_country_year_month` (`photo_country`,`photo_year`,`photo_month`),
KEY `idx_photos_ymd` (`photo_day`),
KEY `idx_photos_taken_uid` (`taken_at`,`photo_uid`),
KEY `idx_photos_path_name` (`photo_path`,`photo_name`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
-- Join table: many-to-many link between photos and albums.
-- NOTE(review): semantics of `order`, `hidden`, and `missing` are inferred from
-- the column names only — confirm against the PhotoAlbum entity in the Go code.
CREATE TABLE `photos_albums` (
`photo_uid` varbinary(42) NOT NULL,
`album_uid` varbinary(42) NOT NULL,
`order` int(11) DEFAULT NULL,
`hidden` tinyint(1) DEFAULT NULL,
`missing` tinyint(1) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
-- Composite primary key; the extra index supports lookups by album alone.
PRIMARY KEY (`photo_uid`,`album_uid`),
KEY `idx_photos_albums_album_uid` (`album_uid`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
-- Join table: many-to-many link between photos and keywords.
CREATE TABLE `photos_keywords` (
`photo_id` int(10) unsigned NOT NULL,
`keyword_id` int(10) unsigned NOT NULL,
-- Composite primary key; the extra index supports lookups by keyword alone.
PRIMARY KEY (`photo_id`,`keyword_id`),
KEY `idx_photos_keywords_keyword_id` (`keyword_id`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
-- Join table: many-to-many link between photos and labels, with provenance
-- (`label_src`) and a confidence value (`uncertainty`).
CREATE TABLE `photos_labels` (
`photo_id` int(10) unsigned NOT NULL,
`label_id` int(10) unsigned NOT NULL,
`label_src` varbinary(8) DEFAULT NULL,
`uncertainty` smallint(6) DEFAULT NULL,
PRIMARY KEY (`photo_id`,`label_id`),
KEY `idx_photos_labels_label_id` (`label_id`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
-- Per-user (and per-team) permissions on individual photos.
-- NOTE(review): `uid` presumably refers to the photo UID and `perm` is a
-- permission bitmask — verify against the entity definitions in the Go code.
CREATE TABLE `photos_users` (
`uid` varbinary(42) NOT NULL,
`user_uid` varbinary(42) NOT NULL,
`team_uid` varbinary(42) DEFAULT NULL,
`perm` int(10) unsigned DEFAULT NULL,
PRIMARY KEY (`uid`,`user_uid`),
KEY `idx_photos_users_user_uid` (`user_uid`),
KEY `idx_photos_users_team_uid` (`team_uid`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
-- Reverse-geocoded places referenced by photos via `photos`.`place_id`.
CREATE TABLE `places` (
`id` varbinary(42) NOT NULL,
`place_label` varchar(400) DEFAULT NULL,
`place_district` varchar(100) DEFAULT NULL,
`place_city` varchar(100) DEFAULT NULL,
`place_state` varchar(100) DEFAULT NULL,
-- Two-letter country code, matching `photos`.`photo_country`.
`place_country` varbinary(2) DEFAULT NULL,
`place_keywords` varchar(300) DEFAULT NULL,
`place_favorite` tinyint(1) DEFAULT NULL,
`photo_count` int(11) DEFAULT 1,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx_places_place_district` (`place_district`),
KEY `idx_places_place_city` (`place_city`),
KEY `idx_places_place_state` (`place_state`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
-- User reactions keyed by (entity uid, user uid, reaction value); at most one
-- row per user/reaction pair on a given entity.
CREATE TABLE `reactions` (
`uid` varbinary(42) NOT NULL,
`user_uid` varbinary(42) NOT NULL,
`reaction` varbinary(64) NOT NULL,
`reacted` int(11) DEFAULT NULL,
`reacted_at` datetime DEFAULT NULL,
PRIMARY KEY (`uid`,`user_uid`,`reaction`),
KEY `idx_reactions_reacted_at` (`reacted_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
-- Remote service accounts used for sharing and syncing files
-- (referenced by `files_share` and `files_sync` via `service_id`).
CREATE TABLE `services` (
`id` int(10) unsigned NOT NULL,
`acc_name` varchar(160) DEFAULT NULL,
`acc_owner` varchar(160) DEFAULT NULL,
`acc_url` varchar(255) DEFAULT NULL,
`acc_type` varbinary(255) DEFAULT NULL,
`acc_key` varbinary(255) DEFAULT NULL,
`acc_user` varbinary(255) DEFAULT NULL,
`acc_pass` varbinary(255) DEFAULT NULL,
`acc_timeout` varbinary(16) DEFAULT NULL,
-- Last error message and a running error count for the account.
`acc_error` varbinary(512) DEFAULT NULL,
`acc_errors` int(11) DEFAULT NULL,
`acc_share` tinyint(1) DEFAULT NULL,
`acc_sync` tinyint(1) DEFAULT NULL,
`retry_limit` int(11) DEFAULT NULL,
-- Upload (share) settings.
`share_path` varbinary(1024) DEFAULT NULL,
`share_size` varbinary(16) DEFAULT NULL,
`share_expires` int(11) DEFAULT NULL,
-- Download (sync) settings.
`sync_path` varbinary(1024) DEFAULT NULL,
`sync_status` varbinary(16) DEFAULT NULL,
`sync_interval` int(11) DEFAULT NULL,
`sync_date` datetime DEFAULT NULL,
`sync_upload` tinyint(1) DEFAULT NULL,
`sync_download` tinyint(1) DEFAULT NULL,
`sync_filenames` tinyint(1) DEFAULT NULL,
`sync_raw` tinyint(1) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
-- Soft-delete timestamp (indexed below, consistent with other tables).
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx_services_deleted_at` (`deleted_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
-- Subjects referenced by face markers (`markers`.`subj_uid` / `faces`.`subj_uid`).
-- NOTE(review): `subj_type` presumably distinguishes people from other subject
-- kinds — confirm against the Subject entity in the Go code.
CREATE TABLE `subjects` (
`subj_uid` varbinary(42) NOT NULL,
`subj_type` varbinary(8) DEFAULT '',
`subj_src` varbinary(8) DEFAULT '',
`subj_slug` varbinary(160) DEFAULT '',
-- Display name; enforced unique via `uix_subjects_subj_name` below.
`subj_name` varchar(160) DEFAULT '',
`subj_alias` varchar(160) DEFAULT '',
`subj_about` varchar(512) DEFAULT NULL,
`subj_bio` varchar(2048) DEFAULT NULL,
`subj_notes` varchar(1024) DEFAULT NULL,
`subj_favorite` tinyint(1) DEFAULT 0,
`subj_hidden` tinyint(1) DEFAULT 0,
`subj_private` tinyint(1) DEFAULT 0,
`subj_excluded` tinyint(1) DEFAULT 0,
-- Denormalized counters.
`file_count` int(11) DEFAULT 0,
`photo_count` int(11) DEFAULT 0,
`thumb` varbinary(128) DEFAULT '',
`thumb_src` varbinary(8) DEFAULT '',
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`subj_uid`),
UNIQUE KEY `uix_subjects_subj_name` (`subj_name`),
KEY `idx_subjects_subj_slug` (`subj_slug`),
KEY `idx_subjects_thumb` (`thumb`),
KEY `idx_subjects_deleted_at` (`deleted_at`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
-- Records which application version/edition ran migrations, and when;
-- (version, edition) pairs are unique.
CREATE TABLE `versions` (
`id` int(10) unsigned NOT NULL,
`version` varchar(255) DEFAULT NULL,
`edition` varchar(255) DEFAULT NULL,
`error` varchar(255) DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`migrated_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `idx_version_edition` (`version`,`edition`)
);
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*M!100616 SET NOTE_VERBOSITY=@OLD_NOTE_VERBOSITY */;

View file

@ -0,0 +1,158 @@
-- SQLite reference schema (auto-generated dump of the same data model as the
-- MariaDB schema above). Statement order follows the historical migration
-- order, so a table's indexes and late-added columns (the trailing quoted
-- columns after the final comma in each CREATE TABLE) may appear far from the
-- original definition. Do not reorder: this file is a fixture.
CREATE TABLE IF NOT EXISTS "files_share" ("file_id" integer,"service_id" integer,"remote_name" VARBINARY(255),"status" VARBINARY(16),"error" VARBINARY(512),"errors" integer,"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("file_id","service_id","remote_name"));
CREATE TABLE IF NOT EXISTS "photos_labels" ("photo_id" integer,"label_id" integer,"label_src" VARBINARY(8),"uncertainty" SMALLINT , PRIMARY KEY ("photo_id","label_id"));
CREATE INDEX idx_photos_labels_label_id ON "photos_labels"(label_id) ;
CREATE TABLE IF NOT EXISTS "services" ("id" integer primary key autoincrement,"acc_name" VARCHAR(255),"acc_owner" VARCHAR(255),"acc_url" VARBINARY(512),"acc_type" VARBINARY(255),"acc_key" VARBINARY(255),"acc_user" VARBINARY(255),"acc_pass" VARBINARY(255),"acc_error" VARBINARY(512),"acc_errors" integer,"acc_share" bool,"acc_sync" bool,"retry_limit" integer,"share_path" VARBINARY(500),"share_size" VARBINARY(16),"share_expires" integer,"sync_path" VARBINARY(500),"sync_status" VARBINARY(16),"sync_interval" integer,"sync_date" datetime,"sync_upload" bool,"sync_download" bool,"sync_filenames" bool,"sync_raw" bool,"created_at" datetime,"updated_at" datetime,"deleted_at" datetime , "acc_timeout" VARBINARY(16));
-- NOTE(review): index name uses the table's old name ("accounts"); harmless,
-- but it differs from idx_services_deleted_at created further below.
CREATE INDEX idx_accounts_deleted_at ON "services"(deleted_at) ;
-- Core photos table and its indexes.
CREATE TABLE IF NOT EXISTS "photos" ("id" integer primary key autoincrement,"uuid" VARBINARY(42),"taken_at" datetime,"taken_at_local" datetime,"taken_src" VARBINARY(8),"photo_uid" VARBINARY(42),"photo_type" VARBINARY(8) DEFAULT 'image',"type_src" VARBINARY(8),"photo_title" VARCHAR(255),"title_src" VARBINARY(8),"photo_description" TEXT,"description_src" VARBINARY(8),"photo_path" VARBINARY(500),"photo_name" VARBINARY(255),"original_name" VARBINARY(755),"photo_stack" integer,"photo_favorite" bool,"photo_private" bool,"photo_scan" bool,"photo_panorama" bool,"time_zone" VARBINARY(64),"place_id" VARBINARY(42) DEFAULT 'zz',"place_src" VARBINARY(8),"cell_id" VARBINARY(42) DEFAULT 'zz',"cell_accuracy" integer,"photo_altitude" integer,"photo_lat" FLOAT,"photo_lng" FLOAT,"photo_country" VARBINARY(2) DEFAULT 'zz',"photo_year" integer,"photo_month" integer,"photo_day" integer,"photo_iso" integer,"photo_exposure" VARBINARY(64),"photo_f_number" FLOAT,"photo_focal_length" integer,"photo_quality" SMALLINT,"photo_resolution" SMALLINT,"photo_color" integer,"camera_id" integer DEFAULT 1,"camera_serial" VARBINARY(255),"camera_src" VARBINARY(8),"lens_id" integer DEFAULT 1,"created_at" datetime,"updated_at" datetime,"edited_at" datetime,"checked_at" datetime,"deleted_at" datetime , "photo_faces" integer, "estimated_at" datetime, "photo_duration" bigint, "created_by" VARBINARY(42), "published_at" datetime);
CREATE INDEX idx_photos_taken_uid ON "photos"(taken_at, photo_uid) ;
CREATE INDEX idx_photos_cell_id ON "photos"(cell_id) ;
CREATE INDEX idx_photos_photo_lat ON "photos"(photo_lat) ;
CREATE INDEX idx_photos_photo_lng ON "photos"(photo_lng) ;
CREATE INDEX idx_photos_country_year_month ON "photos"(photo_country, photo_year, photo_month) ;
CREATE INDEX idx_photos_checked_at ON "photos"(checked_at) ;
CREATE INDEX idx_photos_deleted_at ON "photos"(deleted_at) ;
CREATE INDEX idx_photos_uuid ON "photos"("uuid") ;
CREATE INDEX idx_photos_path_name ON "photos"(photo_path, photo_name) ;
CREATE INDEX idx_photos_place_id ON "photos"(place_id) ;
CREATE INDEX idx_photos_camera_lens ON "photos"(camera_id, lens_id) ;
CREATE UNIQUE INDEX uix_photos_photo_uid ON "photos"(photo_uid) ;
CREATE TABLE IF NOT EXISTS "details" ("photo_id" integer,"keywords" TEXT,"keywords_src" VARBINARY(8),"notes" TEXT,"notes_src" VARBINARY(8),"subject" VARCHAR(255),"subject_src" VARBINARY(8),"artist" VARCHAR(255),"artist_src" VARBINARY(8),"copyright" VARCHAR(255),"copyright_src" VARBINARY(8),"license" VARCHAR(255),"license_src" VARBINARY(8),"created_at" datetime,"updated_at" datetime , "software" VARCHAR(1024), "software_src" VARBINARY(8), PRIMARY KEY ("photo_id"));
CREATE TABLE IF NOT EXISTS "lenses" ("id" integer primary key autoincrement,"lens_slug" VARBINARY(255),"lens_name" VARCHAR(255),"lens_make" VARCHAR(255),"lens_model" VARCHAR(255),"lens_type" VARCHAR(255),"lens_description" TEXT,"lens_notes" TEXT,"created_at" datetime,"updated_at" datetime,"deleted_at" datetime );
CREATE INDEX idx_lenses_deleted_at ON "lenses"(deleted_at) ;
CREATE UNIQUE INDEX uix_lenses_lens_slug ON "lenses"(lens_slug) ;
CREATE TABLE IF NOT EXISTS "countries" ("id" VARBINARY(2),"country_slug" VARBINARY(255),"country_name" varchar(255),"country_description" TEXT,"country_notes" TEXT,"country_photo_id" integer , PRIMARY KEY ("id"));
CREATE UNIQUE INDEX uix_countries_country_slug ON "countries"(country_slug) ;
CREATE TABLE IF NOT EXISTS "photos_albums" ("photo_uid" VARBINARY(42),"album_uid" VARBINARY(42),"order" integer,"hidden" bool,"missing" bool,"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("photo_uid","album_uid"));
CREATE INDEX idx_photos_albums_album_uid ON "photos_albums"(album_uid) ;
CREATE TABLE IF NOT EXISTS "categories" ("label_id" integer,"category_id" integer, PRIMARY KEY ("label_id","category_id"));
CREATE TABLE IF NOT EXISTS "labels" ("id" integer primary key autoincrement,"label_uid" VARBINARY(42),"label_slug" VARBINARY(255),"custom_slug" VARBINARY(255),"label_name" VARCHAR(255),"label_priority" integer,"label_favorite" bool,"label_description" TEXT,"label_notes" TEXT,"photo_count" integer DEFAULT 1,"created_at" datetime,"updated_at" datetime,"deleted_at" datetime , "thumb" VARBINARY(128) DEFAULT '', "thumb_src" VARBINARY(8) DEFAULT '', "published_at" datetime);
CREATE INDEX idx_labels_custom_slug ON "labels"(custom_slug) ;
CREATE INDEX idx_labels_deleted_at ON "labels"(deleted_at) ;
CREATE UNIQUE INDEX uix_labels_label_uid ON "labels"(label_uid) ;
CREATE UNIQUE INDEX uix_labels_label_slug ON "labels"(label_slug) ;
CREATE TABLE IF NOT EXISTS "photos_keywords" ("photo_id" integer,"keyword_id" integer , PRIMARY KEY ("photo_id","keyword_id"));
CREATE INDEX idx_photos_keywords_keyword_id ON "photos_keywords"(keyword_id) ;
CREATE TABLE IF NOT EXISTS "passwords" ("uid" VARBINARY(255),"hash" VARBINARY(255),"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("uid"));
CREATE TABLE IF NOT EXISTS "duplicates" ("file_name" VARBINARY(755),"file_root" VARBINARY(16) DEFAULT '/',"file_hash" VARBINARY(128) DEFAULT '',"file_size" bigint,"mod_time" bigint , PRIMARY KEY ("file_name","file_root"));
CREATE INDEX idx_duplicates_file_hash ON "duplicates"(file_hash) ;
CREATE TABLE IF NOT EXISTS "places" ("id" VARBINARY(42),"place_label" VARBINARY(755),"place_city" VARCHAR(255),"place_state" VARCHAR(255),"place_country" VARBINARY(2),"place_keywords" VARCHAR(255),"place_favorite" bool,"photo_count" integer DEFAULT 1,"created_at" datetime,"updated_at" datetime , "place_district" VARCHAR(100), PRIMARY KEY ("id"));
CREATE TABLE IF NOT EXISTS "cameras" ("id" integer primary key autoincrement,"camera_slug" VARBINARY(255),"camera_name" VARCHAR(255),"camera_make" VARCHAR(255),"camera_model" VARCHAR(255),"camera_type" VARCHAR(255),"camera_description" TEXT,"camera_notes" TEXT,"created_at" datetime,"updated_at" datetime,"deleted_at" datetime );
CREATE INDEX idx_cameras_deleted_at ON "cameras"(deleted_at) ;
CREATE UNIQUE INDEX uix_cameras_camera_slug ON "cameras"(camera_slug) ;
CREATE TABLE IF NOT EXISTS "keywords" ("id" integer primary key autoincrement,"keyword" VARCHAR(64),"skip" bool );
CREATE INDEX idx_keywords_keyword ON "keywords"("keyword") ;
CREATE TABLE IF NOT EXISTS "folders" ("path" VARBINARY(500),"root" VARBINARY(16) DEFAULT '',"folder_uid" VARBINARY(42),"folder_type" VARBINARY(16),"folder_title" VARCHAR(255),"folder_category" VARCHAR(255),"folder_description" TEXT,"folder_order" VARBINARY(32),"folder_country" VARBINARY(2) DEFAULT 'zz',"folder_year" integer,"folder_month" integer,"folder_day" integer,"folder_favorite" bool,"folder_private" bool,"folder_ignore" bool,"folder_watch" bool,"created_at" datetime,"updated_at" datetime,"modified_at" datetime,"deleted_at" datetime , "published_at" datetime, PRIMARY KEY ("folder_uid"));
CREATE INDEX idx_folders_folder_category ON "folders"(folder_category) ;
CREATE INDEX idx_folders_country_year_month ON "folders"(folder_country, folder_year, folder_month) ;
CREATE INDEX idx_folders_deleted_at ON "folders"(deleted_at) ;
CREATE UNIQUE INDEX idx_folders_path_root ON "folders"("path", "root") ;
-- Legacy "users" table; the current account model lives in "auth_users" below.
CREATE TABLE IF NOT EXISTS "users" ("id" integer primary key autoincrement,"address_id" integer DEFAULT 1,"user_uid" VARBINARY(42),"mother_uid" VARBINARY(42),"father_uid" VARBINARY(42),"global_uid" VARBINARY(42),"full_name" varchar(128),"nick_name" varchar(64),"maiden_name" varchar(64),"artist_name" varchar(64),"user_name" varchar(64),"user_status" varchar(32),"user_disabled" bool,"user_settings" LONGTEXT,"primary_email" varchar(255),"email_confirmed" bool,"backup_email" varchar(255),"person_url" VARBINARY(255),"person_phone" varchar(32),"person_status" varchar(32),"person_avatar" VARBINARY(255),"person_location" varchar(128),"person_bio" TEXT,"person_accounts" LONGTEXT,"business_url" VARBINARY(255),"business_phone" varchar(32),"business_email" varchar(255),"company_name" varchar(128),"department_name" varchar(128),"job_title" varchar(64),"birth_year" integer,"birth_month" integer,"birth_day" integer,"terms_accepted" bool,"is_artist" bool,"is_subject" bool,"role_admin" bool,"role_guest" bool,"role_child" bool,"role_family" bool,"role_friend" bool,"webdav" bool,"storage_path" VARBINARY(500),"can_invite" bool,"invite_token" VARBINARY(32),"invited_by" VARBINARY(32),"confirm_token" VARBINARY(64),"reset_token" VARBINARY(64),"api_token" VARBINARY(128),"api_secret" VARBINARY(128),"login_attempts" integer,"login_at" datetime,"created_at" datetime,"updated_at" datetime,"deleted_at" datetime , "external_id" varchar(255));
CREATE INDEX idx_users_deleted_at ON "users"(deleted_at) ;
CREATE INDEX idx_users_global_uid ON "users"(global_uid) ;
CREATE INDEX idx_users_primary_email ON "users"(primary_email) ;
CREATE UNIQUE INDEX uix_users_user_uid ON "users"(user_uid) ;
CREATE TABLE IF NOT EXISTS "files" ("id" integer primary key autoincrement,"photo_id" integer,"photo_uid" VARBINARY(42),"instance_id" VARBINARY(42),"file_uid" VARBINARY(42),"file_name" VARBINARY(755),"file_root" VARBINARY(16) DEFAULT '/',"original_name" VARBINARY(755),"file_hash" VARBINARY(128),"file_size" bigint,"file_codec" VARBINARY(32),"file_type" VARBINARY(32),"file_mime" VARBINARY(64),"file_primary" bool,"file_sidecar" bool,"file_missing" bool,"file_portrait" bool,"file_video" bool,"file_duration" bigint,"file_width" integer,"file_height" integer,"file_orientation" integer,"file_projection" VARBINARY(16),"file_aspect_ratio" FLOAT,"file_main_color" VARBINARY(16),"file_colors" VARBINARY(9),"file_luminance" VARBINARY(9),"file_diff" integer,"file_chroma" integer,"file_error" VARBINARY(512),"mod_time" bigint,"created_at" datetime,"created_in" bigint,"updated_at" datetime,"updated_in" bigint,"deleted_at" datetime , "photo_taken_at" DATETIME, "time_index" VARBINARY(48), "media_id" VARBINARY(32), "media_utc" bigint, "media_type" VARBINARY(16), "file_fps" real, "file_frames" integer, "file_hdr" bool, "file_watermark" bool, "file_color_profile" VARBINARY(64), "file_software" VARCHAR(64), "published_at" datetime, "file_orientation_src" VARBINARY(8) DEFAULT '');
CREATE INDEX idx_files_instance_id ON "files"(instance_id) ;
CREATE INDEX idx_files_file_hash ON "files"(file_hash) ;
CREATE INDEX idx_files_file_main_color ON "files"(file_main_color) ;
CREATE INDEX idx_files_deleted_at ON "files"(deleted_at) ;
CREATE INDEX idx_files_photo_id ON "files"(photo_id) ;
CREATE INDEX idx_files_photo_uid ON "files"(photo_uid) ;
CREATE UNIQUE INDEX uix_files_file_uid ON "files"(file_uid) ;
CREATE UNIQUE INDEX idx_files_name_root ON "files"(file_name, file_root) ;
CREATE TABLE IF NOT EXISTS "files_sync" ("remote_name" VARBINARY(255),"service_id" integer,"file_id" integer,"remote_date" datetime,"remote_size" bigint,"status" VARBINARY(16),"error" VARBINARY(512),"errors" integer,"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("remote_name","service_id"));
CREATE INDEX idx_files_sync_file_id ON "files_sync"(file_id) ;
CREATE TABLE IF NOT EXISTS "cells" ("id" VARBINARY(42),"cell_name" VARCHAR(255),"cell_category" VARCHAR(64),"place_id" VARBINARY(42) DEFAULT 'zz',"created_at" datetime,"updated_at" datetime , "cell_street" VARCHAR(100), "cell_postcode" VARCHAR(50), PRIMARY KEY ("id"));
CREATE TABLE IF NOT EXISTS "albums" ("id" integer primary key autoincrement,"album_uid" VARBINARY(42),"cover_uid" VARBINARY(42),"folder_uid" VARBINARY(42),"album_slug" VARBINARY(255),"album_path" VARBINARY(500),"album_type" VARBINARY(8) DEFAULT 'album',"album_title" VARCHAR(255),"album_location" VARCHAR(255),"album_category" VARCHAR(255),"album_caption" TEXT,"album_description" TEXT,"album_notes" TEXT,"album_filter" VARBINARY(1024),"album_order" VARBINARY(32),"album_template" VARBINARY(255),"album_country" VARBINARY(2) DEFAULT 'zz',"album_year" integer,"album_month" integer,"album_day" integer,"album_favorite" bool,"album_private" bool,"created_at" datetime,"updated_at" datetime,"deleted_at" datetime , "parent_uid" VARBINARY(42) DEFAULT '', "thumb" VARBINARY(128) DEFAULT '', "thumb_src" VARBINARY(8) DEFAULT '', "album_state" VARCHAR(100), "created_by" VARBINARY(42), "published_at" datetime);
CREATE INDEX idx_albums_album_category ON "albums"(album_category) ;
CREATE INDEX idx_albums_country_year_month ON "albums"(album_country, album_year, album_month) ;
CREATE INDEX idx_albums_deleted_at ON "albums"(deleted_at) ;
CREATE INDEX idx_albums_folder_uid ON "albums"(folder_uid) ;
CREATE INDEX idx_albums_album_slug ON "albums"(album_slug) ;
CREATE INDEX idx_albums_album_path ON "albums"(album_path) ;
CREATE UNIQUE INDEX uix_albums_album_uid ON "albums"(album_uid) ;
CREATE TABLE IF NOT EXISTS "links" ("link_uid" VARBINARY(42),"share_uid" VARBINARY(42),"share_slug" VARBINARY(255),"link_token" VARBINARY(255),"link_expires" integer,"link_views" integer,"max_views" integer,"has_password" bool,"can_comment" bool,"can_edit" bool,"created_at" datetime,"modified_at" datetime , "comment" varchar(512), "perm" integer, "ref_id" VARBINARY(16), "created_by" VARBINARY(42), PRIMARY KEY ("link_uid"));
CREATE INDEX idx_links_share_slug ON "links"(share_slug) ;
CREATE UNIQUE INDEX idx_links_uid_token ON "links"(share_uid, link_token) ;
CREATE TABLE IF NOT EXISTS "errors" ("id" integer primary key autoincrement,"error_time" datetime,"error_level" VARBINARY(32),"error_message" VARBINARY(2048) );
CREATE INDEX idx_errors_error_time ON "errors"(error_time) ;
CREATE INDEX idx_labels_thumb ON "labels"("thumb") ;
CREATE TABLE IF NOT EXISTS "markers" ("marker_uid" VARBINARY(42),"file_uid" VARBINARY(42) DEFAULT '',"marker_type" VARBINARY(8) DEFAULT '',"marker_src" VARBINARY(8) DEFAULT '',"marker_name" VARCHAR(255),"marker_review" bool,"marker_invalid" bool,"subj_uid" VARBINARY(42),"subj_src" VARBINARY(8) DEFAULT '',"face_id" VARBINARY(42),"face_dist" real DEFAULT -1,"embeddings_json" MEDIUMBLOB,"landmarks_json" MEDIUMBLOB,"x" FLOAT,"y" FLOAT,"w" FLOAT,"h" FLOAT,"q" integer,"size" integer DEFAULT -1,"score" SMALLINT,"thumb" VARBINARY(128) DEFAULT '',"matched_at" datetime,"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("marker_uid"));
CREATE INDEX idx_markers_subj_uid_src ON "markers"(subj_uid, subj_src) ;
CREATE INDEX idx_markers_face_id ON "markers"(face_id) ;
CREATE INDEX idx_markers_thumb ON "markers"("thumb") ;
CREATE INDEX idx_markers_matched_at ON "markers"(matched_at) ;
CREATE INDEX idx_markers_file_uid ON "markers"(file_uid) ;
CREATE INDEX idx_photos_ymd ON "photos"(photo_day) ;
CREATE INDEX idx_albums_thumb ON "albums"("thumb") ;
CREATE INDEX idx_albums_album_title ON "albums"(album_title) ;
CREATE INDEX idx_albums_ymd ON "albums"(album_day) ;
CREATE TABLE IF NOT EXISTS "subjects" ("subj_uid" VARBINARY(42),"subj_type" VARBINARY(8) DEFAULT '',"subj_src" VARBINARY(8) DEFAULT '',"subj_slug" VARBINARY(255) DEFAULT '',"subj_name" VARCHAR(255) DEFAULT '',"subj_alias" VARCHAR(255) DEFAULT '',"subj_bio" TEXT,"subj_notes" TEXT,"subj_favorite" bool DEFAULT false,"subj_private" bool DEFAULT false,"subj_excluded" bool DEFAULT false,"file_count" integer DEFAULT 0,"thumb" VARBINARY(128) DEFAULT '',"thumb_src" VARBINARY(8) DEFAULT '',"metadata_json" MEDIUMBLOB,"created_at" datetime,"updated_at" datetime,"deleted_at" datetime , "subj_hidden" bool DEFAULT false, "photo_count" integer DEFAULT 0, "subj_about" varchar(512), PRIMARY KEY ("subj_uid"));
CREATE INDEX idx_subjects_subj_slug ON "subjects"(subj_slug) ;
CREATE INDEX idx_subjects_thumb ON "subjects"("thumb") ;
CREATE INDEX idx_subjects_deleted_at ON "subjects"(deleted_at) ;
CREATE UNIQUE INDEX uix_subjects_subj_name ON "subjects"(subj_name) ;
CREATE TABLE IF NOT EXISTS "faces" ("id" VARBINARY(42),"face_src" VARBINARY(8),"face_hidden" bool,"subj_uid" VARBINARY(42) DEFAULT '',"samples" integer,"sample_radius" real,"collisions" integer,"collision_radius" real,"embedding_json" MEDIUMBLOB,"matched_at" datetime,"created_at" datetime,"updated_at" datetime , "face_kind" integer, PRIMARY KEY ("id"));
CREATE INDEX idx_faces_subj_uid ON "faces"(subj_uid) ;
CREATE TABLE IF NOT EXISTS "migrations" ("id" varchar(16),"dialect" varchar(16),"error" varchar(255),"source" varchar(16),"started_at" datetime,"finished_at" datetime , "stage" varchar(16), PRIMARY KEY ("id"));
CREATE INDEX idx_places_place_district ON "places"(place_district) ;
CREATE INDEX idx_places_place_city ON "places"(place_city) ;
CREATE INDEX idx_places_place_state ON "places"(place_state) ;
CREATE INDEX idx_albums_album_state ON "albums"(album_state) ;
CREATE INDEX idx_files_photo_taken_at ON "files"(photo_taken_at) ;
CREATE INDEX idx_files_media_utc ON "files"(media_utc) ;
CREATE INDEX idx_albums_album_filter ON albums (album_filter);
CREATE UNIQUE INDEX idx_files_search_media ON files (media_id);
CREATE UNIQUE INDEX idx_files_search_timeline ON files (time_index);
CREATE INDEX idx_services_deleted_at ON "services"(deleted_at) ;
CREATE TABLE IF NOT EXISTS "photos_users" ("uid" VARBINARY(42),"user_uid" VARBINARY(42),"team_uid" VARBINARY(42),"perm" integer , PRIMARY KEY ("uid","user_uid"));
CREATE INDEX idx_photos_users_user_uid ON "photos_users"(user_uid) ;
CREATE INDEX idx_photos_users_team_uid ON "photos_users"(team_uid) ;
CREATE TABLE IF NOT EXISTS "auth_users_shares" ("user_uid" VARBINARY(42),"share_uid" VARBINARY(42),"link_uid" VARBINARY(42),"expires_at" datetime,"comment" varchar(512),"perm" integer,"ref_id" VARBINARY(16),"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("user_uid","share_uid"));
CREATE INDEX idx_auth_users_shares_share_uid ON "auth_users_shares"(share_uid) ;
CREATE INDEX idx_auth_users_shares_expires_at ON "auth_users_shares"(expires_at) ;
CREATE TABLE IF NOT EXISTS "auth_users_details" ("user_uid" VARBINARY(42),"subj_uid" VARBINARY(42),"subj_src" VARBINARY(8) DEFAULT '',"place_id" VARBINARY(42) DEFAULT 'zz',"place_src" VARBINARY(8),"cell_id" VARBINARY(42) DEFAULT 'zz',"birth_year" integer,"birth_month" integer,"birth_day" integer,"name_title" varchar(32),"given_name" varchar(64),"middle_name" varchar(64),"family_name" varchar(64),"name_suffix" varchar(32),"nick_name" varchar(64),"name_src" VARBINARY(8),"user_gender" varchar(16),"user_about" varchar(512),"user_bio" varchar(512),"user_location" varchar(512),"user_country" VARBINARY(2),"user_phone" varchar(32),"site_url" VARBINARY(512),"profile_url" VARBINARY(512),"feed_url" VARBINARY(512),"avatar_url" VARBINARY(512),"org_title" varchar(64),"org_name" varchar(128),"org_email" varchar(255),"org_phone" varchar(32),"org_url" VARBINARY(512),"id_url" VARBINARY(512),"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("user_uid"));
CREATE INDEX idx_auth_users_details_subj_uid ON "auth_users_details"(subj_uid) ;
CREATE INDEX idx_auth_users_details_place_id ON "auth_users_details"(place_id) ;
CREATE INDEX idx_auth_users_details_cell_id ON "auth_users_details"(cell_id) ;
CREATE INDEX idx_auth_users_details_org_email ON "auth_users_details"(org_email) ;
-- Session store for the auth subsystem (tokens stored inline).
CREATE TABLE IF NOT EXISTS "auth_sessions" ("id" VARBINARY(2048),"client_ip" varchar(64),"user_uid" VARBINARY(42) DEFAULT '',"user_name" varchar(64),"auth_provider" VARBINARY(128) DEFAULT '',"auth_method" VARBINARY(128) DEFAULT '',"auth_domain" VARBINARY(255) DEFAULT '',"auth_id" VARBINARY(128) DEFAULT '',"auth_scope" varchar(1024) DEFAULT '',"last_active" bigint,"sess_expires" bigint,"sess_timeout" bigint,"preview_token" VARBINARY(64) DEFAULT '',"download_token" VARBINARY(64) DEFAULT '',"access_token" VARBINARY(4096) DEFAULT '',"refresh_token" VARBINARY(512) DEFAULT '',"id_token" VARBINARY(1024) DEFAULT '',"user_agent" varchar(512),"data_json" VARBINARY(4096),"ref_id" VARBINARY(16) DEFAULT '',"login_ip" varchar(64),"login_at" datetime,"created_at" datetime,"updated_at" datetime , "client_uid" VARBINARY(42) DEFAULT '', "client_name" varchar(200) DEFAULT '', "grant_type" VARBINARY(64) DEFAULT '', PRIMARY KEY ("id"));
CREATE INDEX idx_auth_sessions_client_ip ON "auth_sessions"(client_ip) ;
CREATE INDEX idx_auth_sessions_user_uid ON "auth_sessions"(user_uid) ;
CREATE INDEX idx_auth_sessions_user_name ON "auth_sessions"(user_name) ;
CREATE INDEX idx_auth_sessions_auth_id ON "auth_sessions"(auth_id) ;
CREATE INDEX idx_auth_sessions_sess_expires ON "auth_sessions"(sess_expires) ;
CREATE TABLE IF NOT EXISTS "auth_users_settings" ("user_uid" VARBINARY(42),"ui_theme" VARBINARY(32),"ui_language" VARBINARY(32),"ui_time_zone" VARBINARY(64),"maps_style" VARBINARY(32),"maps_animate" integer,"index_path" VARBINARY(1024),"index_rescan" integer,"import_path" VARBINARY(1024),"import_move" integer,"upload_path" VARBINARY(1024),"default_page" VARBINARY(128),"created_at" datetime,"updated_at" datetime , "download_originals" integer DEFAULT 0, "download_media_raw" integer DEFAULT 0, "download_media_sidecar" integer DEFAULT 0, PRIMARY KEY ("user_uid"));
CREATE INDEX idx_photos_published_at ON "photos"(published_at) ;
CREATE INDEX idx_photos_created_by ON "photos"(created_by) ;
-- Current user account table (supersedes the legacy "users" table above).
CREATE TABLE IF NOT EXISTS "auth_users" ("id" integer primary key autoincrement,"user_uuid" VARBINARY(64),"user_uid" VARBINARY(42),"auth_provider" VARBINARY(128) DEFAULT '',"auth_method" VARBINARY(128) DEFAULT '',"auth_issuer" VARBINARY(255) DEFAULT '',"auth_id" VARBINARY(128) DEFAULT '',"user_name" varchar(64),"display_name" varchar(200),"user_email" varchar(255),"backup_email" varchar(255),"user_role" varchar(64) DEFAULT '',"user_attr" varchar(1024),"super_admin" bool,"can_login" bool,"login_at" datetime,"expires_at" datetime,"webdav" bool,"base_path" VARBINARY(1024),"upload_path" VARBINARY(1024),"can_invite" bool,"invite_token" VARBINARY(64),"invited_by" varchar(64),"verify_token" VARBINARY(64),"verified_at" datetime,"consent_at" datetime,"born_at" datetime,"reset_token" VARBINARY(64),"preview_token" VARBINARY(64),"download_token" VARBINARY(64),"thumb" VARBINARY(128) DEFAULT '',"thumb_src" VARBINARY(8) DEFAULT '',"ref_id" VARBINARY(16),"created_at" datetime,"updated_at" datetime,"deleted_at" datetime);
CREATE INDEX idx_auth_users_auth_id ON "auth_users"(auth_id) ;
CREATE INDEX idx_auth_users_user_email ON "auth_users"(user_email) ;
CREATE INDEX idx_auth_users_invite_token ON "auth_users"(invite_token) ;
CREATE INDEX idx_auth_users_born_at ON "auth_users"(born_at) ;
CREATE INDEX idx_auth_users_thumb ON "auth_users"("thumb") ;
CREATE INDEX idx_auth_users_deleted_at ON "auth_users"(deleted_at) ;
CREATE INDEX idx_auth_users_user_uuid ON "auth_users"(user_uuid) ;
CREATE INDEX idx_auth_users_expires_at ON "auth_users"(expires_at) ;
CREATE INDEX idx_auth_users_user_name ON "auth_users"(user_name) ;
CREATE UNIQUE INDEX uix_auth_users_user_uid ON "auth_users"(user_uid) ;
CREATE INDEX idx_files_published_at ON "files"(published_at) ;
CREATE TABLE IF NOT EXISTS "reactions" ("uid" VARBINARY(42),"user_uid" VARBINARY(42),"reaction" VARBINARY(64),"reacted" integer,"reacted_at" datetime , PRIMARY KEY ("uid","user_uid","reaction"));
CREATE INDEX idx_reactions_reacted_at ON "reactions"(reacted_at) ;
CREATE INDEX idx_folders_published_at ON "folders"(published_at) ;
CREATE INDEX idx_labels_published_at ON "labels"(published_at) ;
CREATE INDEX idx_albums_created_by ON "albums"(created_by) ;
CREATE INDEX idx_albums_published_at ON "albums"(published_at) ;
CREATE TABLE IF NOT EXISTS "albums_users" ("uid" VARBINARY(42),"user_uid" VARBINARY(42),"team_uid" VARBINARY(42),"perm" integer , PRIMARY KEY ("uid","user_uid"));
CREATE INDEX idx_albums_users_user_uid ON "albums_users"(user_uid) ;
CREATE INDEX idx_albums_users_team_uid ON "albums_users"(team_uid) ;
CREATE INDEX idx_links_created_by ON "links"(created_by) ;
CREATE INDEX idx_files_missing_root ON files (file_missing, file_root);
CREATE TABLE IF NOT EXISTS "versions" ("id" integer primary key autoincrement,"version" varchar(255),"edition" varchar(255),"error" varchar(255),"created_at" datetime,"updated_at" datetime,"migrated_at" datetime );
CREATE UNIQUE INDEX idx_version_edition ON "versions"("version", "edition") ;
CREATE INDEX idx_files_file_error ON "files"(file_error) ;
CREATE TABLE IF NOT EXISTS "auth_clients" ("client_uid" VARBINARY(42),"user_uid" VARBINARY(42) DEFAULT '',"user_name" varchar(200),"client_name" varchar(200),"client_role" varchar(64) DEFAULT '',"client_type" VARBINARY(16),"client_url" VARBINARY(255) DEFAULT '',"callback_url" VARBINARY(255) DEFAULT '',"auth_provider" VARBINARY(128) DEFAULT '',"auth_method" VARBINARY(128) DEFAULT '',"auth_scope" varchar(1024) DEFAULT '',"auth_expires" bigint,"auth_tokens" bigint,"auth_enabled" bool,"last_active" bigint,"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("client_uid"));
CREATE INDEX idx_auth_clients_user_name ON "auth_clients"(user_name) ;
CREATE INDEX idx_auth_clients_user_uid ON "auth_clients"(user_uid) ;
CREATE TABLE IF NOT EXISTS "passcodes" ("uid" VARBINARY(255),"key_type" varchar(64) DEFAULT '',"key_url" varchar(2048) DEFAULT '',"recovery_code" varchar(255) DEFAULT '',"verified_at" datetime,"activated_at" datetime,"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("uid","key_type"));
CREATE INDEX idx_auth_sessions_client_uid ON "auth_sessions"(client_uid) ;

View file

@ -9,8 +9,8 @@ import (
"sync"
"time"
"github.com/jinzhu/gorm"
"github.com/ulule/deepcopier"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/entity/sortby"
"github.com/photoprism/photoprism/internal/event"
@ -48,36 +48,36 @@ type Albums []Album
// Album represents a photo album and its metadata, including filter definitions for virtual albums.
type Album struct {
ID uint `gorm:"primary_key" json:"ID" yaml:"-"`
AlbumUID string `gorm:"type:VARBINARY(42);unique_index;" json:"UID" yaml:"UID"`
ParentUID string `gorm:"type:VARBINARY(42);default:'';" json:"ParentUID,omitempty" yaml:"ParentUID,omitempty"`
AlbumSlug string `gorm:"type:VARBINARY(160);index;" json:"Slug" yaml:"Slug"`
AlbumPath string `gorm:"type:VARCHAR(1024);index;" json:"Path,omitempty" yaml:"Path,omitempty"`
AlbumType string `gorm:"type:VARBINARY(8);default:'album';" json:"Type" yaml:"Type,omitempty"`
AlbumTitle string `gorm:"type:VARCHAR(160);index;" json:"Title" yaml:"Title"`
AlbumLocation string `gorm:"type:VARCHAR(160);" json:"Location" yaml:"Location,omitempty"`
AlbumCategory string `gorm:"type:VARCHAR(100);index;" json:"Category" yaml:"Category,omitempty"`
AlbumCaption string `gorm:"type:VARCHAR(1024);" json:"Caption" yaml:"Caption,omitempty"`
AlbumDescription string `gorm:"type:VARCHAR(2048);" json:"Description" yaml:"Description,omitempty"`
AlbumNotes string `gorm:"type:VARCHAR(1024);" json:"Notes" yaml:"Notes,omitempty"`
AlbumFilter string `gorm:"type:VARBINARY(2048);" json:"Filter" yaml:"Filter,omitempty"`
AlbumOrder string `gorm:"type:VARBINARY(32);" json:"Order" yaml:"Order,omitempty"`
AlbumTemplate string `gorm:"type:VARBINARY(255);" json:"Template" yaml:"Template,omitempty"`
AlbumState string `gorm:"type:VARCHAR(100);index;" json:"State" yaml:"State,omitempty"`
AlbumCountry string `gorm:"type:VARBINARY(2);index:idx_albums_country_year_month;default:'zz';" json:"Country" yaml:"Country,omitempty"`
AlbumYear int `gorm:"index:idx_albums_ymd;index:idx_albums_country_year_month;" json:"Year" yaml:"Year,omitempty"`
AlbumMonth int `gorm:"index:idx_albums_ymd;index:idx_albums_country_year_month;" json:"Month" yaml:"Month,omitempty"`
AlbumDay int `gorm:"index:idx_albums_ymd;" json:"Day" yaml:"Day,omitempty"`
AlbumFavorite bool `json:"Favorite" yaml:"Favorite,omitempty"`
AlbumPrivate bool `json:"Private" yaml:"Private,omitempty"`
Thumb string `gorm:"type:VARBINARY(128);index;default:'';" json:"Thumb" yaml:"Thumb,omitempty"`
ThumbSrc string `gorm:"type:VARBINARY(8);default:'';" json:"ThumbSrc,omitempty" yaml:"ThumbSrc,omitempty"`
CreatedBy string `gorm:"type:VARBINARY(42);index" json:"CreatedBy,omitempty" yaml:"CreatedBy,omitempty"`
CreatedAt time.Time `json:"CreatedAt" yaml:"CreatedAt,omitempty"`
UpdatedAt time.Time `json:"UpdatedAt" yaml:"UpdatedAt,omitempty"`
PublishedAt *time.Time `sql:"index" json:"PublishedAt,omitempty" yaml:"PublishedAt,omitempty"`
DeletedAt *time.Time `sql:"index" json:"DeletedAt" yaml:"DeletedAt,omitempty"`
Photos PhotoAlbums `gorm:"foreignkey:AlbumUID;association_foreignkey:AlbumUID;" json:"-" yaml:"Photos,omitempty"`
ID uint `gorm:"primaryKey;" json:"ID" yaml:"-"`
AlbumUID string `gorm:"type:bytes;size:42;uniqueIndex;" json:"UID" yaml:"UID"`
ParentUID string `gorm:"type:bytes;size:42;default:'';" json:"ParentUID,omitempty" yaml:"ParentUID,omitempty"`
AlbumSlug string `gorm:"type:bytes;size:160;index;" json:"Slug" yaml:"Slug"`
AlbumPath string `gorm:"type:bytes;size:1024;index;" json:"Path,omitempty" yaml:"Path,omitempty"`
AlbumType string `gorm:"type:bytes;size:8;default:'album';" json:"Type" yaml:"Type,omitempty"`
AlbumTitle string `gorm:"size:160;index;" json:"Title" yaml:"Title"`
AlbumLocation string `gorm:"size:160;" json:"Location" yaml:"Location,omitempty"`
AlbumCategory string `gorm:"size:100;index;" json:"Category" yaml:"Category,omitempty"`
AlbumCaption string `gorm:"size:1024;" json:"Caption" yaml:"Caption,omitempty"`
AlbumDescription string `gorm:"size:2048;" json:"Description" yaml:"Description,omitempty"`
AlbumNotes string `gorm:"size:1024;" json:"Notes" yaml:"Notes,omitempty"`
AlbumFilter string `gorm:"type:bytes;size:2048;" json:"Filter" yaml:"Filter,omitempty"`
AlbumOrder string `gorm:"type:bytes;size:32;" json:"Order" yaml:"Order,omitempty"`
AlbumTemplate string `gorm:"type:bytes;size:255;" json:"Template" yaml:"Template,omitempty"`
AlbumState string `gorm:"size:100;index;" json:"State" yaml:"State,omitempty"`
AlbumCountry string `gorm:"type:bytes;size:2;index:idx_albums_country_year_month;default:'zz';" json:"Country" yaml:"Country,omitempty"`
AlbumYear int `gorm:"index:idx_albums_ymd;index:idx_albums_country_year_month;" json:"Year" yaml:"Year,omitempty"`
AlbumMonth int `gorm:"index:idx_albums_ymd;index:idx_albums_country_year_month;" json:"Month" yaml:"Month,omitempty"`
AlbumDay int `gorm:"index:idx_albums_ymd;" json:"Day" yaml:"Day,omitempty"`
AlbumFavorite bool `json:"Favorite" yaml:"Favorite,omitempty"`
AlbumPrivate bool `json:"Private" yaml:"Private,omitempty"`
Thumb string `gorm:"type:bytes;size:128;index;default:'';" json:"Thumb" yaml:"Thumb,omitempty"`
ThumbSrc string `gorm:"type:bytes;size:8;default:'';" json:"ThumbSrc,omitempty" yaml:"ThumbSrc,omitempty"`
CreatedBy string `gorm:"type:bytes;size:42;index" json:"CreatedBy,omitempty" yaml:"CreatedBy,omitempty"`
CreatedAt time.Time `json:"CreatedAt" yaml:"CreatedAt,omitempty"`
UpdatedAt time.Time `json:"UpdatedAt" yaml:"UpdatedAt,omitempty"`
PublishedAt *time.Time `sql:"index" json:"PublishedAt,omitempty" yaml:"PublishedAt,omitempty"`
DeletedAt gorm.DeletedAt `sql:"index" json:"DeletedAt" yaml:"DeletedAt,omitempty"`
Photos []PhotoAlbum `gorm:"foreignkey:AlbumUID;references:AlbumUID" json:"-" yaml:"Photos,omitempty"`
}
// AfterUpdate flushes the album cache when an album is updated.
@ -101,7 +101,7 @@ func (Album) TableName() string {
func UpdateAlbum(albumUID string, values interface{}) (err error) {
if rnd.InvalidUID(albumUID, AlbumUID) {
return fmt.Errorf("album: invalid uid %s", clean.Log(albumUID))
} else if err = Db().Model(Album{}).Where("album_uid = ?", albumUID).UpdateColumns(values).Error; err != nil {
} else if err = Db().Model(&Album{}).Where("album_uid = ?", albumUID).UpdateColumns(values).Error; err != nil {
return err
}
@ -474,11 +474,21 @@ func FindAlbum(find Album) *Album {
return nil
}
} else if find.AlbumTitle != "" && find.AlbumSlug != "" && find.AlbumSlug != UnknownSlug {
stmt = stmt.Where("album_slug = ? OR album_title LIKE ?", find.AlbumSlug, find.AlbumTitle)
switch DbDialect() {
case Postgres:
stmt = stmt.Where("album_slug = ? OR album_title ILIKE ?", find.AlbumSlug, find.AlbumTitle)
default:
stmt = stmt.Where("album_slug = ? OR album_title LIKE ?", find.AlbumSlug, find.AlbumTitle)
}
} else if find.AlbumSlug != "" && find.AlbumSlug != UnknownSlug {
stmt = stmt.Where("album_slug = ?", find.AlbumSlug)
} else if find.AlbumTitle != "" {
stmt = stmt.Where("album_title LIKE ?", find.AlbumTitle)
switch DbDialect() {
case Postgres:
stmt = stmt.Where("album_title ILIKE ?", find.AlbumTitle)
default:
stmt = stmt.Where("album_title LIKE ?", find.AlbumTitle)
}
} else {
return nil
}
@ -516,14 +526,14 @@ func (m *Album) Find() *Album {
}
// BeforeCreate creates a random UID if needed before inserting a new row to the database.
func (m *Album) BeforeCreate(scope *gorm.Scope) error {
func (m *Album) BeforeCreate(scope *gorm.DB) error {
if rnd.IsUID(m.AlbumUID, AlbumUID) {
return nil
}
m.AlbumUID = rnd.GenerateUID(AlbumUID)
return scope.SetColumn("AlbumUID", m.AlbumUID)
scope.Statement.SetColumn("AlbumUID", m.AlbumUID)
return scope.Error
}
// String returns the id or name as string.
@ -821,7 +831,7 @@ func (m *Album) Delete() error {
return err
} else {
m.UpdatedAt = now
m.DeletedAt = &now
m.DeletedAt = gorm.DeletedAt{Time: now, Valid: true}
FlushAlbumCache()
}
@ -853,11 +863,7 @@ func (m *Album) DeletePermanently() error {
// Deleted tests if the entity is deleted.
func (m *Album) Deleted() bool {
if m.DeletedAt == nil {
return false
}
return !m.DeletedAt.IsZero()
return m.DeletedAt.Valid
}
// Restore restores the entity in the database.
@ -874,7 +880,7 @@ func (m *Album) Restore() error {
return err
}
m.DeletedAt = nil
m.DeletedAt = gorm.DeletedAt{}
m.PublishCountChange(1)
event.PublishUserEntities("albums", event.EntityCreated, []*Album{m}, m.CreatedBy)

View file

@ -1,10 +1,12 @@
package entity
import (
"errors"
"fmt"
"time"
gc "github.com/patrickmn/go-cache"
"gorm.io/gorm"
"github.com/photoprism/photoprism/pkg/clean"
"github.com/photoprism/photoprism/pkg/rnd"
@ -33,7 +35,8 @@ func CachedAlbumByUID(uid string) (m Album, err error) {
// Find in database.
m = Album{}
if r := Db().First(&m, "album_uid = ?", uid); r.RecordNotFound() {
r := Db().First(&m, "album_uid = ?", uid)
if errors.Is(r.Error, gorm.ErrRecordNotFound) {
return m, fmt.Errorf("album not found")
} else if r.Error != nil {
return m, r.Error

View file

@ -2,6 +2,8 @@ package entity
import (
"time"
"gorm.io/gorm"
)
type AlbumMap map[string]Album
@ -46,7 +48,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"holiday-2030": {
ID: 1000001,
@ -71,7 +73,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"berlin-2019": {
ID: 1000002,
@ -96,7 +98,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"april-1990": {
ID: 1000003,
@ -121,7 +123,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"import": {
ID: 1000004,
@ -146,7 +148,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"emptyMoment": {
ID: 1000005,
@ -171,7 +173,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"2016-04": {
ID: 1000006,
@ -195,7 +197,7 @@ var AlbumFixtures = AlbumMap{
AlbumFavorite: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"september-2021": {
ID: 1000007,
@ -219,7 +221,7 @@ var AlbumFixtures = AlbumMap{
AlbumFavorite: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"california-usa": {
ID: 1000008,
@ -243,7 +245,7 @@ var AlbumFixtures = AlbumMap{
AlbumFavorite: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"california-duplicate-1": {
ID: 1000009,
@ -267,7 +269,7 @@ var AlbumFixtures = AlbumMap{
AlbumFavorite: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"california-duplicate-2": {
ID: 1000010,
@ -291,7 +293,7 @@ var AlbumFixtures = AlbumMap{
AlbumFavorite: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"&ilikefood": {
ID: 1000011,
@ -316,7 +318,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"i-love-%-dog": {
ID: 1000012,
@ -341,7 +343,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"%gold": {
ID: 1000013,
@ -366,7 +368,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"sale%": {
ID: 1000014,
@ -391,7 +393,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"pets&dogs": {
ID: 1000015,
@ -416,7 +418,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"light&": {
ID: 1000016,
@ -441,7 +443,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"'family": {
ID: 1000017,
@ -466,7 +468,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"father's-day": {
ID: 1000018,
@ -491,7 +493,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"ice-cream'": {
ID: 1000019,
@ -516,7 +518,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"*forrest": {
ID: 1000020,
@ -541,7 +543,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"my*kids": {
ID: 1000021,
@ -566,7 +568,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"yoga***": {
ID: 1000022,
@ -591,7 +593,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"|banana": {
ID: 1000023,
@ -616,7 +618,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"blue|": {
ID: 1000024,
@ -641,7 +643,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"345-shirt": {
ID: 1000025,
@ -666,7 +668,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"color-555-blue": {
ID: 1000026,
@ -691,7 +693,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"route-66": {
ID: 1000027,
@ -716,7 +718,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2024, 3, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"red|green": {
ID: 1000028,
@ -741,7 +743,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2016, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2025, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"germany": {
ID: 1000029,
@ -766,7 +768,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"mexico": {
ID: 1000030,
@ -791,7 +793,32 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"cows": {
ID: 1000035,
AlbumUID: "as6sg6bipotaajfa",
AlbumSlug: "cows",
AlbumPath: "",
AlbumType: AlbumMoment,
AlbumTitle: "Cows",
AlbumFilter: "public:true label:cow",
AlbumLocation: "",
AlbumCategory: "",
AlbumCaption: "",
AlbumDescription: "",
AlbumNotes: "",
AlbumOrder: "name",
AlbumTemplate: "",
AlbumCountry: "zz",
AlbumYear: 0,
AlbumMonth: 0,
AlbumDay: 0,
AlbumFavorite: false,
AlbumPrivate: false,
CreatedAt: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: gorm.DeletedAt{},
},
"november-2015": {
ID: 1000031,
@ -816,7 +843,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"holiday": {
ID: 1000032,
@ -841,7 +868,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2019, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"france-2020": {
ID: 1000033,
@ -866,7 +893,7 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2018, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
"greece-2024": {
ID: 1000034,
@ -891,13 +918,18 @@ var AlbumFixtures = AlbumMap{
AlbumPrivate: false,
CreatedAt: time.Date(2024, 7, 1, 0, 0, 0, 0, time.UTC),
UpdatedAt: time.Date(2025, 2, 1, 0, 0, 0, 0, time.UTC),
DeletedAt: nil,
DeletedAt: gorm.DeletedAt{},
},
}
// CreateAlbumFixtures inserts known entities into the database for testing.
func CreateAlbumFixtures() {
for _, entity := range AlbumFixtures {
Db().Create(&entity)
firstEntity := &Album{}
if err := Db().Model(&Album{}).Where("id = ?", entity.ID).First(&firstEntity).Error; err != nil {
Db().Create(&entity)
} else {
Db().Save(&entity)
}
}
}

View file

@ -229,11 +229,11 @@ func TestAddPhotoToUserAlbumsConcurrentCreate(t *testing.T) {
t.Fatalf("expected a single album, got %d", len(albums))
}
var relationCount int
var relationCount int64
if err := Db().Table(PhotoAlbum{}.TableName()).Where("album_uid = ?", albums[0].AlbumUID).Count(&relationCount).Error; err != nil {
t.Fatal(err)
}
assert.Equal(t, len(photos), relationCount)
assert.Equal(t, len(photos), int(relationCount))
}
// TestNewAlbum exercises the related album behavior.
@ -491,6 +491,26 @@ func TestFindAlbum(t *testing.T) {
assert.Equal(t, "April 1990", result.AlbumTitle)
})
t.Run("AlbumFilterNoSlug", func(t *testing.T) {
album := Album{AlbumSlug: UnknownSlug, AlbumType: AlbumFolder, AlbumFilter: `path:"1990/04" public:true`}
result := FindAlbum(album)
if result == nil {
t.Fatal("album should not be nil")
}
assert.Equal(t, "April 1990", result.AlbumTitle)
})
t.Run("AlbumTitleNoSlug", func(t *testing.T) {
album := Album{AlbumSlug: UnknownSlug, AlbumType: AlbumManual, AlbumTitle: `route%`}
result := FindAlbum(album)
if result == nil {
t.Fatal("album should not be nil")
}
assert.Equal(t, "Route 66", result.AlbumTitle)
})
t.Run("AlbumManual", func(t *testing.T) {
album := Album{AlbumSlug: "berlin-2019", AlbumType: AlbumManual}
result := FindAlbum(album)

View file

@ -7,9 +7,9 @@ import (
// AlbumUser maps an album to a user or team and stores the associated permissions.
type AlbumUser struct {
UID string `gorm:"type:VARBINARY(42);primary_key;auto_increment:false" json:"UID" yaml:"UID"`
UserUID string `gorm:"type:VARBINARY(42);primary_key;auto_increment:false;index" json:"UserUID,omitempty" yaml:"UserUID,omitempty"`
TeamUID string `gorm:"type:VARBINARY(42);index" json:"TeamUID,omitempty" yaml:"TeamUID,omitempty"`
UID string `gorm:"type:bytes;size:42;primaryKey;autoIncrement:false" json:"UID" yaml:"UID"`
UserUID string `gorm:"type:bytes;size:42;primaryKey;autoIncrement:false;index" json:"UserUID,omitempty" yaml:"UserUID,omitempty"`
TeamUID string `gorm:"type:bytes;size:42;index" json:"TeamUID,omitempty" yaml:"TeamUID,omitempty"`
Perm uint `json:"Perm,omitempty" yaml:"Perm,omitempty"`
}

View file

@ -0,0 +1,82 @@
package entity
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/photoprism/photoprism/pkg/rnd"
)
// TestNewAlbumUser verifies that NewAlbumUser copies every argument into the
// corresponding AlbumUser struct field without modification.
func TestNewAlbumUser(t *testing.T) {
	t.Run("Ok", func(t *testing.T) {
		var (
			albumUID = rnd.GenerateUID('n')
			ownerUID = rnd.GenerateUID(UserUID)
			teamUID  = rnd.GenerateUID('t')
			perm     = uint(15)
		)

		m := NewAlbumUser(albumUID, ownerUID, teamUID, perm)

		assert.Equal(t, albumUID, m.UID)
		assert.Equal(t, ownerUID, m.UserUID)
		assert.Equal(t, teamUID, m.TeamUID)
		assert.Equal(t, perm, m.Perm)
	})
}
// TestCreateAlbumUser checks that a freshly constructed AlbumUser can be
// inserted into the database and permanently removed again.
func TestCreateAlbumUser(t *testing.T) {
	t.Run("Ok", func(t *testing.T) {
		uid := rnd.GenerateUID('n')
		userUID := rnd.GenerateUID(UserUID)
		teamUID := rnd.GenerateUID('t')
		perm := uint(15)

		au := NewAlbumUser(uid, userUID, teamUID, perm)

		// NoError (instead of Empty) is the idiomatic testify check for
		// errors and reports the error message when the assertion fails.
		assert.NoError(t, au.Create())

		// Unscoped delete removes the row permanently so the test
		// leaves no fixtures behind.
		assert.NoError(t, Db().Unscoped().Delete(au).Error)
	})
}
// TestSaveAlbumUser checks that an existing AlbumUser row can be updated
// through Save after its permission value has been changed.
func TestSaveAlbumUser(t *testing.T) {
	t.Run("Ok", func(t *testing.T) {
		uid := rnd.GenerateUID('n')
		userUID := rnd.GenerateUID(UserUID)
		teamUID := rnd.GenerateUID('t')
		perm := uint(15)

		au := NewAlbumUser(uid, userUID, teamUID, perm)

		// NoError (instead of Empty) is the idiomatic testify check for
		// errors and reports the error message when the assertion fails.
		assert.NoError(t, au.Create())

		// Change a non-key column and persist the update.
		au.Perm = uint(64)
		assert.NoError(t, au.Save())

		// Unscoped delete removes the row permanently so the test
		// leaves no fixtures behind.
		assert.NoError(t, Db().Unscoped().Delete(au).Error)
	})
}
// TestFirstOrCreateAlbumUser checks that FirstOrCreateAlbumUser creates a new
// entity on the first call and finds the same entity on a subsequent lookup.
func TestFirstOrCreateAlbumUser(t *testing.T) {
	t.Run("Ok", func(t *testing.T) {
		uid := rnd.GenerateUID('n')
		userUID := rnd.GenerateUID(UserUID)
		teamUID := rnd.GenerateUID('t')
		perm := uint(15)

		au := NewAlbumUser(uid, userUID, teamUID, perm)

		// First call should create the entity and return it unchanged.
		created := FirstOrCreateAlbumUser(au)
		if created == nil {
			t.Fatal("created album user must not be nil")
		}
		assert.Equal(t, au, created)

		// A second call with only the UID set must find the existing row
		// instead of creating a duplicate.
		find := AlbumUser{UID: uid}
		found := FirstOrCreateAlbumUser(&find)
		if found == nil {
			t.Fatal("found album user must not be nil")
		}
		assert.Equal(t, au, found)

		// NoError (instead of Empty) reports the error message on failure;
		// Unscoped delete removes the row permanently.
		assert.NoError(t, Db().Unscoped().Delete(au).Error)
	})
}

View file

@ -4,6 +4,7 @@ import (
"fmt"
"os"
"path/filepath"
"strings"
"sync"
"time"
@ -20,7 +21,7 @@ func (m *Album) Yaml() (out []byte, err error) {
m.CreatedAt = m.CreatedAt.UTC().Truncate(time.Second)
m.UpdatedAt = m.UpdatedAt.UTC().Truncate(time.Second)
if err = Db().Model(m).Association("Photos").Find(&m.Photos).Error; err != nil {
if err = Db().Model(m).Association("Photos").Find(&m.Photos); err != nil {
log.Errorf("album: %s (yaml)", err)
return out, err
}
@ -131,7 +132,20 @@ func (m *Album) LoadFromYaml(fileName string) error {
}
if err = yaml.Unmarshal(data, m); err != nil {
return err
if strings.Contains(err.Error(), "gorm.DeletedAt") && strings.Count(err.Error(), "\n") == 1 {
// try and fix the gorm.DeletedAt structure change
deletedAt := JustDeletedAt{}
if err = yaml.Unmarshal(data, &deletedAt); err != nil {
log.Errorf("album: yaml: unable to reparse DeletedAt with %s", err.Error())
return err
} else {
m.DeletedAt.Time = deletedAt.DeletedAt
m.DeletedAt.Valid = true
}
} else {
return err
}
}
return nil

View file

@ -1,12 +1,17 @@
package entity
import (
"errors"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/stretchr/testify/assert"
"gorm.io/gorm"
"github.com/photoprism/photoprism/pkg/dsn"
"github.com/photoprism/photoprism/pkg/fs"
)
@ -57,12 +62,12 @@ func TestAlbum_SaveAsYaml(t *testing.T) {
m = *found
}
backupPath := fs.Abs("testdata/TestAlbum_SaveAsYaml")
backupPath := fs.Abs("testdata/" + dsn.PhotoPrismTestToFolderName() + "/TestAlbum_SaveAsYaml")
fileName, relName, err := m.YamlFileName(backupPath)
assert.NoError(t, err)
assert.True(t, strings.HasSuffix(fileName, "internal/entity/testdata/TestAlbum_SaveAsYaml/album/as6sg6bxpogaaba9.yml"))
assert.True(t, strings.HasSuffix(fileName, "internal/entity/testdata/"+dsn.PhotoPrismTestToFolderName()+"/TestAlbum_SaveAsYaml/album/as6sg6bxpogaaba9.yml"))
assert.Equal(t, "album/as6sg6bxpogaaba9.yml", relName)
if err = m.SaveAsYaml(fileName); err != nil {
@ -247,4 +252,175 @@ func TestAlbum_LoadFromYaml(t *testing.T) {
assert.Error(t, err)
})
t.Run("GormV1Format", func(t *testing.T) {
backupPath, err := filepath.Abs("./testdata/albums")
if err != nil {
t.Fatal(err)
}
if err = os.MkdirAll(backupPath+"/moment", fs.ModeDir); err != nil {
t.Fatal(err)
}
testFileName := backupPath + "/moment/as6sg6bipotaajfa.yml"
_, err = os.Stat(testFileName)
if errors.Is(err, os.ErrNotExist) {
// Gorm V1 format
newYaml := []byte("UID: as6sg6bipotaajfa\nSlug: cows\nType: moment\nTitle: Cows\nFilter: public:true label:supercow\nOrder: name\nDeletedAt: 2025-06-30T10:33:49Z\nCountry: zz\nCreatedAt: 2020-01-01T00:00:00Z\nUpdatedAt: 2025-06-30T10:33:49Z\n")
err = os.WriteFile(testFileName, newYaml, 0644)
assert.NoError(t, err)
}
albumToCheck := Album{}
err = albumToCheck.LoadFromYaml(testFileName)
assert.NoError(t, err)
assert.Equal(t, "as6sg6bipotaajfa", albumToCheck.AlbumUID)
assert.Equal(t, "cows", albumToCheck.AlbumSlug)
assert.Equal(t, "moment", albumToCheck.AlbumType)
assert.Equal(t, "Cows", albumToCheck.AlbumTitle)
assert.Equal(t, "public:true label:supercow", albumToCheck.AlbumFilter)
assert.Equal(t, "name", albumToCheck.AlbumOrder)
assert.Equal(t, "zz", albumToCheck.AlbumCountry)
assert.Equal(t, time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), albumToCheck.CreatedAt)
assert.Equal(t, time.Date(2025, 6, 30, 10, 33, 49, 0, time.UTC), albumToCheck.UpdatedAt)
assert.Equal(t, gorm.DeletedAt{Time: time.Date(2025, 6, 30, 10, 33, 49, 0, time.UTC), Valid: true}, albumToCheck.DeletedAt)
if err = os.Remove(testFileName); err != nil {
t.Fatal(err)
}
})
t.Run("GormV2Format", func(t *testing.T) {
backupPath, err := filepath.Abs("./testdata/albums")
if err != nil {
t.Fatal(err)
}
if err = os.MkdirAll(backupPath+"/moment", fs.ModeDir); err != nil {
t.Fatal(err)
}
testFileName := backupPath + "/moment/as6sg6bipotaajfa.yml"
_, err = os.Stat(testFileName)
if errors.Is(err, os.ErrNotExist) {
// Gorm V2 format
newYaml := []byte("UID: as6sg6bipotaajfa\nSlug: cows\nType: moment\nTitle: Cows\nFilter: public:true label:cow\nOrder: name\nCountry: zz\nCreatedAt: 2020-01-01T00:00:00Z\nUpdatedAt: 2025-06-30T10:33:49Z\nDeletedAt:\n time: 2025-06-30T10:33:50Z\n valid: true\n")
err = os.WriteFile(testFileName, newYaml, 0644)
assert.NoError(t, err)
}
albumToCheck := Album{}
err = albumToCheck.LoadFromYaml(testFileName)
assert.NoError(t, err)
assert.Equal(t, "as6sg6bipotaajfa", albumToCheck.AlbumUID)
assert.Equal(t, "cows", albumToCheck.AlbumSlug)
assert.Equal(t, "moment", albumToCheck.AlbumType)
assert.Equal(t, "Cows", albumToCheck.AlbumTitle)
assert.Equal(t, "public:true label:cow", albumToCheck.AlbumFilter)
assert.Equal(t, "name", albumToCheck.AlbumOrder)
assert.Equal(t, "zz", albumToCheck.AlbumCountry)
assert.Equal(t, time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), albumToCheck.CreatedAt)
assert.Equal(t, time.Date(2025, 6, 30, 10, 33, 49, 0, time.UTC), albumToCheck.UpdatedAt)
assert.Equal(t, gorm.DeletedAt{Time: time.Date(2025, 6, 30, 10, 33, 50, 0, time.UTC), Valid: true}, albumToCheck.DeletedAt)
if err = os.Remove(testFileName); err != nil {
t.Fatal(err)
}
})
t.Run("GormV1Format_Bad", func(t *testing.T) {
backupPath, err := filepath.Abs("./testdata/albums")
if err != nil {
t.Fatal(err)
}
if err = os.MkdirAll(backupPath+"/moment", fs.ModeDir); err != nil {
t.Fatal(err)
}
testFileName := backupPath + "/moment/as6sg6bipotaajfa_bad.yml"
_, err = os.Stat(testFileName)
if errors.Is(err, os.ErrNotExist) {
// Gorm V1 format
newYaml := []byte("UID: as6sg6bipotaajfa\nSlug: cows\nType: moment\nTitle: Cows\nFilter: public:true label:supercow\nOrder: name\nDeletedAt: 2025-06-30T10:33:49Z\nCountry: zz\nCreatedAt: 2020-01-01T00:00:00Z\nUpdatedAt: 2025-06-30T10:33:49Z\nYear: TwentyTen\n")
err = os.WriteFile(testFileName, newYaml, 0644)
assert.NoError(t, err)
}
albumToCheck := Album{}
err = albumToCheck.LoadFromYaml(testFileName)
assert.Error(t, err)
assert.Contains(t, err.Error(), "!!timestamp")
assert.Contains(t, err.Error(), "!!str")
assert.Equal(t, "as6sg6bipotaajfa", albumToCheck.AlbumUID)
assert.Equal(t, "cows", albumToCheck.AlbumSlug)
assert.Equal(t, "moment", albumToCheck.AlbumType)
assert.Equal(t, "Cows", albumToCheck.AlbumTitle)
assert.Equal(t, "public:true label:supercow", albumToCheck.AlbumFilter)
assert.Equal(t, "name", albumToCheck.AlbumOrder)
assert.Equal(t, "zz", albumToCheck.AlbumCountry)
assert.Equal(t, time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), albumToCheck.CreatedAt)
assert.Equal(t, time.Date(2025, 6, 30, 10, 33, 49, 0, time.UTC), albumToCheck.UpdatedAt)
assert.Equal(t, gorm.DeletedAt{}, albumToCheck.DeletedAt)
if err = os.Remove(testFileName); err != nil {
t.Fatal(err)
}
})
t.Run("GormV2Format_Bad", func(t *testing.T) {
backupPath, err := filepath.Abs("./testdata/albums")
if err != nil {
t.Fatal(err)
}
if err = os.MkdirAll(backupPath+"/moment", fs.ModeDir); err != nil {
t.Fatal(err)
}
testFileName := backupPath + "/moment/as6sg6bipotaajfa_Bad.yml"
_, err = os.Stat(testFileName)
if errors.Is(err, os.ErrNotExist) {
// Gorm V2 format
newYaml := []byte("UID: as6sg6bipotaajfa\nSlug: cows\nType: moment\nTitle: Cows\nFilter: public:true label:cow\nOrder: name\nCountry: zz\nYear: TwentyTen\nCreatedAt: 2020-01-01T00:00:00Z\nUpdatedAt: 2025-06-30T10:33:49Z\nDeletedAt:\n time: 2025-06-30T10:33:50Z\n valid: true\n")
err = os.WriteFile(testFileName, newYaml, 0644)
assert.NoError(t, err)
}
albumToCheck := Album{}
err = albumToCheck.LoadFromYaml(testFileName)
assert.Error(t, err)
assert.Contains(t, err.Error(), "!!str")
assert.Equal(t, "as6sg6bipotaajfa", albumToCheck.AlbumUID)
assert.Equal(t, "cows", albumToCheck.AlbumSlug)
assert.Equal(t, "moment", albumToCheck.AlbumType)
assert.Equal(t, "Cows", albumToCheck.AlbumTitle)
assert.Equal(t, "public:true label:cow", albumToCheck.AlbumFilter)
assert.Equal(t, "name", albumToCheck.AlbumOrder)
assert.Equal(t, "zz", albumToCheck.AlbumCountry)
assert.Equal(t, time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), albumToCheck.CreatedAt)
assert.Equal(t, time.Date(2025, 6, 30, 10, 33, 49, 0, time.UTC), albumToCheck.UpdatedAt)
assert.Equal(t, gorm.DeletedAt{Time: time.Date(2025, 6, 30, 10, 33, 50, 0, time.UTC), Valid: true}, albumToCheck.DeletedAt)
if err = os.Remove(testFileName); err != nil {
t.Fatal(err)
}
})
}

View file

@ -8,7 +8,7 @@ import (
"github.com/dustin/go-humanize/english"
"github.com/gin-gonic/gin"
"github.com/jinzhu/gorm"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/auth/acl"
"github.com/photoprism/photoprism/internal/event"
@ -31,27 +31,27 @@ type Clients []Client
// Client represents an OAuth/OpenID client registered with PhotoPrism.
type Client struct {
ClientUID string `gorm:"type:VARBINARY(42);primary_key;auto_increment:false;" json:"-" yaml:"ClientUID"`
NodeUUID string `gorm:"type:VARBINARY(64);index;default:'';" json:"NodeUUID,omitempty" yaml:"NodeUUID,omitempty"`
UserUID string `gorm:"type:VARBINARY(42);index;default:'';" json:"UserUID" yaml:"UserUID,omitempty"`
ClientUID string `gorm:"type:bytes;size:42;primaryKey;autoIncrement:false;" json:"-" yaml:"ClientUID"`
NodeUUID string `gorm:"type:bytes;size:64;index;default:'';" json:"NodeUUID,omitempty" yaml:"NodeUUID,omitempty"`
UserUID string `gorm:"type:bytes;size:42;index;default:'';" json:"UserUID" yaml:"UserUID,omitempty"`
UserName string `gorm:"size:200;index;" json:"UserName" yaml:"UserName,omitempty"`
user *User `gorm:"-" yaml:"-"`
user *User `gorm:"foreignKey:UserUID;references:UserUID" yaml:"-"`
AppName string `gorm:"size:64;" json:"AppName" yaml:"AppName,omitempty"`
AppVersion string `gorm:"size:64;" json:"AppVersion" yaml:"AppVersion,omitempty"`
ClientName string `gorm:"size:200;" json:"ClientName" yaml:"ClientName,omitempty"`
ClientRole string `gorm:"size:64;default:'';" json:"ClientRole" yaml:"ClientRole,omitempty"`
ClientType string `gorm:"type:VARBINARY(16)" json:"ClientType" yaml:"ClientType,omitempty"`
ClientURL string `gorm:"type:VARBINARY(255);default:'';column:client_url;" json:"ClientURL" yaml:"ClientURL,omitempty"`
CallbackURL string `gorm:"type:VARBINARY(255);default:'';column:callback_url;" json:"CallbackURL" yaml:"CallbackURL,omitempty"`
AuthProvider string `gorm:"type:VARBINARY(128);default:'';" json:"AuthProvider" yaml:"AuthProvider,omitempty"`
AuthMethod string `gorm:"type:VARBINARY(128);default:'';" json:"AuthMethod" yaml:"AuthMethod,omitempty"`
ClientType string `gorm:"type:bytes;size:16" json:"ClientType" yaml:"ClientType,omitempty"`
ClientURL string `gorm:"type:bytes;size:255;default:'';column:client_url;" json:"ClientURL" yaml:"ClientURL,omitempty"`
CallbackURL string `gorm:"type:bytes;size:255;default:'';column:callback_url;" json:"CallbackURL" yaml:"CallbackURL,omitempty"`
AuthProvider string `gorm:"type:bytes;size:128;default:'';" json:"AuthProvider" yaml:"AuthProvider,omitempty"`
AuthMethod string `gorm:"type:bytes;size:128;default:'';" json:"AuthMethod" yaml:"AuthMethod,omitempty"`
AuthScope string `gorm:"size:1024;default:'';" json:"AuthScope" yaml:"AuthScope,omitempty"`
AuthExpires int64 `json:"AuthExpires" yaml:"AuthExpires,omitempty"`
AuthTokens int64 `json:"AuthTokens" yaml:"AuthTokens,omitempty"`
AuthEnabled bool `json:"AuthEnabled" yaml:"AuthEnabled,omitempty"`
RefreshToken string `gorm:"type:VARBINARY(2048);column:refresh_token;default:'';" json:"-" yaml:"-"`
IdToken string `gorm:"type:VARBINARY(2048);column:id_token;default:'';" json:"IdToken,omitempty" yaml:"IdToken,omitempty"`
DataJSON json.RawMessage `gorm:"type:VARBINARY(4096);" json:"-" yaml:"Data,omitempty"`
RefreshToken string `gorm:"type:bytes;size:2048;column:refresh_token;default:'';" json:"-" yaml:"-"`
IdToken string `gorm:"type:bytes;size:2048;column:id_token;default:'';" json:"IdToken,omitempty" yaml:"IdToken,omitempty"`
DataJSON json.RawMessage `gorm:"type:bytes;size:4096;" json:"-" yaml:"Data,omitempty"`
data *ClientData `gorm:"-" yaml:"-"`
LastActive int64 `json:"LastActive" yaml:"LastActive,omitempty"`
CreatedAt time.Time `json:"CreatedAt" yaml:"-"`
@ -83,14 +83,14 @@ func NewClient() *Client {
}
// BeforeCreate creates a random UID if needed before inserting a new row to the database.
func (m *Client) BeforeCreate(scope *gorm.Scope) error {
func (m *Client) BeforeCreate(scope *gorm.DB) error {
if rnd.IsUID(m.ClientUID, ClientUID) {
return nil
}
m.ClientUID = rnd.GenerateUID(ClientUID)
return scope.SetColumn("ClientUID", m.ClientUID)
scope.Statement.SetColumn("ClientUID", m.ClientUID)
return scope.Error
}
// FindClientByUID returns the matching client or nil if it was not found.

View file

@ -151,6 +151,25 @@ var ClientFixtures = ClientMap{
AuthEnabled: true,
LastActive: 0,
},
"node": {
ClientUID: "cs5cpu17node2qo5",
UserUID: "",
UserName: "",
user: nil,
ClientName: "pp-node-fixture",
ClientRole: string(acl.RoleApp),
ClientType: authn.ClientConfidential,
ClientURL: "",
CallbackURL: "",
AuthProvider: authn.ProviderClient.String(),
AuthMethod: authn.MethodOAuth2.String(),
AuthScope: "",
AuthExpires: unix.Hour,
AuthTokens: 5,
AuthEnabled: true,
LastActive: 0,
NodeUUID: "019984c2-1e87-73a2-b734-b8d0ed31ac0c",
},
}
// CreateClientFixtures inserts known entities into the database for testing.

View file

@ -185,6 +185,7 @@ func TestClient_Create(t *testing.T) {
})
t.Run("AlreadyExists", func(t *testing.T) {
var m = ClientFixtures.Get("alice")
log.Info("Expect duplicate key violation Error or SQLSTATE from client.Create")
err := m.Create()
assert.Error(t, err)
})
@ -843,3 +844,28 @@ func TestClient_Validate(t *testing.T) {
}
})
}
// TestFindClientByNodeUUID verifies lookup of a registered client by its node UUID,
// covering a known fixture as well as invalid and empty UUID arguments.
func TestFindClientByNodeUUID(t *testing.T) {
	t.Run("node", func(t *testing.T) {
		expected := ClientFixtures.Get("node")

		m := FindClientByNodeUUID(expected.NodeUUID)

		if m == nil {
			t.Fatal("result should not be nil")
		}

		// assert.Equal takes (t, expected, actual); keep that order so
		// failure messages label the compared values correctly.
		assert.Equal(t, UserFixtures.Get("node").UserUID, m.UserUID)
		assert.Equal(t, expected.ClientUID, m.GetUID())
		assert.NotEmpty(t, m.CreatedAt)
		assert.NotEmpty(t, m.UpdatedAt)
	})
	t.Run("Invalid", func(t *testing.T) {
		// A malformed UUID must not match any client.
		assert.Nil(t, FindClientByNodeUUID("123"))
	})
	t.Run("Empty", func(t *testing.T) {
		// An empty UUID must not match any client.
		assert.Nil(t, FindClientByNodeUUID(""))
	})
}

View file

@ -10,7 +10,7 @@ import (
"github.com/dustin/go-humanize/english"
"github.com/gin-gonic/gin"
"github.com/jinzhu/gorm"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/auth/acl"
"github.com/photoprism/photoprism/internal/event"
@ -38,33 +38,33 @@ type Sessions []Session
// Session represents an authenticated user or client session persisted in the database.
type Session struct {
ID string `gorm:"type:VARBINARY(2048);primary_key;auto_increment:false;" json:"-" yaml:"ID"`
ID string `gorm:"type:bytes;size:2048;primaryKey;autoIncrement:false;" json:"-" yaml:"ID"`
authToken string `gorm:"-" yaml:"-"`
UserUID string `gorm:"type:VARBINARY(42);index;default:'';" json:"UserUID" yaml:"UserUID,omitempty"`
UserUID string `gorm:"type:bytes;size:42;index;default:'';" json:"UserUID" yaml:"UserUID,omitempty"`
UserName string `gorm:"size:200;index;" json:"UserName" yaml:"UserName,omitempty"`
user *User `gorm:"-" yaml:"-"`
ClientUID string `gorm:"type:VARBINARY(42);index;default:'';" json:"ClientUID" yaml:"ClientUID,omitempty"`
ClientUID string `gorm:"type:bytes;size:42;index;default:'';" json:"ClientUID" yaml:"ClientUID,omitempty"`
ClientName string `gorm:"size:200;default:'';" json:"ClientName" yaml:"ClientName,omitempty"`
ClientIP string `gorm:"size:64;column:client_ip;index" json:"ClientIP" yaml:"ClientIP,omitempty"`
client *Client `gorm:"-" yaml:"-"`
AuthProvider string `gorm:"type:VARBINARY(128);default:'';" json:"AuthProvider" yaml:"AuthProvider,omitempty"`
AuthMethod string `gorm:"type:VARBINARY(128);default:'';" json:"AuthMethod" yaml:"AuthMethod,omitempty"`
AuthIssuer string `gorm:"type:VARBINARY(255);default:'';" json:"AuthIssuer,omitempty" yaml:"AuthIssuer,omitempty"`
AuthID string `gorm:"type:VARBINARY(255);index;default:'';" json:"AuthID" yaml:"AuthID,omitempty"`
AuthProvider string `gorm:"type:bytes;size:128;default:'';" json:"AuthProvider" yaml:"AuthProvider,omitempty"`
AuthMethod string `gorm:"type:bytes;size:128;default:'';" json:"AuthMethod" yaml:"AuthMethod,omitempty"`
AuthIssuer string `gorm:"type:bytes;size:255;default:'';" json:"AuthIssuer,omitempty" yaml:"AuthIssuer,omitempty"`
AuthID string `gorm:"type:bytes;size:255;index;default:'';" json:"AuthID" yaml:"AuthID,omitempty"`
AuthScope string `gorm:"size:1024;default:'';" json:"AuthScope" yaml:"AuthScope,omitempty"`
GrantType string `gorm:"type:VARBINARY(64);default:'';" json:"GrantType" yaml:"GrantType,omitempty"`
GrantType string `gorm:"type:bytes;size:64;default:'';" json:"GrantType" yaml:"GrantType,omitempty"`
LastActive int64 `json:"LastActive" yaml:"LastActive,omitempty"`
SessExpires int64 `gorm:"index" json:"Expires" yaml:"Expires,omitempty"`
SessTimeout int64 `json:"Timeout" yaml:"Timeout,omitempty"`
PreviewToken string `gorm:"type:VARBINARY(64);column:preview_token;default:'';" json:"-" yaml:"-"`
DownloadToken string `gorm:"type:VARBINARY(64);column:download_token;default:'';" json:"-" yaml:"-"`
AccessToken string `gorm:"type:VARBINARY(4096);column:access_token;default:'';" json:"-" yaml:"-"`
RefreshToken string `gorm:"type:VARBINARY(2048);column:refresh_token;default:'';" json:"-" yaml:"-"`
IdToken string `gorm:"type:VARBINARY(2048);column:id_token;default:'';" json:"IdToken,omitempty" yaml:"IdToken,omitempty"`
PreviewToken string `gorm:"type:bytes;size:64;column:preview_token;default:'';" json:"-" yaml:"-"`
DownloadToken string `gorm:"type:bytes;size:64;column:download_token;default:'';" json:"-" yaml:"-"`
AccessToken string `gorm:"type:bytes;size:4096;column:access_token;default:'';" json:"-" yaml:"-"`
RefreshToken string `gorm:"type:bytes;size:2048;column:refresh_token;default:'';" json:"-" yaml:"-"`
IdToken string `gorm:"type:bytes;size:2048;column:id_token;default:'';" json:"IdToken,omitempty" yaml:"IdToken,omitempty"`
UserAgent string `gorm:"size:512;" json:"UserAgent" yaml:"UserAgent,omitempty"`
DataJSON json.RawMessage `gorm:"type:VARBINARY(4096);" json:"-" yaml:"Data,omitempty"`
DataJSON json.RawMessage `gorm:"type:bytes;size:4096;" json:"-" yaml:"Data,omitempty"`
data *SessionData `gorm:"-" yaml:"-"`
RefID string `gorm:"type:VARBINARY(16);default:'';" json:"ID" yaml:"-"`
RefID string `gorm:"type:bytes;size:16;default:'';" json:"ID" yaml:"-"`
LoginIP string `gorm:"size:64;column:login_ip" json:"LoginIP" yaml:"-"`
LoginAt time.Time `json:"LoginAt" yaml:"-"`
CreatedAt time.Time `json:"CreatedAt" yaml:"CreatedAt"`
@ -277,10 +277,11 @@ func (m *Session) Updates(values interface{}) error {
}
// BeforeCreate creates a random UID if needed before inserting a new row to the database.
func (m *Session) BeforeCreate(scope *gorm.Scope) error {
func (m *Session) BeforeCreate(scope *gorm.DB) error {
if rnd.InvalidRefID(m.RefID) {
m.RefID = rnd.RefID(SessionPrefix)
Log("session", "set ref id", scope.SetColumn("RefID", m.RefID))
scope.Statement.SetColumn("RefID", m.RefID)
Log("session", "set ref id", scope.Error)
}
if rnd.IsSessionID(m.ID) {
@ -288,8 +289,8 @@ func (m *Session) BeforeCreate(scope *gorm.Scope) error {
}
m.Regenerate()
return scope.SetColumn("ID", m.ID)
scope.Statement.SetColumn("ID", m.ID)
return scope.Error
}
// SetClient sets the client of this session.

View file

@ -1,10 +1,12 @@
package entity
import (
"errors"
"fmt"
"time"
gc "github.com/patrickmn/go-cache"
"gorm.io/gorm"
"github.com/photoprism/photoprism/internal/event"
"github.com/photoprism/photoprism/pkg/clean"
@ -38,7 +40,7 @@ func FindSession(id string) (*Session, error) {
} else if err := cached.Delete(); err != nil {
event.AuditErr([]string{cached.IP(), "session %s", "failed to delete after expiration", status.Error(err)}, cached.RefID)
}
} else if res := Db().First(&found, "id = ?", id); res.RecordNotFound() {
} else if res := Db().First(&found, "id = ?", id); errors.Is(res.Error, gorm.ErrRecordNotFound) {
return found, fmt.Errorf("invalid session")
} else if res.Error != nil {
return found, res.Error

View file

@ -7,6 +7,7 @@ import (
"github.com/photoprism/photoprism/internal/event"
"github.com/photoprism/photoprism/pkg/authn"
"github.com/photoprism/photoprism/pkg/convert"
"github.com/photoprism/photoprism/pkg/log/status"
"github.com/photoprism/photoprism/pkg/rnd"
"github.com/photoprism/photoprism/pkg/time/unix"
@ -91,7 +92,8 @@ func DeleteClientSessions(client *Client, authMethod authn.MethodType, limit int
q = q.Where("auth_method = ?", authMethod.String())
}
q = q.Order("created_at DESC").Limit(1000000000).Offset(limit)
	// NOTE: Converting the limit to int may lose precision on 32-bit platforms,
	// but an int64-sized limit is not meaningful for an offset anyway.
q = q.Order("created_at DESC").Limit(1000000000).Offset(convert.SafeInt64toint(limit))
found := Sessions{}

View file

@ -1,6 +1,8 @@
package entity
import (
"time"
"github.com/photoprism/photoprism/pkg/authn"
"github.com/photoprism/photoprism/pkg/clean"
"github.com/photoprism/photoprism/pkg/rnd"
@ -112,9 +114,9 @@ var SessionFixtures = SessionMap{
user: UserFixtures.Pointer("bob"),
UserUID: UserFixtures.Pointer("bob").UserUID,
UserName: UserFixtures.Pointer("bob").UserName,
LoginAt: Now().Add(-24),
CreatedAt: Now().Add(-24),
UpdatedAt: Now().Add(-24),
LoginAt: Now().Add(time.Millisecond * -24),
CreatedAt: Now().Add(time.Millisecond * -24),
UpdatedAt: Now().Add(time.Millisecond * -24),
},
"unauthorized": {
authToken: "69be27ac5ca305b394046a83f6fda18167ca3d3f2dbe7ac2",
@ -167,9 +169,9 @@ var SessionFixtures = SessionMap{
user: UserFixtures.Pointer("friend"),
UserUID: UserFixtures.Pointer("friend").UserUID,
UserName: UserFixtures.Pointer("friend").UserName,
LoginAt: Now().Add(-12),
CreatedAt: Now().Add(-20),
UpdatedAt: Now().Add(-12),
LoginAt: Now().Add(time.Millisecond * -12),
CreatedAt: Now().Add(time.Millisecond * -20),
UpdatedAt: Now().Add(time.Millisecond * -12),
},
"client_metrics": {
authToken: "9d8b8801ffa23eb52e08ca7766283799ddfd8dd368212345",

View file

@ -285,9 +285,28 @@ func TestSession_Create(t *testing.T) {
s.SetAuthToken(authToken)
log.Info("Expect duplicate key violation Error or SQLSTATE from session.Create")
err := s.Create()
assert.Error(t, err)
})
t.Run("BadRefIDandID", func(t *testing.T) {
authToken := "69be27ac5ca305b394046a83f6fda18167ca3d3f2dbe7cad"
s := &Session{
UserName: "freddy",
SessExpires: unix.Day * 3,
SessTimeout: unix.Now() + unix.Week,
RefID: "1234567890",
}
s.SetAuthToken(authToken)
s.ID = "toshort"
err := s.Create()
assert.Empty(t, err)
})
}
func TestSession_Save(t *testing.T) {
@ -999,6 +1018,29 @@ func TestSession_UpdateLastActive(t *testing.T) {
assert.GreaterOrEqual(t, unix.Now(), m.LastActive)
})
t.Run("SaveMethodSession", func(t *testing.T) {
expected := unix.Now() - 10
m := NewSession(unix.Day, unix.Hour)
t.Logf("Timeout: %s, Expiration: %s", m.TimeoutAt().String(), m.ExpiresAt())
assert.Equal(t, int64(0), m.LastActive)
m.LastActive = expected
m.SetMethod(authn.MethodSession)
m.SetAuthToken("69be27ac5ca305b394046a83f6fda18167ca3d3f2dbe70ca")
m.SetAuthID("MyIDString", "Testing")
m.AuthProvider = string(authn.ProviderClient)
if err := m.Create(); err != nil {
assert.Empty(t, err)
return
}
m = m.UpdateLastActive(true)
assert.Greater(t, m.LastActive, expected)
})
}
func TestSession_Expired(t *testing.T) {

Some files were not shown because too many files have changed in this diff Show more