Keith Martin 2026-01-21 21:22:42 -05:00 committed by GitHub
commit cb8dc723f7
4 changed files with 387 additions and 13 deletions


@@ -8,6 +8,7 @@ import (
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/photoprism/photoprism/internal/auth/acl"
"github.com/photoprism/photoprism/pkg/authn"
@@ -238,10 +239,11 @@ func TestSession_Create(t *testing.T) {
s.SetAuthToken("69be27ac5ca305b394046a83f6fda18167ca3d3f2dbe7xxx")
err := s.Create()
require.Nil(t, err)
if err != nil {
t.Fatal(err)
}
t.Cleanup(func() {
s.Delete()
})
m2 := FindSessionByRefID("sessxkkcxxxx")
assert.Equal(t, "charles", m2.UserName)
@@ -264,18 +266,19 @@ func TestSession_Create(t *testing.T) {
s.SetAuthToken(authToken)
err := s.Create()
require.Nil(t, err)
if err != nil {
t.Fatal(err)
}
t.Cleanup(func() {
s.Delete()
})
m2, _ := FindSession(id)
assert.NotEqual(t, "123", m2.RefID)
})
t.Run("IdAlreadyExists", func(t *testing.T) {
authToken := "69be27ac5ca305b394046a83f6fda18167ca3d3f2dbe7ac0"
m := FindSessionByRefID("sessxkkcxxxx")
assert.Empty(t, m)
s := &Session{
UserName: "charles",
SessExpires: unix.Day * 3,
@@ -283,11 +286,54 @@ func TestSession_Create(t *testing.T) {
RefID: "sessxkkcxxxx",
}
s.SetAuthToken(authToken)
s.SetAuthToken("69be27ac5ca305b394046a83f6fda18167ca3d3f2dbe7xxx")
err := s.Create()
require.Nil(t, err)
t.Cleanup(func() {
s.Delete()
})
authToken := "69be27ac5ca305b394046a83f6fda18167ca3d3f2dbe7ac0"
s2 := &Session{
UserName: "charles",
SessExpires: unix.Day * 3,
SessTimeout: unix.Now() + unix.Week,
RefID: "sessxkkcxxxx",
}
s2.SetAuthToken(authToken)
err = s2.Create()
assert.Error(t, err)
})
t.Run("LongNumericAuthID", func(t *testing.T) {
refID := rnd.RefID("ts")
m := FindSessionByRefID(refID)
assert.Empty(t, m)
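// A long numeric AuthID with a leading zero verifies that the value survives the auth_id column type fix unchanged.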
s := &Session{
UserName: "charles",
SessExpires: unix.Day * 3,
SessTimeout: unix.Now() + unix.Week,
RefID: refID,
AuthID: "012345678901234567890",
}
s.SetAuthToken("69be27ac5ca305b394046a83f6fda18167ca3d3f2dbe7xxs")
err := s.Create()
require.Nil(t, err)
t.Cleanup(func() {
s.Delete()
})
m2 := FindSessionByRefID(refID)
assert.Equal(t, "charles", m2.UserName)
assert.Equal(t, "012345678901234567890", m2.AuthID)
})
}
func TestSession_Save(t *testing.T) {
@@ -304,14 +350,37 @@ func TestSession_Save(t *testing.T) {
s.SetAuthToken("69be27ac5ca305b394046a83f6fda18167ca3d3f2dbe7xxy")
err := s.Save()
if err != nil {
t.Fatal(err)
}
require.Nil(t, err)
m2 := FindSessionByRefID("sessxkkcxxxy")
assert.Equal(t, "chris", m2.UserName)
})
t.Run("LongNumericAuthID", func(t *testing.T) {
refID := rnd.RefID("ts")
m := FindSessionByRefID(refID)
assert.Empty(t, m)
s := &Session{
UserName: "chris",
SessExpires: unix.Day * 3,
SessTimeout: unix.Now() + unix.Week,
RefID: refID,
AuthID: "012345678901234567890",
}
s.SetAuthToken("69be27ac5ca305b394046a83f6fda18167ca3d3f2dbe7xxy")
err := s.Save()
require.Nil(t, err)
t.Cleanup(func() {
s.Delete()
})
m2 := FindSessionByRefID(refID)
assert.Equal(t, "chris", m2.UserName)
assert.Equal(t, "012345678901234567890", m2.AuthID)
})
}
func TestSession_Updates(t *testing.T) {


@@ -70,6 +70,25 @@ func TestOidcUser(t *testing.T) {
assert.Equal(t, "jane.doe", m.UserName)
assert.Equal(t, "Jane Doe", m.DisplayName)
})
t.Run("LongNumberAsSubject", func(t *testing.T) {
info := &oidc.UserInfo{}
info.Name = "Jane Doe"
info.GivenName = "Jane"
info.FamilyName = "Doe"
info.Email = "jane@doe.com"
info.EmailVerified = true
info.Subject = "12345678901234567890"
info.PreferredUsername = "Jane Doe"
m := OidcUser(info, "", "jane.doe")
assert.Equal(t, "oidc", m.AuthProvider)
assert.Equal(t, "", m.AuthIssuer)
assert.Equal(t, "12345678901234567890", m.AuthID)
assert.Equal(t, "jane@doe.com", m.UserEmail)
assert.Equal(t, "jane.doe", m.UserName)
assert.Equal(t, "Jane Doe", m.DisplayName)
})
t.Run("NoUsername", func(t *testing.T) {
info := &oidc.UserInfo{}
info.Name = "Jane Doe"
@@ -305,6 +324,32 @@ func TestUser_Create(t *testing.T) {
t.Fatal(err)
}
})
t.Run("LongNumericAuthID", func(t *testing.T) {
useruid := rnd.GenerateUID(UserUID)
var m = User{
UserUID: useruid,
UserName: "examplelong",
UserRole: string(acl.RoleGuest),
DisplayName: "Example Long",
SuperAdmin: false,
CanLogin: true,
AuthID: "012345678901234567890",
AuthProvider: string(authn.ProviderOIDC),
}
if err := m.Create(); err != nil {
t.Fatal(err)
}
t.Cleanup(func() {
m.Delete()
UnscopedDb().Delete(m)
})
assert.Equal(t, "examplelong", m.Username())
assert.Equal(t, "examplelong", m.UserName)
assert.Equal(t, "012345678901234567890", m.AuthID)
})
}
func TestUser_UpdateUsername(t *testing.T) {
@@ -532,6 +577,14 @@ func TestFindUser(t *testing.T) {
assert.NotEmpty(t, m.UserUID)
assert.Equal(t, "jane.doe", m.UserName)
assert.Equal(t, "oidc", m.AuthProvider)
n := FindUser(User{AuthProvider: authn.ProviderOIDC.String(), AuthID: info.Subject})
require.NotNil(t, n)
assert.NotEmpty(t, n.UserUID)
assert.Equal(t, "jane.doe", n.UserName)
assert.Equal(t, "oidc", n.AuthProvider)
})
t.Run("UserName", func(t *testing.T) {
m := FindUser(User{UserName: "admin"})
@@ -1803,11 +1856,44 @@ func TestUser_SetAuthID(t *testing.T) {
assert.Equal(t, uuid, m.AuthID)
assert.Equal(t, "", m.AuthIssuer)
})
t.Run("DupeAuthProviderAndID", func(t *testing.T) {
m := UserFixtures.Get("guest")
n := NewUser()
n.UserName = "guest2"
n.DisplayName = "Guest User2"
n.UserEmail = "guest2@example.com"
n.UserRole = acl.RoleGuest.String()
n.AuthProvider = authn.ProviderOIDC.String()
n.AuthMethod = authn.MethodDefault.String()
n.SuperAdmin = false
n.CanLogin = true
n.SetAuthID(uuid, issuer)
n.Save()
t.Cleanup(func() {
n.Delete()
UnscopedDb().Delete(n)
})
newUserUID := n.UserUID
m.SetAuthID(uuid, issuer)
assert.Equal(t, uuid, m.AuthID)
assert.Equal(t, issuer, m.AuthIssuer)
n = FindUserByUID(newUserUID)
require.NotNil(t, n)
assert.Equal(t, "guest2", n.UserName)
assert.Equal(t, "", n.AuthID)
assert.Equal(t, authn.ProviderNone.String(), n.AuthProvider)
})
}
func TestUser_UpdateAuthID(t *testing.T) {
uuid := rnd.UUID()
issuer := "http://dummy-oidc:9998"
longnumber := "12345678901234567890"
t.Run("UUID", func(t *testing.T) {
m := UserFixtures.Get("friend")
@@ -1833,6 +1919,20 @@ func TestUser_UpdateAuthID(t *testing.T) {
err := m.UpdateAuthID(uuid, "")
assert.Error(t, err)
})
t.Run("LongNumber", func(t *testing.T) {
m := UserFixtures.Get("friend")
m.SetAuthID("", issuer)
assert.Equal(t, "", m.AuthID)
assert.Equal(t, "", m.AuthIssuer)
m.SetAuthID(longnumber, issuer)
assert.Equal(t, longnumber, m.AuthID)
assert.Equal(t, issuer, m.AuthIssuer)
err := m.UpdateAuthID(longnumber, "")
assert.NoError(t, err)
assert.Equal(t, longnumber, m.AuthID)
assert.Equal(t, "", m.AuthIssuer)
})
}
func TestUser_AuthInfo(t *testing.T) {
@@ -2356,3 +2456,23 @@ func TestUser_SetValuesFromCliScope(t *testing.T) {
require.NoError(t, user.SetValuesFromCli(ctx))
assert.Equal(t, "videos:view", user.UserScope)
}
func TestUser_AuthIDSQLite(t *testing.T) {
user := FindLocalUser("alice")
require.NotNil(t, user)
original := user.AuthID
t.Cleanup(func() {
user.AuthID = original
user.Save()
})
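// With auth_id stored as TEXT, a long numeric value with a leading zero must round-trip through SQLite unchanged.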
expected := "012345678901234567890123456789"
user.AuthID = expected
user.Save()
user2 := FindLocalUser("alice")
require.NotNil(t, user2)
assert.Equal(t, expected, user2.AuthID)
}


@@ -122,6 +122,21 @@ func (list Tables) Migrate(db *gorm.DB, opt migrate.Options) {
// Run ORM auto migrations.
if opt.AutoMigrate {
// Check whether the DBMS AuthID fix has already been applied; the stored migration version ensures it only runs once.
version := migrate.FirstOrCreateVersion(db, migrate.NewVersion("DBMS AuthID Fix", "Any Editions"))
if version.NeedsMigration() {
if err := migrate.ConvertDBMSAuthIDDataTypes(db); err != nil {
log.Errorf("migrate: could not apply dbms auth_id fix : %v", err)
version.Error = err.Error()
version.Save(db)
} else {
version.Migrated(db)
log.Debug("migrate: DBMS AuthID fix migrated")
}
} else {
log.Debug("migrate: DBMS AuthID fix skipped")
}
for name, entity = range list {
if err := db.AutoMigrate(entity).Error; err != nil {
log.Debugf("migrate: %s (waiting 1s)", err.Error())


@@ -0,0 +1,170 @@
package migrate
import (
"fmt"
"strings"
"github.com/jinzhu/gorm"
)
// ConvertDBMSAuthIDDataTypes applies the data type conversion for the auth_id columns on SQLite.
// The conversion changes the column type from VARBINARY(255) DEFAULT '' to TEXT NOT NULL COLLATE BINARY DEFAULT ''.
// This cannot be done in a plain SQL script because the column order is not known in advance; it depends on the age of the database.
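// For existing databases the fix rebuilds each table: rename the old table, create the new schema, copy the columns shared by both, and drop the old indexes so the regular migration can recreate them.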
func ConvertDBMSAuthIDDataTypes(db *gorm.DB) (err error) {
switch db.Dialect().GetName() {
case SQLite3:
// These create statements will get out of date, but that is fine, as the main migrate path will add any missing columns and indexes later.
authSessionsCreate := `CREATE TABLE "auth_sessions" ("id" VARBINARY(2048),"user_uid" VARBINARY(42) DEFAULT '',"user_name" varchar(200),"client_uid" VARBINARY(42) DEFAULT '',"client_name" varchar(200) DEFAULT '',"client_ip" varchar(64),"auth_provider" VARBINARY(128) DEFAULT '',"auth_method" VARBINARY(128) DEFAULT '',"auth_issuer" VARBINARY(255) DEFAULT '',"auth_id" TEXT NOT NULL COLLATE BINARY DEFAULT '',"auth_scope" varchar(1024) DEFAULT '',"grant_type" VARBINARY(64) DEFAULT '',"last_active" bigint,"sess_expires" bigint,"sess_timeout" bigint,"preview_token" VARBINARY(64) DEFAULT '',"download_token" VARBINARY(64) DEFAULT '',"access_token" VARBINARY(4096) DEFAULT '',"refresh_token" VARBINARY(2048) DEFAULT '',"id_token" VARBINARY(2048) DEFAULT '',"user_agent" varchar(512),"data_json" VARBINARY(4096),"ref_id" VARBINARY(16) DEFAULT '',"login_ip" varchar(64),"login_at" datetime,"created_at" datetime,"updated_at" datetime , PRIMARY KEY ("id"))`
authUsersCreate := `CREATE TABLE "auth_users" ("id" integer primary key autoincrement,"user_uuid" VARBINARY(64),"user_uid" VARBINARY(42),"auth_provider" VARBINARY(128) DEFAULT '',"auth_method" VARBINARY(128) DEFAULT '',"auth_issuer" VARBINARY(255) DEFAULT '',"auth_id" TEXT NOT NULL COLLATE BINARY DEFAULT '',"user_name" varchar(200),"display_name" varchar(200),"user_email" varchar(255),"backup_email" varchar(255),"user_role" varchar(64) DEFAULT '',"user_scope" varchar(1024) DEFAULT '*',"user_attr" varchar(1024) DEFAULT '',"super_admin" bool,"can_login" bool,"login_at" datetime,"expires_at" datetime,"webdav" bool,"base_path" VARBINARY(1024),"upload_path" VARBINARY(1024),"can_invite" bool,"invite_token" VARBINARY(64),"invited_by" varchar(64),"verify_token" VARBINARY(64),"verified_at" datetime,"consent_at" datetime,"born_at" datetime,"reset_token" VARBINARY(64),"preview_token" VARBINARY(64),"download_token" VARBINARY(64),"thumb" VARBINARY(128) DEFAULT '',"thumb_src" VARBINARY(8) DEFAULT '',"ref_id" VARBINARY(16),"created_at" datetime,"updated_at" datetime,"deleted_at" datetime )`
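// resultIndex and pragmaTable receive the rows scanned from the sqlite_master and PRAGMA table_info queries below.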
type resultIndex struct {
Name string
}
type pragmaTable struct {
Cid int
Name string
Type string
Notnull int
DfltValue string
Pk int
}
if !db.HasTable("auth_sessions") {
if err := db.Exec(authSessionsCreate).Error; err != nil {
return fmt.Errorf("migrate: error creating auth_sessions %w", err)
}
} else {
// Migrate the existing data: rename the old table, create the new one, copy the data over, then drop the stale indexes.
if err := db.Exec(`ALTER TABLE "auth_sessions" RENAME TO "migrate_auth_sessions"`).Error; err != nil {
return fmt.Errorf("migrate: error renaming auth_sessions %w", err)
}
if err := db.Exec(authSessionsCreate).Error; err != nil {
return fmt.Errorf("migrate: error creating auth_sessions %w", err)
}
// Read the columns of both the old and the new table and determine which columns exist in both.
var oldPragmaColumns []pragmaTable
var newPragmaColumns []pragmaTable
oldColumns := make(map[string]bool)
if err := db.Raw("PRAGMA table_info(migrate_auth_sessions)").Scan(&oldPragmaColumns).Error; err != nil {
return fmt.Errorf("migrate: error getting column list for migrate_auth_sessions with %w", err)
}
for _, pragma := range oldPragmaColumns {
oldColumns[pragma.Name] = false
}
if err := db.Raw("PRAGMA table_info(auth_sessions)").Scan(&newPragmaColumns).Error; err != nil {
return fmt.Errorf("migrate: error getting column list for auth_sessions with %w", err)
}
for _, pragma := range newPragmaColumns {
if _, present := oldColumns[pragma.Name]; present {
oldColumns[pragma.Name] = true
}
}
// Build the INSERT INTO ... SELECT statement from the shared columns.
var columns []string
for key, value := range oldColumns {
if value {
columns = append(columns, key)
}
}
populateStmt := fmt.Sprintf("INSERT INTO auth_sessions (%s) SELECT %s FROM migrate_auth_sessions", strings.Join(columns, ", "), strings.Join(columns, ", "))
if err := db.Exec(populateStmt).Error; err != nil {
return fmt.Errorf("migrate: error migrating with stmt %s with %w", populateStmt, err)
}
var indexes []resultIndex
if err := db.Raw("SELECT name FROM sqlite_master WHERE type = 'index' AND tbl_name = ? AND sql IS NOT NULL", "migrate_auth_sessions").Scan(&indexes).Error; err != nil {
return fmt.Errorf("migrate: error getting index list %w", err)
}
for _, index := range indexes {
dropStatement := fmt.Sprintf(`DROP INDEX IF EXISTS "%s"`, index.Name)
if err := db.Exec(dropStatement).Error; err != nil {
return fmt.Errorf("migrate: error dropping index %s was %w", index.Name, err)
}
}
}
if !db.HasTable("auth_users") {
if err := db.Exec(authUsersCreate).Error; err != nil {
return fmt.Errorf("migrate: error creating auth_users %w", err)
}
} else {
// Migrate the existing data: rename the old table, create the new one, copy the data over, then drop the stale indexes.
if err := db.Exec(`ALTER TABLE "auth_users" RENAME TO "migrate_auth_users"`).Error; err != nil {
return fmt.Errorf("migrate: error renaming auth_users %w", err)
}
if err := db.Exec(authUsersCreate).Error; err != nil {
return fmt.Errorf("migrate: error creating auth_users %w", err)
}
// Read the columns of both the old and the new table and determine which columns exist in both.
var oldPragmaColumns []pragmaTable
var newPragmaColumns []pragmaTable
oldColumns := make(map[string]bool)
if err := db.Raw("PRAGMA table_info(migrate_auth_users)").Scan(&oldPragmaColumns).Error; err != nil {
return fmt.Errorf("migrate: error getting column list for migrate_auth_users with %w", err)
}
for _, pragma := range oldPragmaColumns {
oldColumns[pragma.Name] = false
}
if err := db.Raw("PRAGMA table_info(auth_users)").Scan(&newPragmaColumns).Error; err != nil {
return fmt.Errorf("migrate: error getting column list for auth_users with %w", err)
}
for _, pragma := range newPragmaColumns {
if _, present := oldColumns[pragma.Name]; present {
oldColumns[pragma.Name] = true
}
}
// Build the INSERT INTO ... SELECT statement from the shared columns.
var columns []string
for key, value := range oldColumns {
if value {
columns = append(columns, key)
}
}
populateStmt := fmt.Sprintf("INSERT INTO auth_users (%s) SELECT %s FROM migrate_auth_users", strings.Join(columns, ", "), strings.Join(columns, ", "))
if err := db.Exec(populateStmt).Error; err != nil {
return fmt.Errorf("migrate: error migrating with stmt %s with %w", populateStmt, err)
}
var indexes []resultIndex
if err := db.Raw("SELECT name FROM sqlite_master WHERE type = 'index' AND tbl_name = ? AND sql IS NOT NULL", "migrate_auth_users").Scan(&indexes).Error; err != nil {
return fmt.Errorf("migrate: error getting index list %w", err)
}
for _, index := range indexes {
dropStatement := fmt.Sprintf(`DROP INDEX IF EXISTS "%s"`, index.Name)
if err := db.Exec(dropStatement).Error; err != nil {
return fmt.Errorf("migrate: error dropping index %s was %w", index.Name, err)
}
}
}
// case MySQL: // MySQL
// Nothing required for Gorm V1. Statements left in comments for Gorm V2 implementation.
// // These create statements will get out of date, but that is fine, as the main migrate path will add any missing columns and indexes later.
// authSessionsCreate := "CREATE TABLE `auth_sessions` (`id` VARBINARY(2048),`user_uid` VARBINARY(42) DEFAULT '',`user_name` varchar(200),`client_uid` VARBINARY(42) DEFAULT '',`client_name` varchar(200) DEFAULT '',`client_ip` varchar(64),`auth_provider` VARBINARY(128) DEFAULT '',`auth_method` VARBINARY(128) DEFAULT '',`auth_issuer` VARBINARY(255) DEFAULT '',`auth_id` VARBINARY(255) DEFAULT '',`auth_scope` varchar(1024) DEFAULT '',`grant_type` VARBINARY(64) DEFAULT '',`last_active` bigint,`sess_expires` bigint,`sess_timeout` bigint,`preview_token` VARBINARY(64) DEFAULT '',`download_token` VARBINARY(64) DEFAULT '',`access_token` VARBINARY(4096) DEFAULT '',`refresh_token` VARBINARY(2048) DEFAULT '',`id_token` VARBINARY(2048) DEFAULT '',`user_agent` varchar(512),`data_json` VARBINARY(4096),`ref_id` VARBINARY(16) DEFAULT '',`login_ip` varchar(64),`login_at` DATETIME NULL,`created_at` DATETIME NULL,`updated_at` DATETIME NULL , PRIMARY KEY (`id`))"
// authUsersCreate := "CREATE TABLE `auth_users` (`id` int AUTO_INCREMENT,`user_uuid` VARBINARY(64),`user_uid` VARBINARY(42),`auth_provider` VARBINARY(128) DEFAULT '',`auth_method` VARBINARY(128) DEFAULT '',`auth_issuer` VARBINARY(255) DEFAULT '',`auth_id` VARBINARY(255) DEFAULT '',`user_name` varchar(200),`display_name` varchar(200),`user_email` varchar(255),`backup_email` varchar(255),`user_role` varchar(64) DEFAULT '',`user_scope` varchar(1024) DEFAULT '*',`user_attr` varchar(1024) DEFAULT '',`super_admin` boolean,`can_login` boolean,`login_at` DATETIME NULL,`expires_at` DATETIME NULL,`webdav` boolean,`base_path` VARBINARY(1024),`upload_path` VARBINARY(1024),`can_invite` boolean,`invite_token` VARBINARY(64),`invited_by` varchar(64),`verify_token` VARBINARY(64),`verified_at` DATETIME NULL,`consent_at` DATETIME NULL,`born_at` DATETIME NULL,`reset_token` VARBINARY(64),`preview_token` VARBINARY(64),`download_token` VARBINARY(64),`thumb` VARBINARY(128) DEFAULT '',`thumb_src` VARBINARY(8) DEFAULT '',`ref_id` VARBINARY(16),`created_at` DATETIME NULL,`updated_at` DATETIME NULL,`deleted_at` DATETIME NULL , PRIMARY KEY (`id`))"
// if !db.HasTable("auth_sessions") {
// if err := db.Exec(authSessionsCreate).Error; err != nil {
// return fmt.Errorf("migrate: error creating auth_sessions %w", err)
// }
// }
// if !db.HasTable("auth_users") {
// if err := db.Exec(authUsersCreate).Error; err != nil {
// return fmt.Errorf("migrate: error creating auth_users %w", err)
// }
// }
// // There are no migration needs for MariaDB as the structure is not being manipulated.
// case Postgres:
// Nothing required for Gorm V1
default:
}
return nil
}