feat: 拉取key数据增加初始标记 (feat: add an initialization flag when pulling key data)
Changed files:
.gitignore (vendored): 13 lines changed
@@ -84,21 +84,10 @@ node_modules/
 .cache
 .parcel-cache
 
-# Next.js build output
-.next
-
-# Nuxt.js build / generate output
-.nuxt
-dist
-
 # Gatsby files
 .cache/
 public
 
-# Storybook build outputs
-.out
-.storybook-out
-
 # Temporary folders
 tmp/
 temp/
@@ -226,5 +215,3 @@ charts/*.tgz
 run-local.sh
 start-local.sh
 dev-setup.sh
-
-cmd/gpt-load/dist
@@ -187,13 +187,26 @@ func (p *KeyProvider) handleFailure(keyID uint, keyHashKey, activeKeysListKey st
 
 // LoadKeysFromDB loads all groups and keys from the database and populates them into the Store.
 func (p *KeyProvider) LoadKeysFromDB() error {
+    initFlagKey := "initialization:db_keys_loaded"
+
+    exists, err := p.store.Exists(initFlagKey)
+    if err != nil {
+        return fmt.Errorf("failed to check initialization flag: %w", err)
+    }
+
+    if exists {
+        logrus.Debug("Keys have already been loaded into the store. Skipping.")
+        return nil
+    }
+
+    logrus.Debug("First time startup, loading keys from DB...")
 
     // 1. Load from the database in batches and write to Redis using a pipeline
     allActiveKeyIDs := make(map[uint][]any)
     batchSize := 1000
     var batchKeys []*models.APIKey
 
-    err := p.db.Model(&models.APIKey{}).FindInBatches(&batchKeys, batchSize, func(tx *gorm.DB, batch int) error {
+    err = p.db.Model(&models.APIKey{}).FindInBatches(&batchKeys, batchSize, func(tx *gorm.DB, batch int) error {
         logrus.Debugf("Processing batch %d with %d keys...", batch, len(batchKeys))
 
         var pipeline store.Pipeliner
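Two details are easy to miss in the hunk above: the flag is checked with a single Exists call before any database work starts, and the later assignment switches from err := to err = because err is now already declared by the Exists call. The diff does not show the project's store interface; the sketch below spells out only the two methods the new code appears to rely on, with names and signatures inferred from the calls above, so treat it as an assumption rather than the real store.Store definition.

package keypoolsketch

import "time"

// initFlagStore sketches the two store methods LoadKeysFromDB now depends on.
// Signatures are inferred from the calls in the diff and may differ from the
// project's actual store interface.
type initFlagStore interface {
    // Exists reports whether key is present in the backing store (e.g. Redis).
    Exists(key string) (bool, error)
    // Set writes value under key; the diff passes a zero TTL, i.e. no expiry.
    Set(key string, value []byte, ttl time.Duration) error
}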
@@ -242,6 +255,10 @@ func (p *KeyProvider) LoadKeysFromDB() error {
         }
     }
 
+    if err := p.store.Set(initFlagKey, []byte("1"), 0); err != nil {
+        logrus.WithField("flagKey", initFlagKey).Error("Failed to set initialization flag after loading keys")
+    }
+
     return nil
 }
 
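The flag is written only after the batched load has finished, and with a TTL of 0, i.e. no expiry, so a later restart against the same backing store skips the reload entirely. Below is a minimal, self-contained sketch of that behaviour using a hypothetical in-memory store in place of Redis; apart from the flag key and the overall control flow taken from the diff, every name here is illustrative only.

package main

import (
    "fmt"
    "log"
    "time"
)

// memStore is a hypothetical in-memory stand-in for the project's Redis-backed store.
type memStore struct {
    data map[string][]byte
}

// Exists reports whether a key is present in the store.
func (s *memStore) Exists(key string) (bool, error) {
    _, ok := s.data[key]
    return ok, nil
}

// Set stores a value under key; the TTL is ignored in this in-memory sketch.
func (s *memStore) Set(key string, value []byte, _ time.Duration) error {
    s.data[key] = value
    return nil
}

// loadKeys mirrors the control flow added to LoadKeysFromDB: check the flag,
// skip when already initialized, otherwise run the expensive load and set the flag.
func loadKeys(s *memStore) error {
    const initFlagKey = "initialization:db_keys_loaded"

    exists, err := s.Exists(initFlagKey)
    if err != nil {
        return fmt.Errorf("failed to check initialization flag: %w", err)
    }
    if exists {
        log.Println("keys already loaded into the store, skipping")
        return nil
    }

    log.Println("first startup, loading keys from DB...")
    // ... the batched FindInBatches + pipeline load would run here ...

    // The flag is set only after the load completed; TTL 0 means it never expires.
    if err := s.Set(initFlagKey, []byte("1"), 0); err != nil {
        log.Printf("failed to set initialization flag %q: %v", initFlagKey, err)
    }
    return nil
}

func main() {
    s := &memStore{data: map[string][]byte{}}
    _ = loadKeys(s) // loads and sets the flag
    _ = loadKeys(s) // skips: the flag is already present
}

Running the sketch takes the "first startup" path once and the "skipping" path on the second call; deleting the flag key from the backing store is what would force a full reload on the next start.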