
fix: work around AWS S3 limits (#2629)

Fixes issue #2627

We get/put metadata in DynamoDB, and it appears there are limits enforced
by the AWS DynamoDB API.

https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchGetItem.html

If you request more than 100 items, BatchGetItem returns a
ValidationException with the message "Too many items requested for the
BatchGetItem call."
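
For illustration only (this sketch is not part of the commit): the workaround boils down to chunking the requested keys so that no single BatchGetItem call asks for more than 100 items. Below is a minimal, runnable Go sketch of that chunking pattern; the names batchLimit and chunkKeys are hypothetical and not taken from the zot codebase.

// A minimal, self-contained sketch (not part of the commit) of the batching
// pattern the fix applies: split the requested keys into chunks of at most
// 100 before each BatchGetItem-style call.
package main

import "fmt"

const batchLimit = 100 // DynamoDB BatchGetItem accepts at most 100 keys per request

// chunkKeys splits keys into consecutive slices of at most limit elements.
func chunkKeys(keys []string, limit int) [][]string {
	batches := [][]string{}

	for start := 0; start < len(keys); {
		size := min(len(keys)-start, limit) // built-in min requires Go 1.21+
		batches = append(batches, keys[start:start+size])
		start += size
	}

	return batches
}

func main() {
	digests := make([]string, 250)
	for i := range digests {
		digests[i] = fmt.Sprintf("sha256:%04d", i)
	}

	// 250 keys -> batches of 100, 100 and 50, each small enough for one request.
	for i, batch := range chunkKeys(digests, batchLimit) {
		fmt.Printf("batch %d: %d keys\n", i, len(batch))
	}
}

The actual fix (in the diff below) does the same slicing in place inside fetchImageMetaAttributesByDigest and checks each batch's response length against the batch size rather than the full key count.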

Signed-off-by: Ramkumar Chinchani <rchincha@cisco.com>
Ramkumar Chinchani 2024-09-12 08:26:43 -07:00 committed by GitHub
parent 58c9c9c29b
commit 98c8e2801c


@@ -1970,12 +1970,22 @@ func (dwr DynamoDB) DeleteUserData(ctx context.Context) error {
 	return err
 }
 
+const AwsS3BatchLimit = 100
+
 func (dwr *DynamoDB) fetchImageMetaAttributesByDigest(ctx context.Context, digests []string,
 ) ([]map[string]types.AttributeValue, error) {
+	// AWS S3 has a limit (=100) on the number of keys that can be retrieved in one
+	// request, so break it up
+	batchedResp := []map[string]types.AttributeValue{}
+
+	for start := 0; start < len(digests); {
+		size := min(len(digests)-start, AwsS3BatchLimit)
+		end := start + size
+
 	resp, err := dwr.Client.BatchGetItem(ctx, &dynamodb.BatchGetItemInput{
 		RequestItems: map[string]types.KeysAndAttributes{
 			dwr.ImageMetaTablename: {
-				Keys: getBatchImageKeys(digests),
+				Keys: getBatchImageKeys(digests[start:end]),
 			},
 		},
 	})
@@ -1983,11 +1993,15 @@ func (dwr *DynamoDB) fetchImageMetaAttributesByDigest(ctx context.Context, digests []string,
 		return nil, err
 	}
 
-	if len(resp.Responses[dwr.ImageMetaTablename]) != len(digests) {
+	if len(resp.Responses[dwr.ImageMetaTablename]) != size {
 		return nil, zerr.ErrImageMetaNotFound
 	}
 
-	return resp.Responses[dwr.ImageMetaTablename], nil
+		batchedResp = append(batchedResp, resp.Responses[dwr.ImageMetaTablename]...)
+		start = end
+	}
+
+	return batchedResp, nil
 }
 
 func getBatchImageKeys(digests []string) []map[string]types.AttributeValue {