
Add support for indexing arch files

dragon 2024-12-06 14:51:49 +08:00
parent c287a15996
commit 37a300b105
5 changed files with 96 additions and 29 deletions

View file

@ -4,6 +4,7 @@
package arch
import (
"archive/tar"
"bufio"
"bytes"
"encoding/hex"
@ -25,7 +26,9 @@ import (
// https://man.archlinux.org/man/PKGBUILD.5
const (
PropertyDescription = "arch.description"
PropertyDescription = "arch.description"
PropertyFiles = "arch.files"
PropertyArch = "arch.architecture"
PropertyDistribution = "arch.distribution"
@ -85,6 +88,8 @@ type FileMetadata struct {
Packager string `json:"packager"`
Arch string `json:"arch"`
PgpSigned string `json:"pgp"`
Files []string `json:"files,omitempty"`
}
// ParsePackage Function that receives arch package archive data and returns it's metadata.
@ -127,6 +132,8 @@ func ParsePackage(r *packages.HashedBuffer) (*Package, error) {
var pkg *Package
var mTree bool
files := make([]string, 0)
for {
f, err := tarball.Read()
if err == io.EOF {
@ -135,6 +142,11 @@ func ParsePackage(r *packages.HashedBuffer) (*Package, error) {
if err != nil {
return nil, err
}
// ref:https://gitlab.archlinux.org/pacman/pacman/-/blob/91546004903eea5d5267d59898a6029ba1d64031/lib/libalpm/add.c#L529-L533
if !strings.HasPrefix(f.Name(), ".") {
files = append(files, (f.Header.(*tar.Header)).Name)
}
switch f.Name() {
case ".PKGINFO":
pkg, err = ParsePackageInfo(tarballType, f)
@ -155,7 +167,7 @@ func ParsePackage(r *packages.HashedBuffer) (*Package, error) {
if !mTree {
return nil, util.NewInvalidArgumentErrorf(".MTREE file not found")
}
pkg.FileMetadata.Files = files
pkg.FileMetadata.CompressedSize = r.Size()
pkg.FileMetadata.MD5 = hex.EncodeToString(md5)
pkg.FileMetadata.SHA256 = hex.EncodeToString(sha256)
@ -339,3 +351,12 @@ func (p *Package) Desc() string {
}
return buf.String()
}
func (p *Package) Files() string {
var buf bytes.Buffer
buf.WriteString("%FILES%\n")
for _, item := range p.FileMetadata.Files {
_, _ = fmt.Fprintf(&buf, "%s\n", item)
}
return buf.String()
}
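
Not part of the commit: a standalone sketch of the file-list rule the hunk above adds, using only the Go standard library. It mirrors the referenced pacman behaviour: every archive entry whose name does not start with "." (so .PKGINFO, .MTREE and .INSTALL are skipped) is listed under a %FILES% header, which is what the new Files() method prints. The filesEntry helper and the stdin-based invocation are illustrative assumptions, not the ParsePackage code path itself.

package main

import (
	"archive/tar"
	"fmt"
	"io"
	"os"
	"strings"
)

// filesEntry builds a pacman-style %FILES% block from an uncompressed
// package tar stream, skipping dot-prefixed control entries.
func filesEntry(r io.Reader) (string, error) {
	var b strings.Builder
	b.WriteString("%FILES%\n")
	tr := tar.NewReader(r)
	for {
		hdr, err := tr.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return "", err
		}
		if !strings.HasPrefix(hdr.Name, ".") {
			fmt.Fprintf(&b, "%s\n", hdr.Name)
		}
	}
	return b.String(), nil
}

func main() {
	// Assumed invocation: zstd -dc some-package.pkg.tar.zst | go run .
	entry, err := filesEntry(os.Stdin)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Print(entry)
}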

View file

@ -344,8 +344,8 @@ func TestValidatePackageSpec(t *testing.T) {
})
}
func TestDescString(t *testing.T) {
const pkgdesc = `%FILENAME%
func TestDescAndFileString(t *testing.T) {
const pkgDesc = `%FILENAME%
zstd-1.5.5-1-x86_64.pkg.tar.zst
%NAME%
@ -415,6 +415,12 @@ ninja
dummy5
dummy6
`
const pkgFiles = `%FILES%
usr/
usr/bin/
usr/bin/zstd
`
md := &Package{
@ -441,7 +447,9 @@ dummy6
BuildDate: 1681646714,
Packager: "Jelle van der Waa <jelle@archlinux.org>",
Arch: "x86_64",
Files: []string{"usr/", "usr/bin/", "usr/bin/zstd"},
},
}
require.Equal(t, pkgdesc, md.Desc())
require.Equal(t, pkgDesc, md.Desc())
require.Equal(t, pkgFiles, md.Files())
}

View file

@ -26,7 +26,7 @@ import (
var (
archPkgOrSig = regexp.MustCompile(`^.*\.pkg\.tar\.\w+(\.sig)*$`)
archDBOrSig = regexp.MustCompile(`^.*.db(\.tar\.gz)*(\.sig)*$`)
archDBOrSig = regexp.MustCompile(`^.*.(db|files)(\.tar\.gz)*(\.sig)*$`)
locker = sync.NewExclusivePool()
)
@ -115,6 +115,7 @@ func PushPackage(ctx *context.Context) {
properties := map[string]string{
arch_module.PropertyDescription: p.Desc(),
arch_module.PropertyFiles: p.Files(),
arch_module.PropertyArch: p.FileMetadata.Arch,
arch_module.PropertyDistribution: group,
}
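
Not part of the commit: a minimal check of which names the widened archDBOrSig pattern accepts, so that the base.files and base.files.tar.gz requests (and their .sig variants) are routed like the existing .db files. The sample names are assumptions.

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as in the hunk above.
	archDBOrSig := regexp.MustCompile(`^.*.(db|files)(\.tar\.gz)*(\.sig)*$`)
	for _, name := range []string{
		"base.db",
		"base.db.tar.gz",
		"base.db.sig",
		"base.files",
		"base.files.tar.gz",
		"base.files.sig",
	} {
		// All six names match the new pattern; with the old one the
		// .files variants did not.
		fmt.Printf("%-20s %v\n", name, archDBOrSig.MatchString(name))
	}
}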

View file

@ -225,22 +225,44 @@ func createDB(ctx context.Context, ownerID int64, group, arch string) (*packages
if err != nil {
return nil, err
}
if len(pps) >= 1 {
meta := []byte(pps[0].Value)
if len(pps) == 0 {
continue
}
pkgDesc := []byte(pps[0].Value)
header := &tar.Header{
Name: pkg.Name + "-" + ver.Version + "/desc",
Size: int64(len(pkgDesc)),
Mode: int64(os.ModePerm),
}
if err = tw.WriteHeader(header); err != nil {
return nil, err
}
if _, err := tw.Write(pkgDesc); err != nil {
return nil, err
}
pfs, err := packages_model.GetPropertiesByName(
ctx, packages_model.PropertyTypeFile, pf.ID, arch_module.PropertyFiles,
)
if err != nil {
return nil, err
}
if len(pfs) >= 1 {
pkgFiles := []byte(pfs[0].Value)
header := &tar.Header{
Name: pkg.Name + "-" + ver.Version + "/desc",
Size: int64(len(meta)),
Name: pkg.Name + "-" + ver.Version + "/files",
Size: int64(len(pkgFiles)),
Mode: int64(os.ModePerm),
}
if err = tw.WriteHeader(header); err != nil {
return nil, err
}
if _, err := tw.Write(meta); err != nil {
if _, err := tw.Write(pkgFiles); err != nil {
return nil, err
}
count++
break
}
count++
break
}
}
if count == 0 {
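
Not part of the commit: a small standalone reader for a repository database produced by the logic above; with this change each package version contributes both a "<name>-<version>/desc" and a "<name>-<version>/files" entry. The local file name base.db.tar.gz is an assumption; point it at a database downloaded from the registry.

package main

import (
	"archive/tar"
	"compress/gzip"
	"fmt"
	"io"
	"log"
	"os"
)

func main() {
	f, err := os.Open("base.db.tar.gz") // assumed local copy of the repo database
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	gz, err := gzip.NewReader(f)
	if err != nil {
		log.Fatal(err)
	}
	tr := tar.NewReader(gz)
	for {
		hdr, err := tr.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		// Expected per package: <name>-<version>/desc and <name>-<version>/files.
		fmt.Println(hdr.Name)
	}
}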

View file

@ -223,8 +223,14 @@ HMhNSS1IzUsBcpJAPFAwwUXSM0u4BjoaR8EoGAWjgGQAAILFeyQADAAA
t.Run(fmt.Sprintf("RepositoryDB[%s]", group), func(t *testing.T) {
defer tests.PrintCurrentTest(t)()
req := NewRequest(t, "GET", rootURL+"/repository.key")
respPub := MakeRequest(t, req, http.StatusOK)
req := NewRequest(t, "GET", groupURL+"/x86_64/base.db.tar.gz")
MakeRequest(t, req, http.StatusOK)
req = NewRequest(t, "GET", groupURL+"/x86_64/base.files")
MakeRequest(t, req, http.StatusOK)
req = NewRequest(t, "GET", groupURL+"/x86_64/base.files.tar.gz")
MakeRequest(t, req, http.StatusOK)
req = NewRequest(t, "GET", groupURL+"/x86_64/base.db")
respPkg := MakeRequest(t, req, http.StatusOK)
@ -232,23 +238,32 @@ HMhNSS1IzUsBcpJAPFAwwUXSM0u4BjoaR8EoGAWjgGQAAILFeyQADAAA
req = NewRequest(t, "GET", groupURL+"/x86_64/base.db.sig")
respSig := MakeRequest(t, req, http.StatusOK)
req = NewRequest(t, "GET", rootURL+"/repository.key")
respPub := MakeRequest(t, req, http.StatusOK)
if err := gpgVerify(respPub.Body.Bytes(), respSig.Body.Bytes(), respPkg.Body.Bytes()); err != nil {
t.Fatal(err)
}
files, err := listTarGzFiles(respPkg.Body.Bytes())
require.NoError(t, err)
require.Len(t, files, 1)
require.Len(t, files, 2)
for s, d := range files {
name := getProperty(string(d.Data), "NAME")
ver := getProperty(string(d.Data), "VERSION")
require.Equal(t, name+"-"+ver+"/desc", s)
fn := getProperty(string(d.Data), "FILENAME")
pgp := getProperty(string(d.Data), "PGPSIG")
req = NewRequest(t, "GET", groupURL+"/x86_64/"+fn+".sig")
respSig := MakeRequest(t, req, http.StatusOK)
decodeString, err := base64.StdEncoding.DecodeString(pgp)
require.NoError(t, err)
require.Equal(t, respSig.Body.Bytes(), decodeString)
if strings.HasSuffix(s, "/desc") {
name := getProperty(string(d.Data), "NAME")
ver := getProperty(string(d.Data), "VERSION")
require.Equal(t, name+"-"+ver+"/desc", s)
fn := getProperty(string(d.Data), "FILENAME")
pgp := getProperty(string(d.Data), "PGPSIG")
req = NewRequest(t, "GET", groupURL+"/x86_64/"+fn+".sig")
respSig := MakeRequest(t, req, http.StatusOK)
decodeString, err := base64.StdEncoding.DecodeString(pgp)
require.NoError(t, err)
require.Equal(t, respSig.Body.Bytes(), decodeString)
} else if strings.HasSuffix(s, "/files") {
require.True(t, strings.HasPrefix(string(d.Data), "%FILES%"))
} else {
require.Failf(t, "unknown item", "fileName:%s", s)
}
}
})
@ -275,7 +290,7 @@ HMhNSS1IzUsBcpJAPFAwwUXSM0u4BjoaR8EoGAWjgGQAAILFeyQADAAA
respPkg := MakeRequest(t, req, http.StatusOK)
files, err := listTarGzFiles(respPkg.Body.Bytes())
require.NoError(t, err)
require.Len(t, files, 1)
require.Len(t, files, 2)
req = NewRequestWithBody(t, "DELETE", groupURL+"/test2/1.0.0-1/any", nil).
AddBasicAuth(user.Name)
@ -347,7 +362,7 @@ HMhNSS1IzUsBcpJAPFAwwUXSM0u4BjoaR8EoGAWjgGQAAILFeyQADAAA
files, err := listTarGzFiles(respPkg.Body.Bytes())
require.NoError(t, err)
require.Len(t, files, 1)
require.Len(t, files, 2)
req = NewRequestWithBody(t, "PUT", rootURL, bytes.NewReader(pkgs["otherXZ"])).
AddBasicAuth(user.Name)
@ -358,7 +373,7 @@ HMhNSS1IzUsBcpJAPFAwwUXSM0u4BjoaR8EoGAWjgGQAAILFeyQADAAA
files, err = listTarGzFiles(respPkg.Body.Bytes())
require.NoError(t, err)
require.Len(t, files, 2)
require.Len(t, files, 4)
})
}