Update to gqlgen v0.17.13
We ran into some problems with the existing folder structure, but the tooling works with the latest version after regenerating the project with 'gqlgen init' and refactoring to separate the logic previously kept in resolvers.go:
- the autogenerated code now lives under the gql_generated folder
- resolvers.go now contains only the code that is not rewritten by the gqlgen framework
- schema.resolvers.go is rewritten every time gqlgen runs, and only the actual resolvers matching the query names are kept there; changes we observed when gqlgen runs include reordering methods and renaming function parameters to match the names used in schema.graphql
- there is now a gqlgen.yml config file which governs the behavior of gqlgen (it can be tweaked later to restructure the layout of the generated code)

The new GraphQL server also has stricter validation:
1. It returns 422 instead of 200 for a missing query string, so the tests had to be updated.
2. It correctly uncovered an error in a test that had a stray `%` in the query string.

As a result of 2, a masked bug was found in the way we check whether images are signed with Notary: the signatures were searched for using the media type of the image manifest itself instead of the media type for Notation. Fixed this bug and improved the error messages. The bug would also have been reproducible on the main branch if the bad `%` in the test had been fixed.

Updated the linter to ignore some issues in the code that is always rewritten when running `go run github.com/99designs/gqlgen@v0.17.13 generate`.

Added a workflow to check that gqlgen runs cleanly and leaves no uncommitted changes.

Signed-off-by: Andrei Aaron <andaaron@cisco.com>
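For reference, the core of that Notary fix as it appears in the diff further below (an excerpt from the common package under pkg/extensions/search, not a standalone program; OciLayoutUtils, GetImageStore and GetReferrers are the existing zot types and methods):

func (olu OciLayoutUtils) checkNotarySignature(name string, digest godigest.Digest) bool {
	imageStore := olu.StoreController.GetImageStore(name)
	// The referrers lookup now uses the Notation artifact type; previously the image
	// manifest's own media type was passed in, so signatures were never found.
	mediaType := notreg.ArtifactTypeNotation

	_, err := imageStore.GetReferrers(name, digest.String(), mediaType)
	if err != nil {
		olu.Log.Info().Err(err).Str("repo", name).Str("digest",
			digest.String()).Str("mediatype", mediaType).Msg("invalid notary signature")

		return false
	}

	return true
}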
This commit is contained in: parent 76b811b029, commit 43160dcc43
17 changed files with 3230 additions and 1601 deletions
.github/workflows/gqlgen.yaml (new file, vendored, 36 lines)

@@ -0,0 +1,36 @@
name: "GQL generation"
# Validate gqlgen works
# Validate there are no uncommitted changes after running gqlgen

on:
  push:
    branches:
      - main
  pull_request:
    branches: [main]
  release:
    types:
      - published

permissions: read-all

jobs:
  gqlgen:
    name: Check GQL generation
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-go@v3
        with:
          go-version: 1.18.x
      - name: Install dependencies
        run: |
          cd $GITHUB_WORKSPACE
          go mod download
      - name: Run gqlgen
        run: |
          make gqlgen
      - name: Verify uncommitted files
        run: |
          make verify-gql-committed
Makefile (15 changed lines)

@@ -198,6 +198,21 @@ verify-config-commited: _verify-config
 		exit 1;\
 	fi; \
 
+.PHONY: gqlgen
+gqlgen:
+	cd pkg/extensions/search;\
+	go run github.com/99designs/gqlgen version;\
+	go run github.com/99designs/gqlgen generate
+
+.PHONY: verify-gql-committed
+verify-gql-committed:
+	$(eval UNCOMMITED_FILES = $(shell git status --porcelain | grep -c extensions/search))
+	@if [ $(UNCOMMITED_FILES) != 0 ]; then \
+		echo "Updated gql files uncommitted, make sure all gql files are committed:";\
+		git status;\
+		exit 1;\
+	fi; \
+
 .PHONY: binary-container
 binary-container:
 	${CONTAINER_RUNTIME} build ${BUILD_ARGS} -f Dockerfile -t zot-build:latest .
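Because the gqlgen target above relies on `go run github.com/99designs/gqlgen`, the tool version is the one pinned in go.mod (bumped to v0.17.13 further down in this commit). A common way to keep that dependency from being pruned by `go mod tidy` is a build-tagged tools file; the following is a hypothetical sketch of that pattern, not necessarily how zot handles it:

//go:build tools
// +build tools

// tools.go (hypothetical): keeps gqlgen listed in go.mod so that
// `go run github.com/99designs/gqlgen`, as invoked by `make gqlgen`,
// resolves to the module-pinned version.
package tools

import (
	_ "github.com/99designs/gqlgen"
)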

@@ -1,7 +1,6 @@
 ignore:
   - "*_gen.go"
-  - "./pkg/extensions/search/*_gen.go"
-  - "./pkg/extensions/search/generated.go"
+  - "./pkg/extensions/search/gql_generated"
   - "./pkg/extensions/minimal.go"
   - "./pkg/cli/minimal.go"
   - "./cmd/zb/*.go"
go.mod (4 changed lines)

@@ -3,7 +3,7 @@ module zotregistry.io/zot
 go 1.18
 
 require (
-	github.com/99designs/gqlgen v0.17.2
+	github.com/99designs/gqlgen v0.17.13
 	github.com/Masterminds/semver v1.5.0
 	github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751
 	github.com/apex/log v1.9.0

@@ -47,7 +47,7 @@ require (
 	github.com/stretchr/testify v1.8.0
 	github.com/swaggo/swag v1.8.3
 	github.com/urfave/cli/v2 v2.8.1
-	github.com/vektah/gqlparser/v2 v2.4.5
+	github.com/vektah/gqlparser/v2 v2.4.6
 	go.etcd.io/bbolt v1.3.6
 	golang.org/x/crypto v0.0.0-20220525230936-793ad666bf5e
 	gopkg.in/resty.v1 v1.12.0
go.sum (13 changed lines)
|
@ -111,8 +111,8 @@ filippo.io/edwards25519 v1.0.0-rc.1/go.mod h1:N1IkdkCkiLB6tki+MYJoSx2JTY9NUlxZE7
|
|||
git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
|
||||
git.apache.org/thrift.git v0.12.0/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
|
||||
github.com/14rcole/gopopulate v0.0.0-20180821133914-b175b219e774/go.mod h1:6/0dYRLLXyJjbkIPeeGyoJ/eKOSI0eU6eTlCBYibgd0=
|
||||
github.com/99designs/gqlgen v0.17.2 h1:yczvlwMsfcVu/JtejqfrLwXuSP0yZFhmcss3caEvHw8=
|
||||
github.com/99designs/gqlgen v0.17.2/go.mod h1:K5fzLKwtph+FFgh9j7nFbRUdBKvTcGnsta51fsMTn3o=
|
||||
github.com/99designs/gqlgen v0.17.13 h1:ETUEqvRg5Zvr1lXtpoRdj026fzVay0ZlJPwI33qXLIw=
|
||||
github.com/99designs/gqlgen v0.17.13/go.mod h1:w1brbeOdqVyNJI553BGwtwdVcYu1LKeYE1opLWN9RgQ=
|
||||
github.com/AdaLogics/go-fuzz-headers v0.0.0-20210401092550-0a8691dafd0d/go.mod h1:CzsSbkDixRphAF5hS6wbMKq0eI6ccJRb7/A0M6JBnwg=
|
||||
github.com/AdaLogics/go-fuzz-headers v0.0.0-20210715213245-6c3934b029d8/go.mod h1:CzsSbkDixRphAF5hS6wbMKq0eI6ccJRb7/A0M6JBnwg=
|
||||
github.com/AdaLogics/go-fuzz-headers v0.0.0-20211102141018-f7be0cbad29c h1:9K6I0yCgGSneuHCoIlJl0O09UjqqWduCwd+ZL1nHFWc=
|
||||
|
@ -323,7 +323,6 @@ github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki
|
|||
github.com/agext/levenshtein v1.2.2 h1:0S/Yg6LYmFJ5stwQeRp6EeOcCbj7xiqQSdNelsXvaqE=
|
||||
github.com/agext/levenshtein v1.2.2/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
|
||||
github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM=
|
||||
github.com/agnivade/levenshtein v1.1.0/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo=
|
||||
github.com/agnivade/levenshtein v1.1.1 h1:QY8M92nrzkmr798gCo3kmMyqXFzdQVpxLlGPRBij0P8=
|
||||
github.com/agnivade/levenshtein v1.1.1/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo=
|
||||
github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs=
|
||||
|
@ -1773,7 +1772,7 @@ github.com/masahiro331/go-mvn-version v0.0.0-20210429150710-d3157d602a08/go.mod
|
|||
github.com/matoous/godox v0.0.0-20190911065817-5d6d842e92eb/go.mod h1:1BELzlh859Sh1c6+90blK8lbYy0kwQf1bYlBhBysy1s=
|
||||
github.com/matryer/is v1.2.0 h1:92UTHpy8CDwaJ08GqLDzhhuixiBUUD1p3AU6PHddz4A=
|
||||
github.com/matryer/is v1.2.0/go.mod h1:2fLPjFQM9rhQ15aVEtbuwhJinnOqrmgXPNdZsdwlWXA=
|
||||
github.com/matryer/moq v0.2.3/go.mod h1:9RtPYjTnH1bSBIkpvtHkFN7nbWAnO7oRpdJkEIn6UtE=
|
||||
github.com/matryer/moq v0.2.7/go.mod h1:kITsx543GOENm48TUAQyJ9+SAvFSr7iGQXPoth/VUBk=
|
||||
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
|
||||
github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ=
|
||||
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
|
||||
|
@ -1856,7 +1855,6 @@ github.com/mitchellh/hashstructure v1.0.0/go.mod h1:QjSHrPWS+BGUVBYkbTZWEnOh3G1D
|
|||
github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY=
|
||||
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||
github.com/mitchellh/mapstructure v1.2.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||
github.com/mitchellh/mapstructure v1.3.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||
github.com/mitchellh/mapstructure v1.3.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||
github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||
|
@ -2463,9 +2461,9 @@ github.com/vbauerster/mpb/v7 v7.4.1 h1:NhLMWQ3gNg2KJR8oeA9lO8Xvq+eNPmixDmB6JEQOU
|
|||
github.com/vbauerster/mpb/v7 v7.4.1/go.mod h1:Ygg2mV9Vj9sQBWqsK2m2pidcf9H3s6bNKtqd3/M4gBo=
|
||||
github.com/vdemeester/k8s-pkg-credentialprovider v1.17.4/go.mod h1:inCTmtUdr5KJbreVojo06krnTgaeAz/Z7lynpPk/Q2c=
|
||||
github.com/vektah/gqlparser v1.1.2/go.mod h1:1ycwN7Ij5njmMkPPAOaRFY4rET2Enx7IkVv3vaXspKw=
|
||||
github.com/vektah/gqlparser/v2 v2.4.0/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0=
|
||||
github.com/vektah/gqlparser/v2 v2.4.5 h1:C02NsyEsL4TXJB7ndonqTfuQOL4XPIu0aAWugdmTgmc=
|
||||
github.com/vektah/gqlparser/v2 v2.4.5/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0=
|
||||
github.com/vektah/gqlparser/v2 v2.4.6 h1:Yjzp66g6oVq93Jihbi0qhGnf/6zIWjcm8H6gA27zstE=
|
||||
github.com/vektah/gqlparser/v2 v2.4.6/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0=
|
||||
github.com/vishvananda/netlink v0.0.0-20181108222139-023a6dafdcdf/go.mod h1:+SR5DhBJrl6ZM7CoCKvpw5BKroDKQ+PJqOg65H/2ktk=
|
||||
github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE=
|
||||
github.com/vishvananda/netlink v1.1.1-0.20201029203352-d40f9887b852/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho=
|
||||
|
@ -3220,7 +3218,6 @@ golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roY
|
|||
golang.org/x/tools v0.0.0-20200717024301-6ddee64345a6/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
||||
golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
||||
golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
||||
golang.org/x/tools v0.0.0-20200815165600-90abf76919f3/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
||||
golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
||||
golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE=
|
||||
golang.org/x/tools v0.0.0-20200916195026-c9a70fc28ce3/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU=
|
||||
|

@@ -60,3 +60,10 @@ linters-settings:
     - go.etcd.io/etcd/v3
     - go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc
     - go.opentelemetry.io/otel/exporters/otlp/otlptrace
+
+issues:
+  exclude-rules:
+    - path: pkg/extensions/search/schema.resolvers.go
+      linters:
+        - lll
+        - varnamelen

@@ -322,7 +322,7 @@ func TestServerCVEResponse(t *testing.T) {
 	// wait till ready
 	for {
 		res, err := resty.R().Get(url + constants.ExtSearchPrefix)
-		if err == nil && res.StatusCode() == 200 {
+		if err == nil && res.StatusCode() == 422 {
 			break
 		}

@@ -13,6 +13,7 @@ import (
 	"zotregistry.io/zot/pkg/api/constants"
 	"zotregistry.io/zot/pkg/extensions/search"
 	cveinfo "zotregistry.io/zot/pkg/extensions/search/cve"
+	"zotregistry.io/zot/pkg/extensions/search/gql_generated"
 	"zotregistry.io/zot/pkg/log"
 	"zotregistry.io/zot/pkg/storage"
 )

@@ -62,7 +63,7 @@ func SetupSearchRoutes(config *config.Config, router *mux.Router, storeControlle
 	log.Info().Msg("setting up search routes")
 
 	if config.Extensions.Search != nil && *config.Extensions.Search.Enable {
-		var resConfig search.Config
+		var resConfig gql_generated.Config
 
 		if config.Extensions.Search.CVE != nil {
 			resConfig = search.GetResolverConfig(log, storeController, true)

@@ -71,7 +72,7 @@ func SetupSearchRoutes(config *config.Config, router *mux.Router, storeControlle
 		}
 
 		router.PathPrefix(constants.ExtSearchPrefix).Methods("OPTIONS", "GET", "POST").
-			Handler(gqlHandler.NewDefaultServer(search.NewExecutableSchema(resConfig)))
+			Handler(gqlHandler.NewDefaultServer(gql_generated.NewExecutableSchema(resConfig)))
 	}
 }
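A minimal client-side sketch of the validation change the tests below exercise, using resty v1 as the tests do; the address and the value of constants.ExtSearchPrefix are assumptions here:

package main

import (
	"fmt"

	resty "gopkg.in/resty.v1"
)

func main() {
	baseURL := "http://127.0.0.1:8080"    // assumed local zot instance
	searchPrefix := "/v2/_zot/ext/search" // assumed value of constants.ExtSearchPrefix

	// No ?query= parameter: the gqlgen 0.17.13 server now answers 422 instead of 200.
	resp, err := resty.R().Get(baseURL + searchPrefix)
	if err == nil {
		fmt.Println(resp.StatusCode()) // expected: 422
	}

	// A well-formed query still returns 200.
	resp, err = resty.R().Get(baseURL + searchPrefix + "?query={ImageListWithLatestTag(){Name%20Latest}}")
	if err == nil {
		fmt.Println(resp.StatusCode()) // expected: 200
	}
}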

@@ -163,7 +163,7 @@ func signUsingNotary(port string) error {
 	}
 
 	// sign the image
-	image := fmt.Sprintf("localhost:%s/%s:%s", port, "zot-cve-test", "0.0.1")
+	image := fmt.Sprintf("localhost:%s/%s:%s", port, "zot-test", "0.0.1")
 
 	cmd = exec.Command("notation", "sign", "--key", "notation-sign-test", "--plain-http", image)
|
@ -311,7 +311,7 @@ func TestLatestTagSearchHTTP(t *testing.T) {
|
|||
resp, err = resty.R().Get(baseURL + graphqlQueryPrefix)
|
||||
So(resp, ShouldNotBeNil)
|
||||
So(err, ShouldBeNil)
|
||||
So(resp.StatusCode(), ShouldEqual, 200)
|
||||
So(resp.StatusCode(), ShouldEqual, 422)
|
||||
|
||||
resp, err = resty.R().Get(baseURL + graphqlQueryPrefix + "?query={ImageListWithLatestTag(){Name%20Latest}}")
|
||||
So(resp, ShouldNotBeNil)
|
||||
|
@ -451,7 +451,7 @@ func TestExpandedRepoInfo(t *testing.T) {
|
|||
resp, err = resty.R().Get(baseURL + graphqlQueryPrefix)
|
||||
So(resp, ShouldNotBeNil)
|
||||
So(err, ShouldBeNil)
|
||||
So(resp.StatusCode(), ShouldEqual, 200)
|
||||
So(resp.StatusCode(), ShouldEqual, 422)
|
||||
|
||||
query := "{ExpandedRepoInfo(repo:\"zot-cve-test\"){Manifests%20{Digest%20IsSigned%20Tag%20Layers%20{Size%20Digest}}}}"
|
||||
|
||||
|
@ -466,11 +466,14 @@ func TestExpandedRepoInfo(t *testing.T) {
|
|||
So(err, ShouldBeNil)
|
||||
So(len(responseStruct.ExpandedRepoInfo.RepoInfo.Manifests), ShouldNotEqual, 0)
|
||||
So(len(responseStruct.ExpandedRepoInfo.RepoInfo.Manifests[0].Layers), ShouldNotEqual, 0)
|
||||
found := false
|
||||
for _, m := range responseStruct.ExpandedRepoInfo.RepoInfo.Manifests {
|
||||
if m.Digest == "63a795ca90aa6e7cca60941e826810a4cd0a2e73ea02bf458241df2a5c973e29" {
|
||||
found = true
|
||||
So(m.IsSigned, ShouldEqual, false)
|
||||
}
|
||||
}
|
||||
So(found, ShouldEqual, true)
|
||||
|
||||
err = signUsingCosign(port)
|
||||
So(err, ShouldBeNil)
|
||||
|
@ -484,11 +487,14 @@ func TestExpandedRepoInfo(t *testing.T) {
|
|||
So(err, ShouldBeNil)
|
||||
So(len(responseStruct.ExpandedRepoInfo.RepoInfo.Manifests), ShouldNotEqual, 0)
|
||||
So(len(responseStruct.ExpandedRepoInfo.RepoInfo.Manifests[0].Layers), ShouldNotEqual, 0)
|
||||
found = false
|
||||
for _, m := range responseStruct.ExpandedRepoInfo.RepoInfo.Manifests {
|
||||
if m.Digest == "63a795ca90aa6e7cca60941e826810a4cd0a2e73ea02bf458241df2a5c973e29" {
|
||||
found = true
|
||||
So(m.IsSigned, ShouldEqual, true)
|
||||
}
|
||||
}
|
||||
So(found, ShouldEqual, true)
|
||||
|
||||
query = "{ExpandedRepoInfo(repo:\"\"){Manifests%20{Digest%20Tag%20IsSigned%20Layers%20{Size%20Digest}}}}"
|
||||
|
||||
|
@@ -497,7 +503,7 @@ func TestExpandedRepoInfo(t *testing.T) {
 		So(err, ShouldBeNil)
 		So(resp.StatusCode(), ShouldEqual, 200)
 
-		query = "{ExpandedRepoInfo(repo:\"zot-test\"){Manifests%20{Digest%20Tag%20IsSigned%20%Layers%20{Size%20Digest}}}}"
+		query = "{ExpandedRepoInfo(repo:\"zot-test\"){Manifests%20{Digest%20Tag%20IsSigned%20Layers%20{Size%20Digest}}}}"
 		resp, err = resty.R().Get(baseURL + graphqlQueryPrefix + "?query=" + query)
 		So(resp, ShouldNotBeNil)
 		So(err, ShouldBeNil)
|
@ -507,11 +513,14 @@ func TestExpandedRepoInfo(t *testing.T) {
|
|||
So(err, ShouldBeNil)
|
||||
So(len(responseStruct.ExpandedRepoInfo.RepoInfo.Manifests), ShouldNotEqual, 0)
|
||||
So(len(responseStruct.ExpandedRepoInfo.RepoInfo.Manifests[0].Layers), ShouldNotEqual, 0)
|
||||
found = false
|
||||
for _, m := range responseStruct.ExpandedRepoInfo.RepoInfo.Manifests {
|
||||
if m.Digest == "2bacca16b9df395fc855c14ccf50b12b58d35d468b8e7f25758aff90f89bf396" {
|
||||
found = true
|
||||
So(m.IsSigned, ShouldEqual, false)
|
||||
}
|
||||
}
|
||||
So(found, ShouldEqual, true)
|
||||
|
||||
err = signUsingNotary(port)
|
||||
So(err, ShouldBeNil)
|
||||
|
@ -525,11 +534,14 @@ func TestExpandedRepoInfo(t *testing.T) {
|
|||
So(err, ShouldBeNil)
|
||||
So(len(responseStruct.ExpandedRepoInfo.RepoInfo.Manifests), ShouldNotEqual, 0)
|
||||
So(len(responseStruct.ExpandedRepoInfo.RepoInfo.Manifests[0].Layers), ShouldNotEqual, 0)
|
||||
found = false
|
||||
for _, m := range responseStruct.ExpandedRepoInfo.RepoInfo.Manifests {
|
||||
if m.Digest == "2bacca16b9df395fc855c14ccf50b12b58d35d468b8e7f25758aff90f89bf396" {
|
||||
found = true
|
||||
So(m.IsSigned, ShouldEqual, true)
|
||||
}
|
||||
}
|
||||
So(found, ShouldEqual, true)
|
||||
|
||||
var manifestDigest digest.Digest
|
||||
manifestDigest, _, _ = GetOciLayoutDigests("../../../../test/data/zot-test")
|
||||
|
|
|
@@ -12,6 +12,7 @@ import (
 
 	v1 "github.com/google/go-containerregistry/pkg/v1"
 	"github.com/google/go-containerregistry/pkg/v1/types"
+	notreg "github.com/notaryproject/notation/pkg/registry"
 	godigest "github.com/opencontainers/go-digest"
 	ispec "github.com/opencontainers/image-spec/specs-go/v1"
 	"zotregistry.io/zot/errors"

@@ -202,13 +203,14 @@ func (olu OciLayoutUtils) GetImageTagsWithTimestamp(repo string) ([]TagInfo, err
 }
 
 // check notary signature corresponding to repo name, manifest digest and mediatype.
-func (olu OciLayoutUtils) checkNotarySignature(name, digest, mediaType string) bool {
+func (olu OciLayoutUtils) checkNotarySignature(name string, digest godigest.Digest) bool {
 	imageStore := olu.StoreController.GetImageStore(name)
+	mediaType := notreg.ArtifactTypeNotation
 
-	_, err := imageStore.GetReferrers(name, digest, mediaType)
+	_, err := imageStore.GetReferrers(name, digest.String(), mediaType)
 	if err != nil {
-		olu.Log.Info().Str("repo", name).Str("digest",
-			digest).Str("mediatype", mediaType).Msg("invalid notary signature")
+		olu.Log.Info().Err(err).Str("repo", name).Str("digest",
+			digest.String()).Str("mediatype", mediaType).Msg("invalid notary signature")
 
 		return false
 	}

@@ -217,17 +219,17 @@ func (olu OciLayoutUtils) checkNotarySignature(name, digest, mediaType string) b
 }
 
 // check cosign signature corresponding to manifest.
-func (olu OciLayoutUtils) checkCosignSignature(name, digest string) bool {
+func (olu OciLayoutUtils) checkCosignSignature(name string, digest godigest.Digest) bool {
 	imageStore := olu.StoreController.GetImageStore(name)
 
 	// if manifest is signed using cosign mechanism, cosign adds a new manifest.
 	// new manifest is tagged as sha256-<manifest-digest>.sig.
-	reference := fmt.Sprintf("sha256-%s.sig", digest)
+	reference := fmt.Sprintf("sha256-%s.sig", digest.Encoded())
 
 	_, _, _, err := imageStore.GetImageManifest(name, reference) // nolint: dogsled
 	if err != nil {
-		olu.Log.Info().Str("repo", name).Str("digest",
-			digest).Msg("invalid cosign signature")
+		olu.Log.Info().Err(err).Str("repo", name).Str("digest",
+			digest.String()).Msg("invalid cosign signature")
 
 		return false
 	}

@@ -238,9 +240,9 @@ func (olu OciLayoutUtils) checkCosignSignature(name, digest string) bool {
 // checks if manifest is signed or not
 // checks for notary or cosign signature
 // if cosign signature found it does not looks for notary signature.
-func (olu OciLayoutUtils) checkManifestSignature(name, digest, mediaType string) bool {
+func (olu OciLayoutUtils) checkManifestSignature(name string, digest godigest.Digest) bool {
 	if !olu.checkCosignSignature(name, digest) {
-		return olu.checkNotarySignature(name, digest, mediaType)
+		return olu.checkNotarySignature(name, digest)
 	}
 
 	return true

@@ -279,7 +281,7 @@ func (olu OciLayoutUtils) GetExpandedRepoInfo(name string) (RepoInfo, error) {
 			return RepoInfo{}, err
 		}
 
-		manifestInfo.IsSigned = olu.checkManifestSignature(name, man.Digest.Encoded(), man.MediaType)
+		manifestInfo.IsSigned = olu.checkManifestSignature(name, man.Digest)
 
 		layers := make([]Layer, 0)
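Since checkNotarySignature and checkCosignSignature now take a godigest.Digest instead of a string, it is worth noting how the two accessors used above differ; a small standalone illustration of the go-digest API (not zot code):

package main

import (
	"fmt"

	godigest "github.com/opencontainers/go-digest"
)

func main() {
	dgst := godigest.FromString("example manifest bytes")

	fmt.Println(dgst.String())  // "sha256:<hex>" -- the form passed to GetReferrers
	fmt.Println(dgst.Encoded()) // "<hex>"        -- the hex part only

	// cosign stores signatures in a manifest tagged sha256-<hex>.sig, hence Encoded().
	fmt.Println(fmt.Sprintf("sha256-%s.sig", dgst.Encoded()))
}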
@@ -449,7 +449,7 @@ func TestCVESearch(t *testing.T) {
 
 		resp, _ = resty.R().SetBasicAuth(username, passphrase).Get(baseURL + constants.ExtSearchPrefix)
 		So(resp, ShouldNotBeNil)
-		So(resp.StatusCode(), ShouldEqual, 200)
+		So(resp.StatusCode(), ShouldEqual, 422)
 
 		resp, _ = resty.R().SetBasicAuth(username, passphrase).Get(baseURL + constants.ExtSearchPrefix + "?query={CVEListForImage(image:\"zot-test:0.0.1\"){Tag%20CVEList{Id%20Description%20Severity%20PackageList{Name%20InstalledVersion%20FixedVersion}}}}")
 		So(resp, ShouldNotBeNil)
@@ -194,7 +194,7 @@ func TestDigestSearchHTTP(t *testing.T) {
 		resp, err = resty.R().Get(baseURL + constants.ExtSearchPrefix)
 		So(resp, ShouldNotBeNil)
 		So(err, ShouldBeNil)
-		So(resp.StatusCode(), ShouldEqual, 200)
+		So(resp.StatusCode(), ShouldEqual, 422)
 
 		// "sha" should match all digests in all images
 		resp, err = resty.R().Get(baseURL + constants.ExtSearchPrefix +

@@ -347,7 +347,7 @@ func TestDigestSearchHTTPSubPaths(t *testing.T) {
 		resp, err = resty.R().Get(baseURL + constants.ExtSearchPrefix)
 		So(resp, ShouldNotBeNil)
 		So(err, ShouldBeNil)
-		So(resp.StatusCode(), ShouldEqual, 200)
+		So(resp.StatusCode(), ShouldEqual, 422)
 
 		resp, err = resty.R().Get(baseURL + constants.ExtSearchPrefix +
 			"?query={ImageListForDigest(id:\"sha\"){Name%20Tags}}")
File diff suppressed because it is too large

@@ -1,6 +1,6 @@
 // Code generated by github.com/99designs/gqlgen, DO NOT EDIT.
 
-package search
+package gql_generated
 
 import (
 	"time"
pkg/extensions/search/gqlgen.yml (new file, 63 lines)

@@ -0,0 +1,63 @@
# Where are all the schema files located? globs are supported eg src/**/*.graphqls
schema:
  - ./*.graphql

# Where should the generated server code go?
exec:
  filename: gql_generated/generated.go
  package: gql_generated

# Uncomment to enable federation
# federation:
#   filename: graph/gql_generated/federation.go
#   package: generated

# Where should any generated models go?
model:
  filename: gql_generated/models_gen.go
  package: gql_generated

# Where should the resolver implementations go?
resolver:
  layout: follow-schema
  dir: ./
  package: search

# Optional: turn on use `gqlgen:"fieldName"` tags in your models
# struct_tag: json

# Optional: turn on to use []Thing instead of []*Thing
# omit_slice_element_pointers: false

# Optional: turn off to make struct-type struct fields not use pointers
# e.g. type Thing struct { FieldA OtherThing } instead of { FieldA *OtherThing }
# struct_fields_always_pointers: true

# Optional: turn off to make resolvers return values instead of pointers for structs
# resolvers_always_return_pointers: true

# Optional: set to speed up generation time by not performing a final validation pass.
# skip_validation: true

# gqlgen will search for any type names in the schema in these go packages
# if they match it will use them, otherwise it will generate them.
autobind:
  # - "zotregistry.io/zot/pkg/extensions/search/gql_generated"

# This section declares type mapping between the GraphQL and go type systems
#
# The first line in each type will be used as defaults for resolver arguments and
# modelgen, the others will be allowed when binding to fields. Configure them to
# your liking
models:
  ID:
    model:
      - github.com/99designs/gqlgen/graphql.ID
      - github.com/99designs/gqlgen/graphql.Int
      - github.com/99designs/gqlgen/graphql.Int64
      - github.com/99designs/gqlgen/graphql.Int32
  Int:
    model:
      - github.com/99designs/gqlgen/graphql.Int
      - github.com/99designs/gqlgen/graphql.Int64
      - github.com/99designs/gqlgen/graphql.Int32
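To see how the pieces above fit together at runtime, here is a condensed sketch of the wiring performed in extension.go earlier in this diff: GetResolverConfig packs the hand-written Resolver into gql_generated.Config, and gqlgen's default server serves the generated schema. Import paths follow this commit; the route and port are assumptions:

package main

import (
	"net/http"

	gqlHandler "github.com/99designs/gqlgen/graphql/handler"

	"zotregistry.io/zot/pkg/extensions/search/gql_generated"
)

func main() {
	// In zot, resConfig comes from search.GetResolverConfig(log, storeController, enableCVE);
	// a nil resolver set is used here only to keep the sketch self-contained.
	resConfig := gql_generated.Config{Resolvers: nil}

	srv := gqlHandler.NewDefaultServer(gql_generated.NewExecutableSchema(resConfig))

	http.Handle("/v2/_zot/ext/search", srv) // route assumed; zot uses constants.ExtSearchPrefix
	_ = http.ListenAndServe(":8080", nil)
}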

@@ -1,12 +1,11 @@
package search

//go:generate go run github.com/99designs/gqlgen
// This file will not be regenerated automatically.
//
// It serves as dependency injection for your app, add any dependencies you require here.

import (
	"context"
	"fmt"
	"strconv"
	"strings"

	godigest "github.com/opencontainers/go-digest"
	"zotregistry.io/zot/pkg/log" // nolint: gci

@@ -14,6 +13,7 @@ import (
 	"zotregistry.io/zot/pkg/extensions/search/common"
 	cveinfo "zotregistry.io/zot/pkg/extensions/search/cve"
 	digestinfo "zotregistry.io/zot/pkg/extensions/search/digest"
+	"zotregistry.io/zot/pkg/extensions/search/gql_generated"
 	"zotregistry.io/zot/pkg/storage"
 ) // THIS CODE IS A STARTING POINT ONLY. IT WILL NOT BE UPDATED WITH SCHEMA CHANGES.
@@ -25,22 +25,15 @@ type Resolver struct {
 	log log.Logger
 }
 
-// Query ...
-func (r *Resolver) Query() QueryResolver {
-	return &queryResolver{r}
-}
-
-type queryResolver struct{ *Resolver }
-
 type cveDetail struct {
 	Title       string
 	Description string
 	Severity    string
-	PackageList []*PackageInfo
+	PackageList []*gql_generated.PackageInfo
 }
 
 // GetResolverConfig ...
-func GetResolverConfig(log log.Logger, storeController storage.StoreController, enableCVE bool) Config {
+func GetResolverConfig(log log.Logger, storeController storage.StoreController, enableCVE bool) gql_generated.Config {
 	var cveInfo *cveinfo.CveInfo
 
 	var err error

@@ -56,200 +49,16 @@ func GetResolverConfig(log log.Logger, storeController storage.StoreController,
 	resConfig := &Resolver{cveInfo: cveInfo, storeController: storeController, digestInfo: digestInfo, log: log}
 
-	return Config{
-		Resolvers: resConfig, Directives: DirectiveRoot{},
-		Complexity: ComplexityRoot{},
+	return gql_generated.Config{
+		Resolvers: resConfig, Directives: gql_generated.DirectiveRoot{},
+		Complexity: gql_generated.ComplexityRoot{},
 	}
 }
|
||||
func (r *queryResolver) ExpandedRepoInfo(ctx context.Context, name string) (*RepoInfo, error) {
|
||||
olu := common.NewOciLayoutUtils(r.storeController, r.log)
|
||||
|
||||
repo, err := olu.GetExpandedRepoInfo(name)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("error getting repos")
|
||||
|
||||
return &RepoInfo{}, err
|
||||
}
|
||||
|
||||
// repos type is of common deep copy this to search
|
||||
repoInfo := &RepoInfo{}
|
||||
|
||||
manifests := make([]*ManifestInfo, 0)
|
||||
|
||||
for _, manifest := range repo.Manifests {
|
||||
tag := manifest.Tag
|
||||
|
||||
digest := manifest.Digest
|
||||
|
||||
isSigned := manifest.IsSigned
|
||||
|
||||
manifestInfo := &ManifestInfo{Tag: &tag, Digest: &digest, IsSigned: &isSigned}
|
||||
|
||||
layers := make([]*LayerInfo, 0)
|
||||
|
||||
for _, l := range manifest.Layers {
|
||||
size := l.Size
|
||||
|
||||
digest := l.Digest
|
||||
|
||||
layerInfo := &LayerInfo{Digest: &digest, Size: &size}
|
||||
|
||||
layers = append(layers, layerInfo)
|
||||
}
|
||||
|
||||
manifestInfo.Layers = layers
|
||||
|
||||
manifests = append(manifests, manifestInfo)
|
||||
}
|
||||
|
||||
repoInfo.Manifests = manifests
|
||||
|
||||
return repoInfo, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) CVEListForImage(ctx context.Context, image string) (*CVEResultForImage, error) {
|
||||
trivyCtx := r.cveInfo.GetTrivyContext(image)
|
||||
|
||||
r.log.Info().Str("image", image).Msg("scanning image")
|
||||
|
||||
isValidImage, err := r.cveInfo.LayoutUtils.IsValidImageFormat(image)
|
||||
if !isValidImage {
|
||||
r.log.Debug().Str("image", image).Msg("image media type not supported for scanning")
|
||||
|
||||
return &CVEResultForImage{}, err
|
||||
}
|
||||
|
||||
report, err := cveinfo.ScanImage(trivyCtx.Ctx)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to scan image repository")
|
||||
|
||||
return &CVEResultForImage{}, err
|
||||
}
|
||||
|
||||
var copyImgTag string
|
||||
|
||||
if strings.Contains(image, ":") {
|
||||
copyImgTag = strings.Split(image, ":")[1]
|
||||
}
|
||||
|
||||
cveidMap := make(map[string]cveDetail)
|
||||
|
||||
for _, result := range report.Results {
|
||||
for _, vulnerability := range result.Vulnerabilities {
|
||||
pkgName := vulnerability.PkgName
|
||||
|
||||
installedVersion := vulnerability.InstalledVersion
|
||||
|
||||
var fixedVersion string
|
||||
if vulnerability.FixedVersion != "" {
|
||||
fixedVersion = vulnerability.FixedVersion
|
||||
} else {
|
||||
fixedVersion = "Not Specified"
|
||||
}
|
||||
|
||||
_, ok := cveidMap[vulnerability.VulnerabilityID]
|
||||
if ok {
|
||||
cveDetailStruct := cveidMap[vulnerability.VulnerabilityID]
|
||||
|
||||
pkgList := cveDetailStruct.PackageList
|
||||
|
||||
pkgList = append(pkgList,
|
||||
&PackageInfo{Name: &pkgName, InstalledVersion: &installedVersion, FixedVersion: &fixedVersion})
|
||||
|
||||
cveDetailStruct.PackageList = pkgList
|
||||
|
||||
cveidMap[vulnerability.VulnerabilityID] = cveDetailStruct
|
||||
} else {
|
||||
newPkgList := make([]*PackageInfo, 0)
|
||||
|
||||
newPkgList = append(newPkgList,
|
||||
&PackageInfo{Name: &pkgName, InstalledVersion: &installedVersion, FixedVersion: &fixedVersion})
|
||||
|
||||
cveidMap[vulnerability.VulnerabilityID] = cveDetail{
|
||||
Title: vulnerability.Title,
|
||||
Description: vulnerability.Description, Severity: vulnerability.Severity, PackageList: newPkgList,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cveids := []*Cve{}
|
||||
|
||||
for id, cveDetail := range cveidMap {
|
||||
vulID := id
|
||||
|
||||
desc := cveDetail.Description
|
||||
|
||||
title := cveDetail.Title
|
||||
|
||||
severity := cveDetail.Severity
|
||||
|
||||
pkgList := cveDetail.PackageList
|
||||
|
||||
cveids = append(cveids,
|
||||
&Cve{ID: &vulID, Title: &title, Description: &desc, Severity: &severity, PackageList: pkgList})
|
||||
}
|
||||
|
||||
return &CVEResultForImage{Tag: ©ImgTag, CVEList: cveids}, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) ImageListForCve(ctx context.Context, cvid string) ([]*ImgResultForCve, error) {
|
||||
finalCveResult := []*ImgResultForCve{}
|
||||
|
||||
r.log.Info().Msg("extracting repositories")
|
||||
|
||||
defaultStore := r.storeController.DefaultStore
|
||||
|
||||
defaultTrivyCtx := r.cveInfo.CveTrivyController.DefaultCveConfig
|
||||
|
||||
repoList, err := defaultStore.GetRepositories()
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to search repositories")
|
||||
|
||||
return finalCveResult, err
|
||||
}
|
||||
|
||||
r.cveInfo.Log.Info().Msg("scanning each global repository")
|
||||
|
||||
cveResult, err := r.getImageListForCVE(repoList, cvid, defaultStore, defaultTrivyCtx)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("error getting cve list for global repositories")
|
||||
|
||||
return finalCveResult, err
|
||||
}
|
||||
|
||||
finalCveResult = append(finalCveResult, cveResult...)
|
||||
|
||||
subStore := r.storeController.SubStore
|
||||
|
||||
for route, store := range subStore {
|
||||
subRepoList, err := store.GetRepositories()
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to search repositories")
|
||||
|
||||
return cveResult, err
|
||||
}
|
||||
|
||||
subTrivyCtx := r.cveInfo.CveTrivyController.SubCveConfig[route]
|
||||
|
||||
subCveResult, err := r.getImageListForCVE(subRepoList, cvid, store, subTrivyCtx)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to get cve result for sub repositories")
|
||||
|
||||
return finalCveResult, err
|
||||
}
|
||||
|
||||
finalCveResult = append(finalCveResult, subCveResult...)
|
||||
}
|
||||
|
||||
return finalCveResult, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) getImageListForCVE(repoList []string, cvid string, imgStore storage.ImageStore,
|
||||
trivyCtx *cveinfo.TrivyCtx,
|
||||
) ([]*ImgResultForCve, error) {
|
||||
cveResult := []*ImgResultForCve{}
|
||||
) ([]*gql_generated.ImgResultForCve, error) {
|
||||
cveResult := []*gql_generated.ImgResultForCve{}
|
||||
|
||||
for _, repo := range repoList {
|
||||
r.log.Info().Str("repo", repo).Msg("extracting list of tags available in image repo")
|
||||
|
@ -264,142 +73,17 @@ func (r *queryResolver) getImageListForCVE(repoList []string, cvid string, imgSt
|
|||
}
|
||||
|
||||
if len(tags) != 0 {
|
||||
cveResult = append(cveResult, &ImgResultForCve{Name: &name, Tags: tags})
|
||||
cveResult = append(cveResult, &gql_generated.ImgResultForCve{Name: &name, Tags: tags})
|
||||
}
|
||||
}
|
||||
|
||||
return cveResult, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) ImageListWithCVEFixed(ctx context.Context, cvid, image string) (*ImgResultForFixedCve, error) { // nolint: lll
|
||||
imgResultForFixedCVE := &ImgResultForFixedCve{}
|
||||
|
||||
r.log.Info().Str("image", image).Msg("extracting list of tags available in image")
|
||||
|
||||
tagsInfo, err := r.cveInfo.LayoutUtils.GetImageTagsWithTimestamp(image)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to read image tags")
|
||||
|
||||
return imgResultForFixedCVE, err
|
||||
}
|
||||
|
||||
infectedTags := make([]common.TagInfo, 0)
|
||||
|
||||
var hasCVE bool
|
||||
|
||||
for _, tag := range tagsInfo {
|
||||
image := fmt.Sprintf("%s:%s", image, tag.Name)
|
||||
|
||||
isValidImage, _ := r.cveInfo.LayoutUtils.IsValidImageFormat(image)
|
||||
if !isValidImage {
|
||||
r.log.Debug().Str("image",
|
||||
fmt.Sprintf("%s:%s", image, tag.Name)).
|
||||
Msg("image media type not supported for scanning, adding as an infected image")
|
||||
|
||||
infectedTags = append(infectedTags, common.TagInfo{Name: tag.Name, Timestamp: tag.Timestamp})
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
trivyCtx := r.cveInfo.GetTrivyContext(image)
|
||||
|
||||
r.cveInfo.Log.Info().Str("image", fmt.Sprintf("%s:%s", image, tag.Name)).Msg("scanning image")
|
||||
|
||||
report, err := cveinfo.ScanImage(trivyCtx.Ctx)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).
|
||||
Str("image", fmt.Sprintf("%s:%s", image, tag.Name)).Msg("unable to scan image")
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
hasCVE = false
|
||||
|
||||
for _, result := range report.Results {
|
||||
for _, vulnerability := range result.Vulnerabilities {
|
||||
if vulnerability.VulnerabilityID == cvid {
|
||||
hasCVE = true
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if hasCVE {
|
||||
infectedTags = append(infectedTags, common.TagInfo{Name: tag.Name, Timestamp: tag.Timestamp, Digest: tag.Digest})
|
||||
}
|
||||
}
|
||||
|
||||
var finalTagList []*TagInfo
|
||||
|
||||
if len(infectedTags) != 0 {
|
||||
r.log.Info().Msg("comparing fixed tags timestamp")
|
||||
|
||||
fixedTags := common.GetFixedTags(tagsInfo, infectedTags)
|
||||
|
||||
finalTagList = getGraphqlCompatibleTags(fixedTags)
|
||||
} else {
|
||||
r.log.Info().Str("image", image).Str("cve-id", cvid).Msg("image does not contain any tag that have given cve")
|
||||
|
||||
finalTagList = getGraphqlCompatibleTags(tagsInfo)
|
||||
}
|
||||
|
||||
imgResultForFixedCVE = &ImgResultForFixedCve{Tags: finalTagList}
|
||||
|
||||
return imgResultForFixedCVE, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) ImageListForDigest(ctx context.Context, digestID string) ([]*ImgResultForDigest, error) {
|
||||
imgResultForDigest := []*ImgResultForDigest{}
|
||||
|
||||
r.log.Info().Msg("extracting repositories")
|
||||
|
||||
defaultStore := r.storeController.DefaultStore
|
||||
|
||||
repoList, err := defaultStore.GetRepositories()
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to search repositories")
|
||||
|
||||
return imgResultForDigest, err
|
||||
}
|
||||
|
||||
r.log.Info().Msg("scanning each global repository")
|
||||
|
||||
partialImgResultForDigest, err := r.getImageListForDigest(repoList, digestID)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to get image and tag list for global repositories")
|
||||
|
||||
return imgResultForDigest, err
|
||||
}
|
||||
|
||||
imgResultForDigest = append(imgResultForDigest, partialImgResultForDigest...)
|
||||
|
||||
subStore := r.storeController.SubStore
|
||||
for _, store := range subStore {
|
||||
subRepoList, err := store.GetRepositories()
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to search sub-repositories")
|
||||
|
||||
return imgResultForDigest, err
|
||||
}
|
||||
|
||||
partialImgResultForDigest, err = r.getImageListForDigest(subRepoList, digestID)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to get image and tag list for sub-repositories")
|
||||
|
||||
return imgResultForDigest, err
|
||||
}
|
||||
|
||||
imgResultForDigest = append(imgResultForDigest, partialImgResultForDigest...)
|
||||
}
|
||||
|
||||
return imgResultForDigest, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) getImageListForDigest(repoList []string,
|
||||
digest string,
|
||||
) ([]*ImgResultForDigest, error) {
|
||||
imgResultForDigest := []*ImgResultForDigest{}
|
||||
) ([]*gql_generated.ImgResultForDigest, error) {
|
||||
imgResultForDigest := []*gql_generated.ImgResultForDigest{}
|
||||
|
||||
var errResult error
|
||||
|
||||
|
@ -418,51 +102,15 @@ func (r *queryResolver) getImageListForDigest(repoList []string,
|
|||
if len(tags) != 0 {
|
||||
name := repo
|
||||
|
||||
imgResultForDigest = append(imgResultForDigest, &ImgResultForDigest{Name: &name, Tags: tags})
|
||||
imgResultForDigest = append(imgResultForDigest, &gql_generated.ImgResultForDigest{Name: &name, Tags: tags})
|
||||
}
|
||||
}
|
||||
|
||||
return imgResultForDigest, errResult
|
||||
}
|
||||
|
||||
func (r *queryResolver) ImageListWithLatestTag(ctx context.Context) ([]*ImageInfo, error) {
|
||||
r.log.Info().Msg("extension api: finding image list")
|
||||
|
||||
imageList := make([]*ImageInfo, 0)
|
||||
|
||||
defaultStore := r.storeController.DefaultStore
|
||||
|
||||
dsImageList, err := r.getImageListWithLatestTag(defaultStore)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("extension api: error extracting default store image list")
|
||||
|
||||
return imageList, err
|
||||
}
|
||||
|
||||
if len(dsImageList) != 0 {
|
||||
imageList = append(imageList, dsImageList...)
|
||||
}
|
||||
|
||||
subStore := r.storeController.SubStore
|
||||
|
||||
for _, store := range subStore {
|
||||
ssImageList, err := r.getImageListWithLatestTag(store)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("extension api: error extracting default store image list")
|
||||
|
||||
return imageList, err
|
||||
}
|
||||
|
||||
if len(ssImageList) != 0 {
|
||||
imageList = append(imageList, ssImageList...)
|
||||
}
|
||||
}
|
||||
|
||||
return imageList, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) getImageListWithLatestTag(store storage.ImageStore) ([]*ImageInfo, error) {
|
||||
results := make([]*ImageInfo, 0)
|
||||
func (r *queryResolver) getImageListWithLatestTag(store storage.ImageStore) ([]*gql_generated.ImageInfo, error) {
|
||||
results := make([]*gql_generated.ImageInfo, 0)
|
||||
|
||||
repoList, err := store.GetRepositories()
|
||||
if err != nil {
|
||||
|
@ -528,7 +176,7 @@ func (r *queryResolver) getImageListWithLatestTag(store storage.ImageStore) ([]*
|
|||
// Read categories
|
||||
categories := common.GetCategories(labels)
|
||||
|
||||
results = append(results, &ImageInfo{
|
||||
results = append(results, &gql_generated.ImageInfo{
|
||||
Name: &name, Latest: &latestTag.Name,
|
||||
Description: &desc, Licenses: &license, Vendor: &vendor,
|
||||
Labels: &categories, Size: &size, LastUpdated: &latestTag.Timestamp,
|
||||
|
@ -538,14 +186,14 @@ func (r *queryResolver) getImageListWithLatestTag(store storage.ImageStore) ([]*
|
|||
return results, nil
|
||||
}
|
||||
|
||||
func getGraphqlCompatibleTags(fixedTags []common.TagInfo) []*TagInfo {
|
||||
finalTagList := make([]*TagInfo, 0)
|
||||
func getGraphqlCompatibleTags(fixedTags []common.TagInfo) []*gql_generated.TagInfo {
|
||||
finalTagList := make([]*gql_generated.TagInfo, 0)
|
||||
|
||||
for _, tag := range fixedTags {
|
||||
fixTag := tag
|
||||
|
||||
finalTagList = append(finalTagList,
|
||||
&TagInfo{Name: &fixTag.Name, Digest: &fixTag.Digest, Timestamp: &fixTag.Timestamp})
|
||||
&gql_generated.TagInfo{Name: &fixTag.Name, Digest: &fixTag.Digest, Timestamp: &fixTag.Timestamp})
|
||||
}
|
||||
|
||||
return finalTagList
|
||||
|
|
pkg/extensions/search/schema.resolvers.go (new file, 370 lines)
@ -0,0 +1,370 @@
|
|||
package search
|
||||
|
||||
// This file will be automatically regenerated based on the schema, any resolver implementations
|
||||
// will be copied through when generating and any unknown code will be moved to the end.
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"zotregistry.io/zot/pkg/extensions/search/common"
|
||||
cveinfo "zotregistry.io/zot/pkg/extensions/search/cve"
|
||||
"zotregistry.io/zot/pkg/extensions/search/gql_generated"
|
||||
)
|
||||
|
||||
// CVEListForImage is the resolver for the CVEListForImage field.
|
||||
func (r *queryResolver) CVEListForImage(ctx context.Context, image string) (*gql_generated.CVEResultForImage, error) {
|
||||
trivyCtx := r.cveInfo.GetTrivyContext(image)
|
||||
|
||||
r.log.Info().Str("image", image).Msg("scanning image")
|
||||
|
||||
isValidImage, err := r.cveInfo.LayoutUtils.IsValidImageFormat(image)
|
||||
if !isValidImage {
|
||||
r.log.Debug().Str("image", image).Msg("image media type not supported for scanning")
|
||||
|
||||
return &gql_generated.CVEResultForImage{}, err
|
||||
}
|
||||
|
||||
report, err := cveinfo.ScanImage(trivyCtx.Ctx)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to scan image repository")
|
||||
|
||||
return &gql_generated.CVEResultForImage{}, err
|
||||
}
|
||||
|
||||
var copyImgTag string
|
||||
|
||||
if strings.Contains(image, ":") {
|
||||
copyImgTag = strings.Split(image, ":")[1]
|
||||
}
|
||||
|
||||
cveidMap := make(map[string]cveDetail)
|
||||
|
||||
for _, result := range report.Results {
|
||||
for _, vulnerability := range result.Vulnerabilities {
|
||||
pkgName := vulnerability.PkgName
|
||||
|
||||
installedVersion := vulnerability.InstalledVersion
|
||||
|
||||
var fixedVersion string
|
||||
if vulnerability.FixedVersion != "" {
|
||||
fixedVersion = vulnerability.FixedVersion
|
||||
} else {
|
||||
fixedVersion = "Not Specified"
|
||||
}
|
||||
|
||||
_, ok := cveidMap[vulnerability.VulnerabilityID]
|
||||
if ok {
|
||||
cveDetailStruct := cveidMap[vulnerability.VulnerabilityID]
|
||||
|
||||
pkgList := cveDetailStruct.PackageList
|
||||
|
||||
pkgList = append(pkgList,
|
||||
&gql_generated.PackageInfo{Name: &pkgName, InstalledVersion: &installedVersion, FixedVersion: &fixedVersion})
|
||||
|
||||
cveDetailStruct.PackageList = pkgList
|
||||
|
||||
cveidMap[vulnerability.VulnerabilityID] = cveDetailStruct
|
||||
} else {
|
||||
newPkgList := make([]*gql_generated.PackageInfo, 0)
|
||||
|
||||
newPkgList = append(newPkgList,
|
||||
&gql_generated.PackageInfo{Name: &pkgName, InstalledVersion: &installedVersion, FixedVersion: &fixedVersion})
|
||||
|
||||
cveidMap[vulnerability.VulnerabilityID] = cveDetail{
|
||||
Title: vulnerability.Title,
|
||||
Description: vulnerability.Description, Severity: vulnerability.Severity, PackageList: newPkgList,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cveids := []*gql_generated.Cve{}
|
||||
|
||||
for id, cveDetail := range cveidMap {
|
||||
vulID := id
|
||||
|
||||
desc := cveDetail.Description
|
||||
|
||||
title := cveDetail.Title
|
||||
|
||||
severity := cveDetail.Severity
|
||||
|
||||
pkgList := cveDetail.PackageList
|
||||
|
||||
cveids = append(cveids,
|
||||
&gql_generated.Cve{ID: &vulID, Title: &title, Description: &desc, Severity: &severity, PackageList: pkgList})
|
||||
}
|
||||
|
||||
return &gql_generated.CVEResultForImage{Tag: ©ImgTag, CVEList: cveids}, nil
|
||||
}
|
||||
|
||||
// ImageListForCve is the resolver for the ImageListForCVE field.
|
||||
func (r *queryResolver) ImageListForCve(ctx context.Context, id string) ([]*gql_generated.ImgResultForCve, error) {
|
||||
finalCveResult := []*gql_generated.ImgResultForCve{}
|
||||
|
||||
r.log.Info().Msg("extracting repositories")
|
||||
|
||||
defaultStore := r.storeController.DefaultStore
|
||||
|
||||
defaultTrivyCtx := r.cveInfo.CveTrivyController.DefaultCveConfig
|
||||
|
||||
repoList, err := defaultStore.GetRepositories()
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to search repositories")
|
||||
|
||||
return finalCveResult, err
|
||||
}
|
||||
|
||||
r.cveInfo.Log.Info().Msg("scanning each global repository")
|
||||
|
||||
cveResult, err := r.getImageListForCVE(repoList, id, defaultStore, defaultTrivyCtx)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("error getting cve list for global repositories")
|
||||
|
||||
return finalCveResult, err
|
||||
}
|
||||
|
||||
finalCveResult = append(finalCveResult, cveResult...)
|
||||
|
||||
subStore := r.storeController.SubStore
|
||||
|
||||
for route, store := range subStore {
|
||||
subRepoList, err := store.GetRepositories()
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to search repositories")
|
||||
|
||||
return cveResult, err
|
||||
}
|
||||
|
||||
subTrivyCtx := r.cveInfo.CveTrivyController.SubCveConfig[route]
|
||||
|
||||
subCveResult, err := r.getImageListForCVE(subRepoList, id, store, subTrivyCtx)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to get cve result for sub repositories")
|
||||
|
||||
return finalCveResult, err
|
||||
}
|
||||
|
||||
finalCveResult = append(finalCveResult, subCveResult...)
|
||||
}
|
||||
|
||||
return finalCveResult, nil
|
||||
}
|
||||
|
||||
// ImageListWithCVEFixed is the resolver for the ImageListWithCVEFixed field.
|
||||
func (r *queryResolver) ImageListWithCVEFixed(ctx context.Context, id string, image string) (*gql_generated.ImgResultForFixedCve, error) {
|
||||
imgResultForFixedCVE := &gql_generated.ImgResultForFixedCve{}
|
||||
|
||||
r.log.Info().Str("image", image).Msg("extracting list of tags available in image")
|
||||
|
||||
tagsInfo, err := r.cveInfo.LayoutUtils.GetImageTagsWithTimestamp(image)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to read image tags")
|
||||
|
||||
return imgResultForFixedCVE, err
|
||||
}
|
||||
|
||||
infectedTags := make([]common.TagInfo, 0)
|
||||
|
||||
var hasCVE bool
|
||||
|
||||
for _, tag := range tagsInfo {
|
||||
image := fmt.Sprintf("%s:%s", image, tag.Name)
|
||||
|
||||
isValidImage, _ := r.cveInfo.LayoutUtils.IsValidImageFormat(image)
|
||||
if !isValidImage {
|
||||
r.log.Debug().Str("image",
|
||||
fmt.Sprintf("%s:%s", image, tag.Name)).
|
||||
Msg("image media type not supported for scanning, adding as an infected image")
|
||||
|
||||
infectedTags = append(infectedTags, common.TagInfo{Name: tag.Name, Timestamp: tag.Timestamp})
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
trivyCtx := r.cveInfo.GetTrivyContext(image)
|
||||
|
||||
r.cveInfo.Log.Info().Str("image", fmt.Sprintf("%s:%s", image, tag.Name)).Msg("scanning image")
|
||||
|
||||
report, err := cveinfo.ScanImage(trivyCtx.Ctx)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).
|
||||
Str("image", fmt.Sprintf("%s:%s", image, tag.Name)).Msg("unable to scan image")
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
hasCVE = false
|
||||
|
||||
for _, result := range report.Results {
|
||||
for _, vulnerability := range result.Vulnerabilities {
|
||||
if vulnerability.VulnerabilityID == id {
|
||||
hasCVE = true
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if hasCVE {
|
||||
infectedTags = append(infectedTags, common.TagInfo{Name: tag.Name, Timestamp: tag.Timestamp, Digest: tag.Digest})
|
||||
}
|
||||
}
|
||||
|
||||
var finalTagList []*gql_generated.TagInfo
|
||||
|
||||
if len(infectedTags) != 0 {
|
||||
r.log.Info().Msg("comparing fixed tags timestamp")
|
||||
|
||||
fixedTags := common.GetFixedTags(tagsInfo, infectedTags)
|
||||
|
||||
finalTagList = getGraphqlCompatibleTags(fixedTags)
|
||||
} else {
|
||||
r.log.Info().Str("image", image).Str("cve-id", id).Msg("image does not contain any tag that have given cve")
|
||||
|
||||
finalTagList = getGraphqlCompatibleTags(tagsInfo)
|
||||
}
|
||||
|
||||
imgResultForFixedCVE = &gql_generated.ImgResultForFixedCve{Tags: finalTagList}
|
||||
|
||||
return imgResultForFixedCVE, nil
|
||||
}
|
||||
|
||||
// ImageListForDigest is the resolver for the ImageListForDigest field.
|
||||
func (r *queryResolver) ImageListForDigest(ctx context.Context, id string) ([]*gql_generated.ImgResultForDigest, error) {
|
||||
imgResultForDigest := []*gql_generated.ImgResultForDigest{}
|
||||
|
||||
r.log.Info().Msg("extracting repositories")
|
||||
|
||||
defaultStore := r.storeController.DefaultStore
|
||||
|
||||
repoList, err := defaultStore.GetRepositories()
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to search repositories")
|
||||
|
||||
return imgResultForDigest, err
|
||||
}
|
||||
|
||||
r.log.Info().Msg("scanning each global repository")
|
||||
|
||||
partialImgResultForDigest, err := r.getImageListForDigest(repoList, id)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to get image and tag list for global repositories")
|
||||
|
||||
return imgResultForDigest, err
|
||||
}
|
||||
|
||||
imgResultForDigest = append(imgResultForDigest, partialImgResultForDigest...)
|
||||
|
||||
subStore := r.storeController.SubStore
|
||||
for _, store := range subStore {
|
||||
subRepoList, err := store.GetRepositories()
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to search sub-repositories")
|
||||
|
||||
return imgResultForDigest, err
|
||||
}
|
||||
|
||||
partialImgResultForDigest, err = r.getImageListForDigest(subRepoList, id)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("unable to get image and tag list for sub-repositories")
|
||||
|
||||
return imgResultForDigest, err
|
||||
}
|
||||
|
||||
imgResultForDigest = append(imgResultForDigest, partialImgResultForDigest...)
|
||||
}
|
||||
|
||||
return imgResultForDigest, nil
|
||||
}
|
||||
|
||||
// ImageListWithLatestTag is the resolver for the ImageListWithLatestTag field.
|
||||
func (r *queryResolver) ImageListWithLatestTag(ctx context.Context) ([]*gql_generated.ImageInfo, error) {
|
||||
r.log.Info().Msg("extension api: finding image list")
|
||||
|
||||
imageList := make([]*gql_generated.ImageInfo, 0)
|
||||
|
||||
defaultStore := r.storeController.DefaultStore
|
||||
|
||||
dsImageList, err := r.getImageListWithLatestTag(defaultStore)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("extension api: error extracting default store image list")
|
||||
|
||||
return imageList, err
|
||||
}
|
||||
|
||||
if len(dsImageList) != 0 {
|
||||
imageList = append(imageList, dsImageList...)
|
||||
}
|
||||
|
||||
subStore := r.storeController.SubStore
|
||||
|
||||
for _, store := range subStore {
|
||||
ssImageList, err := r.getImageListWithLatestTag(store)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msg("extension api: error extracting default store image list")
|
||||
|
||||
return imageList, err
|
||||
}
|
||||
|
||||
if len(ssImageList) != 0 {
|
||||
imageList = append(imageList, ssImageList...)
|
||||
}
|
||||
}
|
||||
|
||||
return imageList, nil
|
||||
}
|
||||
|
||||
// ExpandedRepoInfo is the resolver for the ExpandedRepoInfo field.
|
||||
func (r *queryResolver) ExpandedRepoInfo(ctx context.Context, repo string) (*gql_generated.RepoInfo, error) {
|
||||
olu := common.NewOciLayoutUtils(r.storeController, r.log)
|
||||
|
||||
origRepoInfo, err := olu.GetExpandedRepoInfo(repo)
|
||||
if err != nil {
|
||||
r.log.Error().Err(err).Msgf("error getting repo '%s'", repo)
|
||||
|
||||
return &gql_generated.RepoInfo{}, err
|
||||
}
|
||||
|
||||
// repos type is of common deep copy this to search
|
||||
repoInfo := &gql_generated.RepoInfo{}
|
||||
|
||||
manifests := make([]*gql_generated.ManifestInfo, 0)
|
||||
|
||||
for _, manifest := range origRepoInfo.Manifests {
|
||||
tag := manifest.Tag
|
||||
|
||||
digest := manifest.Digest
|
||||
|
||||
isSigned := manifest.IsSigned
|
||||
|
||||
manifestInfo := &gql_generated.ManifestInfo{Tag: &tag, Digest: &digest, IsSigned: &isSigned}
|
||||
|
||||
layers := make([]*gql_generated.LayerInfo, 0)
|
||||
|
||||
for _, l := range manifest.Layers {
|
||||
size := l.Size
|
||||
|
||||
digest := l.Digest
|
||||
|
||||
layerInfo := &gql_generated.LayerInfo{Digest: &digest, Size: &size}
|
||||
|
||||
layers = append(layers, layerInfo)
|
||||
}
|
||||
|
||||
manifestInfo.Layers = layers
|
||||
|
||||
manifests = append(manifests, manifestInfo)
|
||||
}
|
||||
|
||||
repoInfo.Manifests = manifests
|
||||
|
||||
return repoInfo, nil
|
||||
}
|
||||
|
||||
// Query returns gql_generated.QueryResolver implementation.
|
||||
func (r *Resolver) Query() gql_generated.QueryResolver { return &queryResolver{r} }
|
||||
|
||||
type queryResolver struct{ *Resolver }
|