feat(cache): add analyzer versions (fanal#156)

* feat(cache): add analyzer versions

* feat(artifact): add analyzer versions

* feat(analyzer): add analyzer versions

* feat(analyzer): add Version()

* feat(yaml): add Version()

* fix(analyzer): use consts
This commit is contained in:
Teppei Fukuda
2021-02-24 07:25:01 +02:00
committed by GitHub
parent c813a60b6f
commit 059deda653
40 changed files with 630 additions and 153 deletions

View File

@@ -31,12 +31,14 @@ type AnalysisTarget struct {
type analyzer interface {
Type() Type
Version() int
Analyze(input AnalysisTarget) (*AnalysisResult, error)
Required(filePath string, info os.FileInfo) bool
}
type configAnalyzer interface {
Type() Type
Version() int
Analyze(targetOS types.OS, content []byte) ([]types.Package, error)
Required(osFound types.OS) bool
}
@@ -108,15 +110,55 @@ func (r *AnalysisResult) Merge(new *AnalysisResult) {
}
}
func AnalyzeFile(wg *sync.WaitGroup, result *AnalysisResult, filePath string, info os.FileInfo, opener Opener,
disabledAnalyzers []Type) error {
type Analyzer struct {
drivers []analyzer
configDrivers []configAnalyzer
}
func NewAnalyzer(disabledAnalyzers []Type) Analyzer {
var drivers []analyzer
for _, a := range analyzers {
if isDisabled(a.Type(), disabledAnalyzers) {
continue
}
drivers = append(drivers, a)
}
var configDrivers []configAnalyzer
for _, a := range configAnalyzers {
if isDisabled(a.Type(), disabledAnalyzers) {
continue
}
configDrivers = append(configDrivers, a)
}
return Analyzer{
drivers: drivers,
configDrivers: configDrivers,
}
}
func (a Analyzer) AnalyzerVersions() map[string]int {
versions := map[string]int{}
for _, d := range a.drivers {
versions[string(d.Type())] = d.Version()
}
return versions
}
func (a Analyzer) ImageConfigAnalyzerVersions() map[string]int {
versions := map[string]int{}
for _, d := range a.configDrivers {
versions[string(d.Type())] = d.Version()
}
return versions
}
func (a Analyzer) AnalyzeFile(wg *sync.WaitGroup, result *AnalysisResult, filePath string, info os.FileInfo,
opener Opener) error {
for _, d := range a.drivers {
// filepath extracted from tar file doesn't have the prefix "/"
if !a.Required(strings.TrimLeft(filePath, "/"), info) {
if !d.Required(strings.TrimLeft(filePath, "/"), info) {
continue
}
b, err := opener()
@@ -133,22 +175,18 @@ func AnalyzeFile(wg *sync.WaitGroup, result *AnalysisResult, filePath string, in
return
}
result.Merge(ret)
}(a, AnalysisTarget{FilePath: filePath, Content: b})
}(d, AnalysisTarget{FilePath: filePath, Content: b})
}
return nil
}
func AnalyzeConfig(targetOS types.OS, configBlob []byte, disabledAnalyzers []Type) []types.Package {
for _, a := range configAnalyzers {
if isDisabled(a.Type(), disabledAnalyzers) {
func (a Analyzer) AnalyzeImageConfig(targetOS types.OS, configBlob []byte) []types.Package {
for _, d := range a.configDrivers {
if !d.Required(targetOS) {
continue
}
if !a.Required(targetOS) {
continue
}
pkgs, err := a.Analyze(targetOS, configBlob)
pkgs, err := d.Analyze(targetOS, configBlob)
if err != nil {
continue
}

View File

@@ -338,8 +338,8 @@ func TestAnalyzeFile(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
var wg sync.WaitGroup
got := new(analyzer.AnalysisResult)
err := analyzer.AnalyzeFile(&wg, got, tt.args.filePath, tt.args.info, tt.args.opener,
tt.args.disabledAnalyzers)
a := analyzer.NewAnalyzer(tt.args.disabledAnalyzers)
err := a.AnalyzeFile(&wg, got, tt.args.filePath, tt.args.info, tt.args.opener)
wg.Wait()
if tt.wantErr != "" {
@@ -373,6 +373,10 @@ func (mockConfigAnalyzer) Type() analyzer.Type {
return "mock"
}
func (mockConfigAnalyzer) Version() int {
return 1
}
func TestAnalyzeConfig(t *testing.T) {
analyzer.RegisterConfigAnalyzer(mockConfigAnalyzer{})
@@ -422,7 +426,8 @@ func TestAnalyzeConfig(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := analyzer.AnalyzeConfig(tt.args.targetOS, tt.args.configBlob, tt.args.disabledAnalyzers)
a := analyzer.NewAnalyzer(tt.args.disabledAnalyzers)
got := a.AnalyzeImageConfig(tt.args.targetOS, tt.args.configBlob)
assert.Equal(t, tt.want, got)
})
}

View File

@@ -19,7 +19,10 @@ import (
"github.com/aquasecurity/fanal/types"
)
const envApkIndexArchiveURL = "FANAL_APK_INDEX_ARCHIVE_URL"
const (
envApkIndexArchiveURL = "FANAL_APK_INDEX_ARCHIVE_URL"
analyzerVersion = 1
)
var apkIndexArchiveURL = "https://raw.githubusercontent.com/knqyf263/apkIndex-archive/master/alpine/v%s/main/x86_64/history.json"
@@ -273,3 +276,7 @@ func (a alpineCmdAnalyzer) Required(targetOS types.OS) bool {
func (a alpineCmdAnalyzer) Type() analyzer.Type {
return analyzer.TypeApkCommand
}
func (a alpineCmdAnalyzer) Version() int {
return analyzerVersion
}

View File

@@ -18,9 +18,9 @@ func init() {
})
}
var (
requiredExts = []string{".yaml", ".yml"}
)
const version = 1
var requiredExts = []string{".yaml", ".yml"}
type yamlConfigAnalyzer struct {
parser *yaml.Parser
@@ -53,3 +53,7 @@ func (a yamlConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a yamlConfigAnalyzer) Type() analyzer.Type {
return analyzer.TypeYaml
}
func (a yamlConfigAnalyzer) Version() int {
return version
}

View File

@@ -16,6 +16,8 @@ func init() {
analyzer.RegisterAnalyzer(&bundlerLibraryAnalyzer{})
}
const version = 1
var (
requiredFiles = []string{"Gemfile.lock"}
)
@@ -38,3 +40,7 @@ func (a bundlerLibraryAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a bundlerLibraryAnalyzer) Type() analyzer.Type {
return analyzer.TypeBundler
}
func (a bundlerLibraryAnalyzer) Version() int {
return version
}

View File

@@ -15,6 +15,8 @@ func init() {
analyzer.RegisterAnalyzer(&cargoLibraryAnalyzer{})
}
const version = 1
var requiredFiles = []string{"Cargo.lock"}
type cargoLibraryAnalyzer struct{}
@@ -35,3 +37,7 @@ func (a cargoLibraryAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a cargoLibraryAnalyzer) Type() analyzer.Type {
return analyzer.TypeCargo
}
func (a cargoLibraryAnalyzer) Version() int {
return version
}

View File

@@ -17,9 +17,9 @@ func init() {
analyzer.RegisterAnalyzer(&composerLibraryAnalyzer{})
}
var (
requiredFiles = []string{"composer.lock"}
)
const version = 1
var requiredFiles = []string{"composer.lock"}
type composerLibraryAnalyzer struct{}
@@ -39,3 +39,7 @@ func (a composerLibraryAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a composerLibraryAnalyzer) Type() analyzer.Type {
return analyzer.TypeComposer
}
func (a composerLibraryAnalyzer) Version() int {
return version
}

View File

@@ -17,9 +17,9 @@ func init() {
analyzer.RegisterAnalyzer(&javaLibraryAnalyzer{})
}
var (
requiredExtensions = []string{".jar", ".war", ".ear"}
)
const version = 1
var requiredExtensions = []string{".jar", ".war", ".ear"}
// javaLibraryAnalyzer analyzes jar/war/ear files
type javaLibraryAnalyzer struct{}
@@ -47,3 +47,7 @@ func (a javaLibraryAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a javaLibraryAnalyzer) Type() analyzer.Type {
return analyzer.TypeJar
}
func (a javaLibraryAnalyzer) Version() int {
return version
}

View File

@@ -17,6 +17,8 @@ func init() {
analyzer.RegisterAnalyzer(&npmLibraryAnalyzer{})
}
const version = 1
var requiredFiles = []string{"package-lock.json"}
type npmLibraryAnalyzer struct{}
@@ -37,3 +39,7 @@ func (a npmLibraryAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a npmLibraryAnalyzer) Type() analyzer.Type {
return analyzer.TypeNpm
}
func (a npmLibraryAnalyzer) Version() int {
return version
}

View File

@@ -16,6 +16,8 @@ func init() {
analyzer.RegisterAnalyzer(&nugetLibraryAnalyzer{})
}
const version = 1
var requiredFiles = []string{"packages.lock.json"}
type nugetLibraryAnalyzer struct{}
@@ -36,3 +38,7 @@ func (a nugetLibraryAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a nugetLibraryAnalyzer) Type() analyzer.Type {
return analyzer.TypeNuget
}
func (a nugetLibraryAnalyzer) Version() int {
return version
}

View File

@@ -17,6 +17,8 @@ func init() {
analyzer.RegisterAnalyzer(&pipenvLibraryAnalyzer{})
}
const version = 1
var requiredFiles = []string{"Pipfile.lock"}
type pipenvLibraryAnalyzer struct{}
@@ -37,3 +39,7 @@ func (a pipenvLibraryAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a pipenvLibraryAnalyzer) Type() analyzer.Type {
return analyzer.TypePipenv
}
func (a pipenvLibraryAnalyzer) Version() int {
return version
}

View File

@@ -16,6 +16,8 @@ func init() {
analyzer.RegisterAnalyzer(&poetryLibraryAnalyzer{})
}
const version = 1
var requiredFiles = []string{"poetry.lock"}
type poetryLibraryAnalyzer struct{}
@@ -36,3 +38,7 @@ func (a poetryLibraryAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a poetryLibraryAnalyzer) Type() analyzer.Type {
return analyzer.TypePoetry
}
func (a poetryLibraryAnalyzer) Version() int {
return version
}

View File

@@ -18,6 +18,8 @@ func init() {
analyzer.RegisterAnalyzer(&yarnLibraryAnalyzer{})
}
const version = 1
var requiredFiles = []string{"yarn.lock"}
type yarnLibraryAnalyzer struct{}
@@ -38,3 +40,7 @@ func (a yarnLibraryAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a yarnLibraryAnalyzer) Type() analyzer.Type {
return analyzer.TypeYarn
}
func (a yarnLibraryAnalyzer) Version() int {
return version
}

View File

@@ -17,6 +17,8 @@ func init() {
analyzer.RegisterAnalyzer(&alpineOSAnalyzer{})
}
const version = 1
var requiredFiles = []string{"etc/alpine-release"}
type alpineOSAnalyzer struct{}
@@ -39,3 +41,7 @@ func (a alpineOSAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a alpineOSAnalyzer) Type() analyzer.Type {
return analyzer.TypeAlpine
}
func (a alpineOSAnalyzer) Version() int {
return version
}

View File

@@ -22,6 +22,8 @@ func init() {
analyzer.RegisterAnalyzer(&amazonlinuxOSAnalyzer{})
}
const version = 1
var requiredFiles = []string{"etc/system-release"}
type amazonlinuxOSAnalyzer struct{}
@@ -67,3 +69,7 @@ func (a amazonlinuxOSAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a amazonlinuxOSAnalyzer) Type() analyzer.Type {
return analyzer.TypeAmazon
}
func (a amazonlinuxOSAnalyzer) Version() int {
return version
}

View File

@@ -17,6 +17,8 @@ func init() {
analyzer.RegisterAnalyzer(&debianOSAnalyzer{})
}
const version = 1
var requiredFiles = []string{"etc/debian_version"}
type debianOSAnalyzer struct{}
@@ -39,3 +41,7 @@ func (a debianOSAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a debianOSAnalyzer) Type() analyzer.Type {
return analyzer.TypeDebian
}
func (a debianOSAnalyzer) Version() int {
return version
}

View File

@@ -21,6 +21,8 @@ func init() {
analyzer.RegisterAnalyzer(&photonOSAnalyzer{})
}
const version = 1
var requiredFiles = []string{
"usr/lib/os-release",
"etc/os-release",
@@ -57,3 +59,7 @@ func (a photonOSAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a photonOSAnalyzer) Type() analyzer.Type {
return analyzer.TypePhoton
}
func (a photonOSAnalyzer) Version() int {
return version
}

View File

@@ -14,14 +14,12 @@ import (
"golang.org/x/xerrors"
)
const centosAnalyzerVersion = 1
func init() {
analyzer.RegisterAnalyzer(&centOSAnalyzer{})
}
var (
requiredFiles = []string{"etc/centos-release"}
)
type centOSAnalyzer struct{}
func (a centOSAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
@@ -45,7 +43,7 @@ func (a centOSAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.Analy
}
func (a centOSAnalyzer) Required(filePath string, _ os.FileInfo) bool {
return utils.StringInSlice(filePath, requiredFiles)
return utils.StringInSlice(filePath, a.requiredFiles())
}
func (a centOSAnalyzer) requiredFiles() []string {
@@ -55,3 +53,7 @@ func (a centOSAnalyzer) requiredFiles() []string {
func (a centOSAnalyzer) Type() analyzer.Type {
return analyzer.TypeCentOS
}
func (a centOSAnalyzer) Version() int {
return centosAnalyzerVersion
}

View File

@@ -14,6 +14,8 @@ import (
"golang.org/x/xerrors"
)
const fedoraAnalyzerVersion = 1
func init() {
analyzer.RegisterAnalyzer(&fedoraOSAnalyzer{})
}
@@ -53,3 +55,7 @@ func (a fedoraOSAnalyzer) requiredFiles() []string {
func (a fedoraOSAnalyzer) Type() analyzer.Type {
return analyzer.TypeFedora
}
func (a fedoraOSAnalyzer) Version() int {
return fedoraAnalyzerVersion
}

View File

@@ -15,6 +15,8 @@ import (
"golang.org/x/xerrors"
)
const oracleAnalyzerVersion = 1
func init() {
analyzer.RegisterAnalyzer(&oracleOSAnalyzer{})
}
@@ -48,3 +50,7 @@ func (a oracleOSAnalyzer) requiredFiles() []string {
func (a oracleOSAnalyzer) Type() analyzer.Type {
return analyzer.TypeOracle
}
func (a oracleOSAnalyzer) Version() int {
return oracleAnalyzerVersion
}

View File

@@ -18,6 +18,8 @@ import (
"github.com/aquasecurity/fanal/analyzer"
)
const redhatAnalyzerVersion = 1
func init() {
analyzer.RegisterAnalyzer(&redhatOSAnalyzer{})
}
@@ -71,3 +73,7 @@ func (a redhatOSAnalyzer) requiredFiles() []string {
func (a redhatOSAnalyzer) Type() analyzer.Type {
return analyzer.TypeRedHatBase
}
func (a redhatOSAnalyzer) Version() int {
return redhatAnalyzerVersion
}

View File

@@ -18,6 +18,8 @@ func init() {
analyzer.RegisterAnalyzer(&suseOSAnalyzer{})
}
const version = 1
var requiredFiles = []string{
"usr/lib/os-release",
"etc/os-release",
@@ -64,3 +66,7 @@ func (a suseOSAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a suseOSAnalyzer) Type() analyzer.Type {
return analyzer.TypeSUSE
}
func (a suseOSAnalyzer) Version() int {
return version
}

View File

@@ -18,6 +18,8 @@ func init() {
analyzer.RegisterAnalyzer(&ubuntuOSAnalyzer{})
}
const version = 1
var requiredFiles = []string{"etc/lsb-release"}
type ubuntuOSAnalyzer struct{}
@@ -51,3 +53,7 @@ func (a ubuntuOSAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a ubuntuOSAnalyzer) Type() analyzer.Type {
return analyzer.TypeUbuntu
}
func (a ubuntuOSAnalyzer) Version() int {
return version
}

View File

@@ -17,6 +17,8 @@ func init() {
analyzer.RegisterAnalyzer(&alpinePkgAnalyzer{})
}
const version = 1
var requiredFiles = []string{"lib/apk/db/installed"}
type alpinePkgAnalyzer struct{}
@@ -92,3 +94,7 @@ func (a alpinePkgAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a alpinePkgAnalyzer) Type() analyzer.Type {
return analyzer.TypeApk
}
func (a alpinePkgAnalyzer) Version() int {
return version
}

View File

@@ -20,16 +20,16 @@ import (
debVersion "github.com/knqyf263/go-deb-version"
)
var (
dpkgSrcCaptureRegexp = regexp.MustCompile(`Source: (?P<name>[^\s]*)( \((?P<version>.*)\))?`)
dpkgSrcCaptureRegexpNames = dpkgSrcCaptureRegexp.SubexpNames()
)
func init() {
analyzer.RegisterAnalyzer(&debianPkgAnalyzer{})
}
const version = 1
var (
dpkgSrcCaptureRegexp = regexp.MustCompile(`Source: (?P<name>[^\s]*)( \((?P<version>.*)\))?`)
dpkgSrcCaptureRegexpNames = dpkgSrcCaptureRegexp.SubexpNames()
requiredFiles = []string{"var/lib/dpkg/status"}
requiredDirs = []string{"var/lib/dpkg/status.d/"}
)
@@ -172,3 +172,7 @@ func (a debianPkgAnalyzer) Required(filePath string, fileInfo os.FileInfo) bool
func (a debianPkgAnalyzer) Type() analyzer.Type {
return analyzer.TypeDpkg
}
func (a debianPkgAnalyzer) Version() int {
return version
}

View File

@@ -18,6 +18,8 @@ func init() {
analyzer.RegisterAnalyzer(&rpmPkgAnalyzer{})
}
const version = 1
var requiredFiles = []string{
"usr/lib/sysimage/rpm/Packages",
"var/lib/rpm/Packages",
@@ -129,3 +131,7 @@ func (a rpmPkgAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a rpmPkgAnalyzer) Type() analyzer.Type {
return analyzer.TypeRpm
}
func (a rpmPkgAnalyzer) Version() int {
return version
}

View File

@@ -20,16 +20,16 @@ import (
)
type Artifact struct {
image image.Image
cache cache.ArtifactCache
disableAnalyzers []analyzer.Type
image image.Image
cache cache.ArtifactCache
analyzer analyzer.Analyzer
}
func NewArtifact(img image.Image, c cache.ArtifactCache, disabled []analyzer.Type) artifact.Artifact {
return Artifact{
image: img,
cache: c,
disableAnalyzers: disabled,
image: img,
cache: c,
analyzer: analyzer.NewAnalyzer(disabled),
}
}
@@ -44,7 +44,8 @@ func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error)
return types.ArtifactReference{}, xerrors.Errorf("unable to get layer IDs: %w", err)
}
missingImage, missingLayers, err := a.cache.MissingBlobs(imageID, diffIDs)
missingImage, missingLayers, err := a.cache.MissingBlobs(imageID, diffIDs,
a.analyzer.AnalyzerVersions(), a.analyzer.ImageConfigAnalyzerVersions())
if err != nil {
return types.ArtifactReference{}, xerrors.Errorf("unable to get missing layers: %w", err)
}
@@ -116,7 +117,7 @@ func (a Artifact) inspectLayer(diffID string) (types.BlobInfo, error) {
result := new(analyzer.AnalysisResult)
opqDirs, whFiles, err := walker.WalkLayerTar(r, func(filePath string, info os.FileInfo, opener analyzer.Opener) error {
if err = analyzer.AnalyzeFile(&wg, result, filePath, info, opener, a.disableAnalyzers); err != nil {
if err = a.analyzer.AnalyzeFile(&wg, result, filePath, info, opener); err != nil {
return xerrors.Errorf("failed to analyze %s: %w", filePath, err)
}
return nil
@@ -131,15 +132,16 @@ func (a Artifact) inspectLayer(diffID string) (types.BlobInfo, error) {
result.Sort()
layerInfo := types.BlobInfo{
Digest: layerDigest,
DiffID: diffID,
SchemaVersion: types.BlobJSONSchemaVersion,
OS: result.OS,
PackageInfos: result.PackageInfos,
Applications: result.Applications,
Configs: result.Configs,
OpaqueDirs: opqDirs,
WhiteoutFiles: whFiles,
SchemaVersion: types.BlobJSONSchemaVersion,
AnalyzerVersions: a.analyzer.AnalyzerVersions(),
Digest: layerDigest,
DiffID: diffID,
OS: result.OS,
PackageInfos: result.PackageInfos,
Applications: result.Applications,
Configs: result.Configs,
OpaqueDirs: opqDirs,
WhiteoutFiles: whFiles,
}
return layerInfo, nil
}
@@ -185,7 +187,7 @@ func (a Artifact) inspectConfig(imageID string, osFound types.OS) error {
return xerrors.Errorf("unable to get config blob: %w", err)
}
pkgs := analyzer.AnalyzeConfig(osFound, configBlob, a.disableAnalyzers)
pkgs := a.analyzer.AnalyzeImageConfig(osFound, configBlob)
var s1 v1.ConfigFile
if err := json.Unmarshal(configBlob, &s1); err != nil {
@@ -193,12 +195,13 @@ func (a Artifact) inspectConfig(imageID string, osFound types.OS) error {
}
info := types.ArtifactInfo{
SchemaVersion: types.ArtifactJSONSchemaVersion,
Architecture: s1.Architecture,
Created: s1.Created.Time,
DockerVersion: s1.DockerVersion,
OS: s1.OS,
HistoryPackages: pkgs,
SchemaVersion: types.ArtifactJSONSchemaVersion,
AnalyzerVersions: a.analyzer.ImageConfigAnalyzerVersions(),
Architecture: s1.Architecture,
Created: s1.Created.Time,
DockerVersion: s1.DockerVersion,
OS: s1.OS,
HistoryPackages: pkgs,
}
if err := a.cache.PutArtifact(imageID, info); err != nil {

View File

@@ -43,6 +43,17 @@ func TestArtifact_Inspect(t *testing.T) {
Args: cache.ArtifactCacheMissingBlobsArgs{
ArtifactID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72",
BlobIDs: []string{"sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203"},
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
string(analyzer.TypeApk): 1,
string(analyzer.TypeComposer): 1,
string(analyzer.TypeDebian): 1,
string(analyzer.TypeDpkg): 1,
string(analyzer.TypeUbuntu): 1,
},
ConfigAnalyzerVersions: map[string]int{
string(analyzer.TypeApkCommand): 1,
},
},
Returns: cache.ArtifactCacheMissingBlobsReturns{
MissingArtifact: true,
@@ -55,8 +66,16 @@ func TestArtifact_Inspect(t *testing.T) {
BlobID: "sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203",
BlobInfo: types.BlobInfo{
SchemaVersion: 1,
Digest: "",
DiffID: "sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203",
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
string(analyzer.TypeApk): 1,
string(analyzer.TypeComposer): 1,
string(analyzer.TypeDebian): 1,
string(analyzer.TypeDpkg): 1,
string(analyzer.TypeUbuntu): 1,
},
Digest: "",
DiffID: "sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203",
OS: &types.OS{
Family: "alpine",
Name: "3.11.5",
@@ -94,6 +113,9 @@ func TestArtifact_Inspect(t *testing.T) {
ArtifactID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72",
ArtifactInfo: types.ArtifactInfo{
SchemaVersion: 1,
AnalyzerVersions: map[string]int{
string(analyzer.TypeApkCommand): 1,
},
Architecture: "amd64",
Created: time.Date(2020, 3, 23, 21, 19, 34, 196162891, time.UTC),
DockerVersion: "18.09.7",
@@ -120,6 +142,17 @@ func TestArtifact_Inspect(t *testing.T) {
"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7",
"sha256:a4595c43a874856bf95f3bfc4fbf78bbaa04c92c726276d4f64193a47ced0566",
},
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
string(analyzer.TypeApk): 1,
string(analyzer.TypeComposer): 1,
string(analyzer.TypeDebian): 1,
string(analyzer.TypeDpkg): 1,
string(analyzer.TypeUbuntu): 1,
},
ConfigAnalyzerVersions: map[string]int{
string(analyzer.TypeApkCommand): 1,
},
},
Returns: cache.ArtifactCacheMissingBlobsReturns{
MissingBlobIDs: []string{
@@ -135,9 +168,17 @@ func TestArtifact_Inspect(t *testing.T) {
BlobID: "sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02",
BlobInfo: types.BlobInfo{
SchemaVersion: 1,
Digest: "",
DiffID: "sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02",
OS: &types.OS{Family: "debian", Name: "9.9"},
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
string(analyzer.TypeApk): 1,
string(analyzer.TypeComposer): 1,
string(analyzer.TypeDebian): 1,
string(analyzer.TypeDpkg): 1,
string(analyzer.TypeUbuntu): 1,
},
Digest: "",
DiffID: "sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02",
OS: &types.OS{Family: "debian", Name: "9.9"},
PackageInfos: []types.PackageInfo{
{
FilePath: "var/lib/dpkg/status.d/base",
@@ -179,8 +220,16 @@ func TestArtifact_Inspect(t *testing.T) {
BlobID: "sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5",
BlobInfo: types.BlobInfo{
SchemaVersion: 1,
Digest: "",
DiffID: "sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5",
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
string(analyzer.TypeApk): 1,
string(analyzer.TypeComposer): 1,
string(analyzer.TypeDebian): 1,
string(analyzer.TypeDpkg): 1,
string(analyzer.TypeUbuntu): 1,
},
Digest: "",
DiffID: "sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5",
PackageInfos: []types.PackageInfo{
{
FilePath: "var/lib/dpkg/status.d/libc6",
@@ -209,8 +258,16 @@ func TestArtifact_Inspect(t *testing.T) {
BlobID: "sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7",
BlobInfo: types.BlobInfo{
SchemaVersion: 1,
Digest: "",
DiffID: "sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7",
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
string(analyzer.TypeApk): 1,
string(analyzer.TypeComposer): 1,
string(analyzer.TypeDebian): 1,
string(analyzer.TypeDpkg): 1,
string(analyzer.TypeUbuntu): 1,
},
Digest: "",
DiffID: "sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7",
Applications: []types.Application{{Type: "composer", FilePath: "php-app/composer.lock",
Libraries: []types.LibraryInfo{
{Library: depTypes.Library{Name: "guzzlehttp/guzzle", Version: "6.2.0"}},
@@ -258,6 +315,14 @@ func TestArtifact_Inspect(t *testing.T) {
"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7",
"sha256:a4595c43a874856bf95f3bfc4fbf78bbaa04c92c726276d4f64193a47ced0566",
},
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
string(analyzer.TypeApk): 1,
string(analyzer.TypeUbuntu): 1,
},
ConfigAnalyzerVersions: map[string]int{
string(analyzer.TypeApkCommand): 1,
},
},
Returns: cache.ArtifactCacheMissingBlobsReturns{
MissingBlobIDs: []string{
@@ -273,8 +338,13 @@ func TestArtifact_Inspect(t *testing.T) {
BlobID: "sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02",
BlobInfo: types.BlobInfo{
SchemaVersion: 1,
Digest: "",
DiffID: "sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02",
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
string(analyzer.TypeApk): 1,
string(analyzer.TypeUbuntu): 1,
},
Digest: "",
DiffID: "sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02",
},
},
},
@@ -283,8 +353,13 @@ func TestArtifact_Inspect(t *testing.T) {
BlobID: "sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5",
BlobInfo: types.BlobInfo{
SchemaVersion: 1,
Digest: "",
DiffID: "sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5",
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
string(analyzer.TypeApk): 1,
string(analyzer.TypeUbuntu): 1,
},
Digest: "",
DiffID: "sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5",
},
},
},
@@ -293,9 +368,14 @@ func TestArtifact_Inspect(t *testing.T) {
BlobID: "sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7",
BlobInfo: types.BlobInfo{
SchemaVersion: 1,
Digest: "",
DiffID: "sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7",
OpaqueDirs: []string{"php-app/"},
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
string(analyzer.TypeApk): 1,
string(analyzer.TypeUbuntu): 1,
},
Digest: "",
DiffID: "sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7",
OpaqueDirs: []string{"php-app/"},
},
},
},
@@ -318,6 +398,17 @@ func TestArtifact_Inspect(t *testing.T) {
Args: cache.ArtifactCacheMissingBlobsArgs{
ArtifactID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72",
BlobIDs: []string{"sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203"},
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
string(analyzer.TypeApk): 1,
string(analyzer.TypeComposer): 1,
string(analyzer.TypeDebian): 1,
string(analyzer.TypeDpkg): 1,
string(analyzer.TypeUbuntu): 1,
},
ConfigAnalyzerVersions: map[string]int{
string(analyzer.TypeApkCommand): 1,
},
},
Returns: cache.ArtifactCacheMissingBlobsReturns{
Err: xerrors.New("MissingBlobs failed"),
@@ -332,6 +423,17 @@ func TestArtifact_Inspect(t *testing.T) {
Args: cache.ArtifactCacheMissingBlobsArgs{
ArtifactID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72",
BlobIDs: []string{"sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203"},
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
string(analyzer.TypeApk): 1,
string(analyzer.TypeComposer): 1,
string(analyzer.TypeDebian): 1,
string(analyzer.TypeDpkg): 1,
string(analyzer.TypeUbuntu): 1,
},
ConfigAnalyzerVersions: map[string]int{
string(analyzer.TypeApkCommand): 1,
},
},
Returns: cache.ArtifactCacheMissingBlobsReturns{
MissingBlobIDs: []string{"sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203"},
@@ -343,8 +445,16 @@ func TestArtifact_Inspect(t *testing.T) {
BlobID: "sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203",
BlobInfo: types.BlobInfo{
SchemaVersion: 1,
Digest: "",
DiffID: "sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203",
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
string(analyzer.TypeApk): 1,
string(analyzer.TypeComposer): 1,
string(analyzer.TypeDebian): 1,
string(analyzer.TypeDpkg): 1,
string(analyzer.TypeUbuntu): 1,
},
Digest: "",
DiffID: "sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203",
OS: &types.OS{
Family: "alpine",
Name: "3.11.5",
@@ -387,6 +497,17 @@ func TestArtifact_Inspect(t *testing.T) {
Args: cache.ArtifactCacheMissingBlobsArgs{
ArtifactID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72",
BlobIDs: []string{"sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203"},
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
string(analyzer.TypeApk): 1,
string(analyzer.TypeComposer): 1,
string(analyzer.TypeDebian): 1,
string(analyzer.TypeDpkg): 1,
string(analyzer.TypeUbuntu): 1,
},
ConfigAnalyzerVersions: map[string]int{
string(analyzer.TypeApkCommand): 1,
},
},
Returns: cache.ArtifactCacheMissingBlobsReturns{
MissingArtifact: true,
@@ -399,8 +520,16 @@ func TestArtifact_Inspect(t *testing.T) {
BlobID: "sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203",
BlobInfo: types.BlobInfo{
SchemaVersion: 1,
Digest: "",
DiffID: "sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203",
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
string(analyzer.TypeApk): 1,
string(analyzer.TypeComposer): 1,
string(analyzer.TypeDebian): 1,
string(analyzer.TypeDpkg): 1,
string(analyzer.TypeUbuntu): 1,
},
Digest: "",
DiffID: "sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203",
OS: &types.OS{
Family: "alpine",
Name: "3.11.5",
@@ -434,19 +563,22 @@ func TestArtifact_Inspect(t *testing.T) {
},
putArtifactExpectations: []cache.ArtifactCachePutArtifactExpectation{
{
Returns: cache.ArtifactCachePutArtifactReturns{
Err: errors.New("put artifact failed"),
},
Args: cache.ArtifactCachePutArtifactArgs{
ArtifactID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72",
ArtifactInfo: types.ArtifactInfo{
SchemaVersion: 1,
AnalyzerVersions: map[string]int{
string(analyzer.TypeApkCommand): 1,
},
Architecture: "amd64",
Created: time.Date(2020, 3, 23, 21, 19, 34, 196162891, time.UTC),
DockerVersion: "18.09.7",
OS: "linux",
},
},
Returns: cache.ArtifactCachePutArtifactReturns{
Err: errors.New("put artifact failed"),
},
},
},
wantErr: "put artifact failed",

View File

@@ -23,16 +23,16 @@ import (
)
type Artifact struct {
dir string
cache cache.ArtifactCache
disableAnalyzers []analyzer.Type
dir string
cache cache.ArtifactCache
analyzer analyzer.Analyzer
}
func NewArtifact(dir string, c cache.ArtifactCache, disabled []analyzer.Type) artifact.Artifact {
return Artifact{
dir: dir,
cache: c,
disableAnalyzers: disabled,
dir: dir,
cache: c,
analyzer: analyzer.NewAnalyzer(disabled),
}
}
@@ -44,7 +44,7 @@ func (a Artifact) Inspect(_ context.Context) (types.ArtifactReference, error) {
if err != nil {
return err
}
if err = analyzer.AnalyzeFile(&wg, result, filePath, info, opener, a.disableAnalyzers); err != nil {
if err = a.analyzer.AnalyzeFile(&wg, result, filePath, info, opener); err != nil {
return err
}
return nil
@@ -59,11 +59,12 @@ func (a Artifact) Inspect(_ context.Context) (types.ArtifactReference, error) {
result.Sort()
blobInfo := types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
OS: result.OS,
PackageInfos: result.PackageInfos,
Applications: result.Applications,
Configs: result.Configs,
SchemaVersion: types.BlobJSONSchemaVersion,
AnalyzerVersions: a.analyzer.AnalyzerVersions(),
OS: result.OS,
PackageInfos: result.PackageInfos,
Applications: result.Applications,
Configs: result.Configs,
}
// calculate hash of JSON and use it as pseudo artifactID and blobID

View File

@@ -32,10 +32,14 @@ func TestArtifact_Inspect(t *testing.T) {
},
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:94a4586441ddd6599fb64cb407d8c43ffb273a8bd01cd933e525b08527f6296e",
BlobID: "sha256:e1fee9054eea6ee450a2a3f2b0e49117a70e3a0968ac43ff90caf977e6ee71f4",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
DiffID: "sha256:94a4586441ddd6599fb64cb407d8c43ffb273a8bd01cd933e525b08527f6296e",
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
string(analyzer.TypeApk): 1,
},
DiffID: "sha256:e1fee9054eea6ee450a2a3f2b0e49117a70e3a0968ac43ff90caf977e6ee71f4",
OS: &types.OS{
Family: "alpine",
Name: "3.11.6",
@@ -54,9 +58,9 @@ func TestArtifact_Inspect(t *testing.T) {
},
want: types.ArtifactReference{
Name: "host",
ID: "sha256:94a4586441ddd6599fb64cb407d8c43ffb273a8bd01cd933e525b08527f6296e",
ID: "sha256:e1fee9054eea6ee450a2a3f2b0e49117a70e3a0968ac43ff90caf977e6ee71f4",
BlobIDs: []string{
"sha256:94a4586441ddd6599fb64cb407d8c43ffb273a8bd01cd933e525b08527f6296e",
"sha256:e1fee9054eea6ee450a2a3f2b0e49117a70e3a0968ac43ff90caf977e6ee71f4",
},
},
},
@@ -70,8 +74,9 @@ func TestArtifact_Inspect(t *testing.T) {
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:3404e98968ad338dc60ef74c0dd5bdd893478415cd2296b0c265a5650b3ae4d6",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
DiffID: "sha256:3404e98968ad338dc60ef74c0dd5bdd893478415cd2296b0c265a5650b3ae4d6",
SchemaVersion: types.BlobJSONSchemaVersion,
AnalyzerVersions: map[string]int{},
DiffID: "sha256:3404e98968ad338dc60ef74c0dd5bdd893478415cd2296b0c265a5650b3ae4d6",
},
},
Returns: cache.ArtifactCachePutBlobReturns{},
@@ -91,10 +96,14 @@ func TestArtifact_Inspect(t *testing.T) {
},
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:94a4586441ddd6599fb64cb407d8c43ffb273a8bd01cd933e525b08527f6296e",
BlobID: "sha256:e1fee9054eea6ee450a2a3f2b0e49117a70e3a0968ac43ff90caf977e6ee71f4",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
DiffID: "sha256:94a4586441ddd6599fb64cb407d8c43ffb273a8bd01cd933e525b08527f6296e",
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
string(analyzer.TypeApk): 1,
},
DiffID: "sha256:e1fee9054eea6ee450a2a3f2b0e49117a70e3a0968ac43ff90caf977e6ee71f4",
OS: &types.OS{
Family: "alpine",
Name: "3.11.6",

19
cache/cache.go vendored
View File

@@ -21,7 +21,8 @@ type Cache interface {
// ArtifactCache uses local or remote cache
type ArtifactCache interface {
// MissingBlobs returns missing blob IDs such as layer IDs in cache
MissingBlobs(artifactID string, blobIDs []string) (missingArtifact bool, missingBlobIDs []string, err error)
MissingBlobs(artifactID string, blobIDs []string, analyzerVersions, configAnalyzerVersions map[string]int) (
missingArtifact bool, missingBlobIDs []string, err error)
// PutArtifact stores artifact information such as image metadata in cache
PutArtifact(artifactID string, artifactInfo types.ArtifactInfo) (err error)
@@ -44,3 +45,19 @@ type LocalArtifactCache interface {
// Clear deletes the local database
Clear() (err error)
}
// isStale reports whether the cached analyzer versions are out of date with
// respect to the analyzers currently enabled.
// e.g. When {"alpine": 1, "python": 2} is cached and {"alpine": 2, "python": 2} is sent, the cache is stale.
// Also, {"python": 2} cache must be replaced by {"alpine": 1, "python": 2}.
func isStale(current, cached map[string]int) bool {
	for name, want := range current {
		// Stale when the analyzer was never run on this cache entry,
		// or when it was run with an older version.
		got, found := cached[name]
		if !found || got < want {
			return true
		}
	}
	// Every currently enabled analyzer is present in the cache at an
	// equal-or-newer version.
	return false
}

9
cache/fs.go vendored
View File

@@ -140,7 +140,8 @@ func (fs FSCache) PutArtifact(artifactID string, artifactInfo types.ArtifactInfo
}
// MissingBlobs returns missing blob IDs such as layer IDs
func (fs FSCache) MissingBlobs(artifactID string, blobIDs []string) (bool, []string, error) {
func (fs FSCache) MissingBlobs(artifactID string, blobIDs []string,
analyzerVersions, configAnalyzerVersions map[string]int) (bool, []string, error) {
var missingArtifact bool
var missingBlobIDs []string
err := fs.db.View(func(tx *bolt.Tx) error {
@@ -152,7 +153,8 @@ func (fs FSCache) MissingBlobs(artifactID string, blobIDs []string) (bool, []str
missingBlobIDs = append(missingBlobIDs, blobID)
continue
}
if blobInfo.SchemaVersion != types.BlobJSONSchemaVersion {
if blobInfo.SchemaVersion != types.BlobJSONSchemaVersion ||
isStale(analyzerVersions, blobInfo.AnalyzerVersions) {
missingBlobIDs = append(missingBlobIDs, blobID)
}
}
@@ -168,7 +170,8 @@ func (fs FSCache) MissingBlobs(artifactID string, blobIDs []string) (bool, []str
// error means cache missed artifact info
return true, missingBlobIDs, nil
}
if artifactInfo.SchemaVersion != types.ArtifactJSONSchemaVersion {
if artifactInfo.SchemaVersion != types.ArtifactJSONSchemaVersion ||
isStale(configAnalyzerVersions, artifactInfo.AnalyzerVersions) {
missingArtifact = true
}
return missingArtifact, missingBlobIDs, nil

57
cache/fs_test.go vendored
View File

@@ -13,6 +13,7 @@ import (
"github.com/stretchr/testify/require"
bolt "go.etcd.io/bbolt"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/types"
depTypes "github.com/aquasecurity/go-dep-parser/pkg/types"
)
@@ -82,6 +83,9 @@ func TestFSCache_GetLayer(t *testing.T) {
},
want: types.BlobInfo{
SchemaVersion: 2,
AnalyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 2,
},
OS: &types.OS{
Family: "alpine",
Name: "3.10",
@@ -370,8 +374,10 @@ func TestFSCache_PutImage(t *testing.T) {
func TestFSCache_MissingLayers(t *testing.T) {
type args struct {
imageID string
layerIDs []string
imageID string
layerIDs []string
analyzerVersions map[string]int
configAnalyzerVersions map[string]int
}
tests := []struct {
name string
@@ -391,6 +397,12 @@ func TestFSCache_MissingLayers(t *testing.T) {
"sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5",
"sha256:dab15cac9ebd43beceeeda3ce95c574d6714ed3d3969071caead678c065813ec",
},
analyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 2,
},
configAnalyzerVersions: map[string]int{
string(analyzer.TypeApkCommand): 1,
},
},
wantMissingImage: false,
wantMissingLayerIDs: []string{
@@ -407,6 +419,12 @@ func TestFSCache_MissingLayers(t *testing.T) {
layerIDs: []string{
"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7",
},
analyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 2,
},
configAnalyzerVersions: map[string]int{
string(analyzer.TypeApkCommand): 1,
},
},
wantMissingImage: true,
wantMissingLayerIDs: []string{
@@ -421,6 +439,12 @@ func TestFSCache_MissingLayers(t *testing.T) {
layerIDs: []string{
"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7",
},
analyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 2,
},
configAnalyzerVersions: map[string]int{
string(analyzer.TypeApkCommand): 1,
},
},
wantMissingImage: true,
wantMissingLayerIDs: []string{
@@ -435,6 +459,32 @@ func TestFSCache_MissingLayers(t *testing.T) {
layerIDs: []string{
"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7",
},
analyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 2,
},
configAnalyzerVersions: map[string]int{
string(analyzer.TypeApkCommand): 1,
},
},
wantMissingImage: true,
wantMissingLayerIDs: []string{
"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7",
},
},
{
name: "happy path: new config analyzer",
dbPath: "testdata/fanal.db",
args: args{
imageID: "sha256:58701fd185bda36cab0557bb6438661831267aa4a9e0b54211c4d5317a48aff4",
layerIDs: []string{
"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7",
},
analyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
},
configAnalyzerVersions: map[string]int{
"something_new": 1,
},
},
wantMissingImage: true,
wantMissingLayerIDs: []string{
@@ -452,7 +502,8 @@ func TestFSCache_MissingLayers(t *testing.T) {
require.NoError(t, err)
defer fs.Clear()
gotMissingImage, gotMissingLayerIDs, err := fs.MissingBlobs(tt.args.imageID, tt.args.layerIDs)
gotMissingImage, gotMissingLayerIDs, err := fs.MissingBlobs(tt.args.imageID, tt.args.layerIDs,
tt.args.analyzerVersions, tt.args.configAnalyzerVersions)
if tt.wantErr != "" {
require.NotNil(t, err, tt.name)
assert.Contains(t, err.Error(), tt.wantErr, tt.name)

View File

@@ -13,10 +13,14 @@ type MockArtifactCache struct {
}
type ArtifactCacheMissingBlobsArgs struct {
ArtifactID string
ArtifactIDAnything bool
BlobIDs []string
BlobIDsAnything bool
ArtifactID string
ArtifactIDAnything bool
BlobIDs []string
BlobIDsAnything bool
AnalyzerVersions map[string]int
AnalyzerVersionsAnything bool
ConfigAnalyzerVersions map[string]int
ConfigAnalyzerVersionsAnything bool
}
type ArtifactCacheMissingBlobsReturns struct {
@@ -42,6 +46,16 @@ func (_m *MockArtifactCache) ApplyMissingBlobsExpectation(e ArtifactCacheMissing
} else {
args = append(args, e.Args.BlobIDs)
}
if e.Args.AnalyzerVersionsAnything {
args = append(args, mock.Anything)
} else {
args = append(args, e.Args.AnalyzerVersions)
}
if e.Args.ConfigAnalyzerVersionsAnything {
args = append(args, mock.Anything)
} else {
args = append(args, e.Args.ConfigAnalyzerVersions)
}
_m.On("MissingBlobs", args...).Return(e.Returns.MissingArtifact, e.Returns.MissingBlobIDs, e.Returns.Err)
}
@@ -51,20 +65,20 @@ func (_m *MockArtifactCache) ApplyMissingBlobsExpectations(expectations []Artifa
}
}
// MissingBlobs provides a mock function with given fields: artifactID, blobIDs
func (_m *MockArtifactCache) MissingBlobs(artifactID string, blobIDs []string) (bool, []string, error) {
ret := _m.Called(artifactID, blobIDs)
// MissingBlobs provides a mock function with given fields: artifactID, blobIDs, analyzerVersions, configAnalyzerVersions
func (_m *MockArtifactCache) MissingBlobs(artifactID string, blobIDs []string, analyzerVersions map[string]int, configAnalyzerVersions map[string]int) (bool, []string, error) {
ret := _m.Called(artifactID, blobIDs, analyzerVersions, configAnalyzerVersions)
var r0 bool
if rf, ok := ret.Get(0).(func(string, []string) bool); ok {
r0 = rf(artifactID, blobIDs)
if rf, ok := ret.Get(0).(func(string, []string, map[string]int, map[string]int) bool); ok {
r0 = rf(artifactID, blobIDs, analyzerVersions, configAnalyzerVersions)
} else {
r0 = ret.Get(0).(bool)
}
var r1 []string
if rf, ok := ret.Get(1).(func(string, []string) []string); ok {
r1 = rf(artifactID, blobIDs)
if rf, ok := ret.Get(1).(func(string, []string, map[string]int, map[string]int) []string); ok {
r1 = rf(artifactID, blobIDs, analyzerVersions, configAnalyzerVersions)
} else {
if ret.Get(1) != nil {
r1 = ret.Get(1).([]string)
@@ -72,8 +86,8 @@ func (_m *MockArtifactCache) MissingBlobs(artifactID string, blobIDs []string) (
}
var r2 error
if rf, ok := ret.Get(2).(func(string, []string) error); ok {
r2 = rf(artifactID, blobIDs)
if rf, ok := ret.Get(2).(func(string, []string, map[string]int, map[string]int) error); ok {
r2 = rf(artifactID, blobIDs, analyzerVersions, configAnalyzerVersions)
} else {
r2 = ret.Error(2)
}

9
cache/redis.go vendored
View File

@@ -88,7 +88,8 @@ func (c RedisCache) GetBlob(blobID string) (types.BlobInfo, error) {
return blobInfo, nil
}
func (c RedisCache) MissingBlobs(artifactID string, blobIDs []string) (bool, []string, error) {
func (c RedisCache) MissingBlobs(artifactID string, blobIDs []string,
analyzerVersions, configAnalyzerVersions map[string]int) (bool, []string, error) {
var missingArtifact bool
var missingBlobIDs []string
for _, blobID := range blobIDs {
@@ -98,7 +99,8 @@ func (c RedisCache) MissingBlobs(artifactID string, blobIDs []string) (bool, []s
missingBlobIDs = append(missingBlobIDs, blobID)
continue
}
if blobInfo.SchemaVersion != types.BlobJSONSchemaVersion {
if blobInfo.SchemaVersion != types.BlobJSONSchemaVersion ||
isStale(analyzerVersions, blobInfo.AnalyzerVersions) {
missingBlobIDs = append(missingBlobIDs, blobID)
}
}
@@ -108,7 +110,8 @@ func (c RedisCache) MissingBlobs(artifactID string, blobIDs []string) (bool, []s
if err != nil {
return true, missingBlobIDs, nil
}
if artifactInfo.SchemaVersion != types.ArtifactJSONSchemaVersion {
if artifactInfo.SchemaVersion != types.ArtifactJSONSchemaVersion ||
isStale(configAnalyzerVersions, artifactInfo.AnalyzerVersions) {
missingArtifact = true
}
return missingArtifact, missingBlobIDs, nil

64
cache/redis_test.go vendored
View File

@@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/cache"
"github.com/aquasecurity/fanal/types"
)
@@ -361,8 +362,10 @@ func TestRedisCache_GetBlob(t *testing.T) {
func TestRedisCache_MissingBlobs(t *testing.T) {
type args struct {
artifactID string
blobIDs []string
artifactID string
blobIDs []string
analyzerVersions map[string]int
configAnalyzerVersions map[string]int
}
tests := []struct {
name string
@@ -388,6 +391,9 @@ func TestRedisCache_MissingBlobs(t *testing.T) {
args: args{
artifactID: "sha256:961769676411f082461f9ef46626dd7a2d1e2b2a38e6a44364bcbecf51e66dd4",
blobIDs: []string{"sha256:03901b4a2ea88eeaad62dbe59b072b28b6efa00491962b8741081c5df50c65e0"},
analyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 2,
},
},
wantMissingArtifact: true,
},
@@ -407,6 +413,9 @@ func TestRedisCache_MissingBlobs(t *testing.T) {
args: args{
artifactID: "sha256:be4e4bea2c2e15b403bb321562e78ea84b501fb41497472e91ecb41504e8a27c",
blobIDs: []string{"sha256:03901b4a2ea88eeaad62dbe59b072b28b6efa00491962b8741081c5df50c65e0"},
analyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
},
},
wantMissingArtifact: true,
},
@@ -416,10 +425,55 @@ func TestRedisCache_MissingBlobs(t *testing.T) {
args: args{
artifactID: "sha256:8652b9f0cb4c0599575e5a003f5906876e10c1ceb2ab9fe1786712dac14a50cf",
blobIDs: []string{"sha256:174f5685490326fc0a1c0f5570b8663732189b327007e47ff13d2ca59673db02"},
analyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 2,
},
},
wantMissingArtifact: false,
wantMissingBlobIDs: []string{"sha256:174f5685490326fc0a1c0f5570b8663732189b327007e47ff13d2ca59673db02"},
},
{
name: "old analyzer versions",
setupRedis: true,
args: args{
artifactID: "sha256:8652b9f0cb4c0599575e5a003f5906876e10c1ceb2ab9fe1786712dac14a50cf",
blobIDs: []string{"sha256:03901b4a2ea88eeaad62dbe59b072b28b6efa00491962b8741081c5df50c65e0"},
analyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
},
},
wantMissingArtifact: false,
wantMissingBlobIDs: nil,
},
{
name: "missing blobs with stale cache",
setupRedis: true,
args: args{
artifactID: "sha256:8652b9f0cb4c0599575e5a003f5906876e10c1ceb2ab9fe1786712dac14a50cf",
blobIDs: []string{"sha256:03901b4a2ea88eeaad62dbe59b072b28b6efa00491962b8741081c5df50c65e0"},
analyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 3,
},
},
wantMissingArtifact: false,
wantMissingBlobIDs: []string{"sha256:03901b4a2ea88eeaad62dbe59b072b28b6efa00491962b8741081c5df50c65e0"},
},
{
name: "missing artifact with newly enabled analyzers",
setupRedis: true,
args: args{
artifactID: "sha256:8652b9f0cb4c0599575e5a003f5906876e10c1ceb2ab9fe1786712dac14a50cf",
blobIDs: []string{"sha256:03901b4a2ea88eeaad62dbe59b072b28b6efa00491962b8741081c5df50c65e0"},
analyzerVersions: map[string]int{
string(analyzer.TypeAlpine): 1,
},
configAnalyzerVersions: map[string]int{
string(analyzer.TypeApkCommand): 1,
},
},
wantMissingArtifact: true,
wantMissingBlobIDs: nil,
},
}
// Set up Redis test server
@@ -429,7 +483,8 @@ func TestRedisCache_MissingBlobs(t *testing.T) {
s.Set("fanal::artifact::sha256:8652b9f0cb4c0599575e5a003f5906876e10c1ceb2ab9fe1786712dac14a50cf", `{"SchemaVersion": 1}`)
s.Set("fanal::artifact::sha256:be4e4bea2c2e15b403bb321562e78ea84b501fb41497472e91ecb41504e8a27c", `{"SchemaVersion": 2}`)
s.Set("fanal::blob::sha256:03901b4a2ea88eeaad62dbe59b072b28b6efa00491962b8741081c5df50c65e0", `{"SchemaVersion": 1}`)
s.Set("fanal::blob::sha256:03901b4a2ea88eeaad62dbe59b072b28b6efa00491962b8741081c5df50c65e0",
`{"SchemaVersion": 1, "AnalyzerVersions": {"alpine": 2}}`)
s.Set("fanal::blob::sha256:174f5685490326fc0a1c0f5570b8663732189b327007e47ff13d2ca59673db02", `{"SchemaVersion": 2}`)
for _, tt := range tests {
@@ -443,7 +498,8 @@ func TestRedisCache_MissingBlobs(t *testing.T) {
Addr: addr,
})
missingArtifact, missingBlobIDs, err := c.MissingBlobs(tt.args.artifactID, tt.args.blobIDs)
missingArtifact, missingBlobIDs, err := c.MissingBlobs(tt.args.artifactID, tt.args.blobIDs,
tt.args.analyzerVersions, tt.args.configAnalyzerVersions)
if tt.wantErr != "" {
require.NotNil(t, err)
assert.Contains(t, err.Error(), tt.wantErr)

10
cache/s3.go vendored
View File

@@ -4,6 +4,7 @@ import (
"bytes"
"encoding/json"
"fmt"
"github.com/aquasecurity/fanal/types"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/s3"
@@ -119,7 +120,8 @@ func (c S3Cache) getIndex(key string, keyType string) error {
return nil
}
func (c S3Cache) MissingBlobs(artifactID string, blobIDs []string) (bool, []string, error) {
func (c S3Cache) MissingBlobs(artifactID string, blobIDs []string,
analyzerVersions, configAnalyzerVersions map[string]int) (bool, []string, error) {
var missingArtifact bool
var missingBlobIDs []string
for _, blobID := range blobIDs {
@@ -133,7 +135,8 @@ func (c S3Cache) MissingBlobs(artifactID string, blobIDs []string) (bool, []stri
if err != nil {
return true, missingBlobIDs, xerrors.Errorf("the blob object (%s) doesn't exist in S3 even though the index file exists: %w", blobID, err)
}
if blobInfo.SchemaVersion != types.BlobJSONSchemaVersion {
if blobInfo.SchemaVersion != types.BlobJSONSchemaVersion ||
isStale(analyzerVersions, blobInfo.AnalyzerVersions) {
missingBlobIDs = append(missingBlobIDs, blobID)
}
}
@@ -147,7 +150,8 @@ func (c S3Cache) MissingBlobs(artifactID string, blobIDs []string) (bool, []stri
if err != nil {
return true, missingBlobIDs, xerrors.Errorf("the artifact object (%s) doesn't exist in S3 even though the index file exists: %w", artifactID, err)
}
if artifactInfo.SchemaVersion != types.ArtifactJSONSchemaVersion {
if artifactInfo.SchemaVersion != types.ArtifactJSONSchemaVersion ||
isStale(configAnalyzerVersions, artifactInfo.AnalyzerVersions) {
missingArtifact = true
}
return missingArtifact, missingBlobIDs, nil

20
cache/s3_test.go vendored
View File

@@ -1,14 +1,15 @@
package cache
import (
"reflect"
"testing"
"time"
"github.com/aquasecurity/fanal/types"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/aws/aws-sdk-go/service/s3/s3iface"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"golang.org/x/xerrors"
"reflect"
"testing"
"time"
)
type mockS3Client struct {
@@ -188,8 +189,10 @@ func TestS3Cache_MissingBlobs(t *testing.T) {
Prefix string
}
type args struct {
artifactID string
blobIDs []string
artifactID string
blobIDs []string
analyzerVersions map[string]int
configAnalyzerVersions map[string]int
}
tests := []struct {
name string
@@ -205,7 +208,10 @@ func TestS3Cache_MissingBlobs(t *testing.T) {
BucketName: "test",
Prefix: "prefix",
},
args: args{artifactID: "sha256:58701fd185bda36cab0557bb6438661831267aa4a9e0b54211c4d5317a48aff4", blobIDs: []string{"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7"}},
args: args{
artifactID: "sha256:58701fd185bda36cab0557bb6438661831267aa4a9e0b54211c4d5317a48aff4",
blobIDs: []string{"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7"},
},
want: true,
wantStringSlice: []string{"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7"},
wantErr: false,
@@ -214,7 +220,7 @@ func TestS3Cache_MissingBlobs(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
c := NewS3Cache(tt.fields.BucketName, tt.fields.Prefix, tt.fields.S3, tt.fields.Downloader)
got, got1, err := c.MissingBlobs(tt.args.artifactID, tt.args.blobIDs)
got, got1, err := c.MissingBlobs(tt.args.artifactID, tt.args.blobIDs, nil, nil)
if (err != nil) != tt.wantErr {
t.Errorf("S3Cache.MissingBlobs() error = %v, wantErr %v", err, tt.wantErr)
return

Binary file not shown.

View File

@@ -70,11 +70,12 @@ type ArtifactReference struct {
// ArtifactInfo is stored in cache
type ArtifactInfo struct {
SchemaVersion int
Architecture string
Created time.Time
DockerVersion string
OS string
SchemaVersion int
AnalyzerVersions map[string]int `json:",omitempty"`
Architecture string
Created time.Time
DockerVersion string
OS string
// HistoryPackages are packages extracted from RUN instructions
HistoryPackages []Package `json:",omitempty"`
@@ -82,15 +83,16 @@ type ArtifactInfo struct {
// BlobInfo is stored in cache
type BlobInfo struct {
SchemaVersion int
Digest string `json:",omitempty"`
DiffID string `json:",omitempty"`
OS *OS `json:",omitempty"`
PackageInfos []PackageInfo `json:",omitempty"`
Applications []Application `json:",omitempty"`
Configs []Config `json:",omitempty"`
OpaqueDirs []string `json:",omitempty"`
WhiteoutFiles []string `json:",omitempty"`
SchemaVersion int
AnalyzerVersions map[string]int `json:",omitempty"`
Digest string `json:",omitempty"`
DiffID string `json:",omitempty"`
OS *OS `json:",omitempty"`
PackageInfos []PackageInfo `json:",omitempty"`
Applications []Application `json:",omitempty"`
Configs []Config `json:",omitempty"`
OpaqueDirs []string `json:",omitempty"`
WhiteoutFiles []string `json:",omitempty"`
}
// ArtifactDetail is generated by applying blobs