Merge pull request #664 from vaikas-google/master

Add search support
vaikas-google 8 years ago
commit a4f00f08b9

@@ -2,10 +2,6 @@ package main
 
 import (
 	"fmt"
-	// "io"
-	// "net/http"
-	//"net/url"
-	// "os"
 	"testing"
 )
 

@@ -24,10 +24,10 @@ var searchCmd = &cobra.Command{
 func search(cmd *cobra.Command, args []string) error {
 	if len(args) == 0 {
-		return errors.New("This command needs at least one argument")
+		return errors.New("This command needs at least one argument (search string)")
 	}
-	results, err := searchCacheForPattern(args[0])
+	results, err := searchCacheForPattern(cacheDirectory(), args[0])
 	if err != nil {
 		return err
 	}
@@ -38,9 +38,25 @@ func search(cmd *cobra.Command, args []string) error {
 	return nil
 }
 
-func searchCacheForPattern(name string) ([]string, error) {
+func searchChartRefsForPattern(search string, chartRefs map[string]*repo.ChartRef) []string {
+	matches := []string{}
+	for k, c := range chartRefs {
+		if strings.Contains(c.Name, search) {
+			matches = append(matches, k)
+			continue
+		}
+		for _, keyword := range c.Keywords {
+			if strings.Contains(keyword, search) {
+				matches = append(matches, k)
+			}
+		}
+	}
+	return matches
+}
+
+func searchCacheForPattern(dir string, search string) ([]string, error) {
 	fileList := []string{}
-	filepath.Walk(cacheDirectory(), func(path string, f os.FileInfo, err error) error {
+	filepath.Walk(dir, func(path string, f os.FileInfo, err error) error {
 		if !f.IsDir() {
 			fileList = append(fileList, path)
 		}
@@ -49,11 +65,10 @@ func searchCacheForPattern(name string) ([]string, error) {
 	matches := []string{}
 	for _, f := range fileList {
 		cache, _ := repo.LoadCacheFile(f)
-		repoName := filepath.Base(strings.TrimRight(f, "-cache.txt"))
-		for k := range cache.Entries {
-			if strings.Contains(k, name) {
-				matches = append(matches, repoName+"/"+k)
-			}
+		m := searchChartRefsForPattern(search, cache.Entries)
+		repoName := strings.TrimSuffix(filepath.Base(f), "-cache.yaml")
+		for _, c := range m {
+			matches = append(matches, repoName+"/"+c)
 		}
 	}
 	return matches, nil

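Reviewer note: a minimal sketch (not part of this diff) of how the two new helpers compose. It assumes it sits in the same cmd/helm package as search.go, so searchChartRefsForPattern and the repo and fmt imports are visible, and the chart entries are made up for illustration.

// Sketch only: search an in-memory cache the same way searchCacheForPattern
// searches each loaded cache file.
func exampleSearch() {
	refs := map[string]*repo.ChartRef{
		"nginx-0.1.0":  {Name: "nginx", Keywords: []string{"web server", "proxy"}},
		"alpine-1.0.0": {Name: "alpine", Keywords: []string{"linux", "small"}},
	}
	// A match on the chart name, or on any keyword, appends the cache key.
	for _, key := range searchChartRefsForPattern("web", refs) {
		fmt.Println(key) // prints "nginx-0.1.0"
	}
}

searchCacheForPattern then prefixes each key with the repository name derived from the cache file name, so a hit in local-cache.yaml comes back as local/nginx-0.1.0.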
@@ -0,0 +1,79 @@
+package main
+
+import (
+	"github.com/kubernetes/helm/pkg/repo"
+	"testing"
+)
+
+const testDir = "testdata/"
+const testFile = "testdata/local-cache.yaml"
+
+type searchTestCase struct {
+	in          string
+	expectedOut []string
+}
+
+var searchTestCases = []searchTestCase{
+	{"foo", []string{}},
+	{"alpine", []string{"alpine-1.0.0"}},
+	{"sumtin", []string{"alpine-1.0.0"}},
+	{"web", []string{"nginx-0.1.0"}},
+}
+
+var searchCacheTestCases = []searchTestCase{
+	{"notthere", []string{}},
+	{"odd", []string{"foobar/oddness-1.2.3"}},
+	{"sumtin", []string{"local/alpine-1.0.0", "foobar/oddness-1.2.3"}},
+	{"foobar", []string{"foobar/foobar-0.1.0"}},
+	{"web", []string{"local/nginx-0.1.0"}},
+}
+
+func validateEntries(t *testing.T, in string, found []string, expected []string) {
+	if len(found) != len(expected) {
+		t.Errorf("Failed to search %s: Expected: %#v got: %#v", in, expected, found)
+	}
+	foundCount := 0
+	for _, exp := range expected {
+		for _, f := range found {
+			if exp == f {
+				foundCount = foundCount + 1
+				continue
+			}
+		}
+	}
+	if foundCount != len(expected) {
+		t.Errorf("Failed to find expected items for %s: Expected: %#v got: %#v", in, expected, found)
+	}
+}
+
+func searchTestRunner(t *testing.T, tc searchTestCase) {
+	cf, err := repo.LoadCacheFile(testFile)
+	if err != nil {
+		t.Errorf("Failed to load cache file : %s : %s", testFile, err)
+	}
+	u := searchChartRefsForPattern(tc.in, cf.Entries)
+	validateEntries(t, tc.in, u, tc.expectedOut)
+}
+
+func searchCacheTestRunner(t *testing.T, tc searchTestCase) {
+	u, err := searchCacheForPattern(testDir, tc.in)
+	if err != nil {
+		t.Errorf("searchCacheForPattern failed: %#v", err)
+	}
+	validateEntries(t, tc.in, u, tc.expectedOut)
+}
+
+func TestSearches(t *testing.T) {
+	for _, tc := range searchTestCases {
+		searchTestRunner(t, tc)
+	}
+}
+
+func TestCacheSearches(t *testing.T) {
+	for _, tc := range searchCacheTestCases {
+		searchCacheTestRunner(t, tc)
+	}
+}

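Reviewer note: the two Test* functions above are plain table-driven tests, so they can be run in isolation. Assuming the file lands next to search.go under cmd/helm, something like

	go test ./cmd/helm -run 'TestSearches|TestCacheSearches' -v

exercises both the in-memory match logic and the cache-directory walk against the testdata caches added below.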
@@ -0,0 +1,18 @@
+foobar-0.1.0:
+  url: http://storage.googleapis.com/kubernetes-charts/nginx-0.1.0.tgz
+  name: foobar
+  description: string
+  version: 0.1.0
+  home: https://github.com/foo
+  keywords:
+    - dummy
+    - hokey
+oddness-1.2.3:
+  url: http://storage.googleapis.com/kubernetes-charts/alpine-1.0.0.tgz
+  name: oddness
+  description: string
+  version: 1.2.3
+  home: https://github.com/something
+  keywords:
+    - duck
+    - sumtin

@@ -0,0 +1,22 @@
+nginx-0.1.0:
+  url: http://storage.googleapis.com/kubernetes-charts/nginx-0.1.0.tgz
+  name: nginx
+  description: string
+  version: 0.1.0
+  home: https://github.com/something
+  keywords:
+    - popular
+    - web server
+    - proxy
+alpine-1.0.0:
+  url: http://storage.googleapis.com/kubernetes-charts/alpine-1.0.0.tgz
+  name: alpine
+  description: string
+  version: 1.0.0
+  home: https://github.com/something
+  keywords:
+    - linux
+    - alpine
+    - small
+    - sumtin

@@ -20,8 +20,10 @@ type CacheFile struct {
 
 // ChartRef represents a chart entry in the CacheFile
 type ChartRef struct {
-	Name string
-	URL  string
+	Name     string   `yaml:"name"`
+	URL      string   `yaml:"url"`
+	Keywords []string `yaml:"keywords"`
+	Removed  bool     `yaml:"removed,omitempty"`
 }
 
 // StartLocalRepo starts a web server and serves files from the given path

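Reviewer note: a self-contained sketch (not part of this diff) of what the new yaml tags buy us, assuming cache files are decoded with gopkg.in/yaml.v2. chartRef below is a local stand-in for repo.ChartRef, not the real type.

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// Local stand-in carrying the same yaml tags as repo.ChartRef.
type chartRef struct {
	Name     string   `yaml:"name"`
	URL      string   `yaml:"url"`
	Keywords []string `yaml:"keywords"`
	Removed  bool     `yaml:"removed,omitempty"`
}

func main() {
	data := `
nginx-0.1.0:
  name: nginx
  url: http://storage.googleapis.com/kubernetes-charts/nginx-0.1.0.tgz
  keywords:
    - web server
    - proxy
`
	// The cache file is a map of cache keys to tagged chart entries.
	entries := map[string]*chartRef{}
	if err := yaml.Unmarshal([]byte(data), &entries); err != nil {
		panic(err)
	}
	fmt.Println(entries["nginx-0.1.0"].Name, entries["nginx-0.1.0"].Keywords)
	// nginx [web server proxy]
}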
@@ -0,0 +1,41 @@
+package repo
+
+import (
+	"testing"
+)
+
+const testfile = "testdata/local-cache.yaml"
+
+func TestLoadCacheFile(t *testing.T) {
+	cf, err := LoadCacheFile(testfile)
+	if err != nil {
+		t.Errorf("Failed to load cachefile: %s", err)
+	}
+	if len(cf.Entries) != 2 {
+		t.Errorf("Expected 2 entries in the cache file, but got %d", len(cf.Entries))
+	}
+	nginx := false
+	alpine := false
+	for k, e := range cf.Entries {
+		if k == "nginx-0.1.0" {
+			if e.Name == "nginx" {
+				if len(e.Keywords) == 3 {
+					nginx = true
+				}
+			}
+		}
+		if k == "alpine-1.0.0" {
+			if e.Name == "alpine" {
+				if len(e.Keywords) == 4 {
+					alpine = true
+				}
+			}
+		}
+	}
+	if !nginx {
+		t.Errorf("nginx entry was not decoded properly")
+	}
+	if !alpine {
+		t.Errorf("alpine entry was not decoded properly")
+	}
+}

@@ -0,0 +1,22 @@
+nginx-0.1.0:
+  url: http://storage.googleapis.com/kubernetes-charts/nginx-0.1.0.tgz
+  name: nginx
+  description: string
+  version: 0.1.0
+  home: https://github.com/something
+  keywords:
+    - popular
+    - web server
+    - proxy
+alpine-1.0.0:
+  url: http://storage.googleapis.com/kubernetes-charts/alpine-1.0.0.tgz
+  name: alpine
+  description: string
+  version: 1.0.0
+  home: https://github.com/something
+  keywords:
+    - linux
+    - alpine
+    - small
+    - sumtin