From 6789b4334bbe340f3706f109bfc5490d5103d9ed Mon Sep 17 00:00:00 2001 From: Matt Butcher Date: Wed, 21 Dec 2016 14:19:33 -0700 Subject: [PATCH] docs(install_faq): document tiller panic for auth failure This documents an issue reported in Slack that turned out to be related to security configuration in Kubernetes. --- docs/install_faq.md | 52 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/docs/install_faq.md b/docs/install_faq.md index 484bfc5f1..d1209da59 100644 --- a/docs/install_faq.md +++ b/docs/install_faq.md @@ -133,6 +133,58 @@ A: Helm uses the Kubernetes proxy service to connect to the Tiller server. If the command `kubectl proxy` does not work for you, neither will Helm. Typically, the error is related to a missing `socat` service. +**Q: Tiller crashes with a panic** + +When I run a command on Helm, Tiller crashes with an error like this: + +``` +Tiller is listening on :44134 +Probes server is listening on :44135 +Storage driver is ConfigMap +Cannot initialize Kubernetes connection: the server has asked for the client to provide credentials 2016-12-20 15:18:40.545739 I | storage.go:37: Getting release "bailing-chinchilla" (v1) from storage +panic: runtime error: invalid memory address or nil pointer dereference +[signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x8053d5] + +goroutine 77 [running]: +panic(0x1abbfc0, 0xc42000a040) + /usr/local/go/src/runtime/panic.go:500 +0x1a1 +k8s.io/helm/vendor/k8s.io/kubernetes/pkg/client/unversioned.(*ConfigMaps).Get(0xc4200c6200, 0xc420536100, 0x15, 0x1ca7431, 0x6, 0xc42016b6a0) + /home/ubuntu/.go_workspace/src/k8s.io/helm/vendor/k8s.io/kubernetes/pkg/client/unversioned/configmap.go:58 +0x75 +k8s.io/helm/pkg/storage/driver.(*ConfigMaps).Get(0xc4201d6190, 0xc420536100, 0x15, 0xc420536100, 0x15, 0xc4205360c0) + /home/ubuntu/.go_workspace/src/k8s.io/helm/pkg/storage/driver/cfgmaps.go:69 +0x62 +k8s.io/helm/pkg/storage.(*Storage).Get(0xc4201d61a0, 0xc4205360c0, 0x12, 
0xc400000001, 0x12, 0x0, 0xc420200070) + /home/ubuntu/.go_workspace/src/k8s.io/helm/pkg/storage/storage.go:38 +0x160 +k8s.io/helm/pkg/tiller.(*ReleaseServer).uniqName(0xc42002a000, 0x0, 0x0, 0xc42016b800, 0xd66a13, 0xc42055a040, 0xc420558050, 0xc420122001) + /home/ubuntu/.go_workspace/src/k8s.io/helm/pkg/tiller/release_server.go:577 +0xd7 +k8s.io/helm/pkg/tiller.(*ReleaseServer).prepareRelease(0xc42002a000, 0xc42027c1e0, 0xc42002a001, 0xc42016bad0, 0xc42016ba08) + /home/ubuntu/.go_workspace/src/k8s.io/helm/pkg/tiller/release_server.go:630 +0x71 +k8s.io/helm/pkg/tiller.(*ReleaseServer).InstallRelease(0xc42002a000, 0x7f284c434068, 0xc420250c00, 0xc42027c1e0, 0x0, 0x31a9, 0x31a9) + /home/ubuntu/.go_workspace/src/k8s.io/helm/pkg/tiller/release_server.go:604 +0x78 +k8s.io/helm/pkg/proto/hapi/services._ReleaseService_InstallRelease_Handler(0x1c51f80, 0xc42002a000, 0x7f284c434068, 0xc420250c00, 0xc42027c190, 0x0, 0x0, 0x0, 0x0, 0x0) + /home/ubuntu/.go_workspace/src/k8s.io/helm/pkg/proto/hapi/services/tiller.pb.go:747 +0x27d +k8s.io/helm/vendor/google.golang.org/grpc.(*Server).processUnaryRPC(0xc4202f3ea0, 0x28610a0, 0xc420078000, 0xc420264690, 0xc420166150, 0x288cbe8, 0xc420250bd0, 0x0, 0x0) + /home/ubuntu/.go_workspace/src/k8s.io/helm/vendor/google.golang.org/grpc/server.go:608 +0xc50 +k8s.io/helm/vendor/google.golang.org/grpc.(*Server).handleStream(0xc4202f3ea0, 0x28610a0, 0xc420078000, 0xc420264690, 0xc420250bd0) + /home/ubuntu/.go_workspace/src/k8s.io/helm/vendor/google.golang.org/grpc/server.go:766 +0x6b0 +k8s.io/helm/vendor/google.golang.org/grpc.(*Server).serveStreams.func1.1(0xc420124710, 0xc4202f3ea0, 0x28610a0, 0xc420078000, 0xc420264690) + /home/ubuntu/.go_workspace/src/k8s.io/helm/vendor/google.golang.org/grpc/server.go:419 +0xab +created by k8s.io/helm/vendor/google.golang.org/grpc.(*Server).serveStreams.func1 + /home/ubuntu/.go_workspace/src/k8s.io/helm/vendor/google.golang.org/grpc/server.go:420 +0xa3 +``` + +A: Check your security settings for Kubernetes. 
+ +A panic in Tiller is almost always the result of a failure to negotiate with the +Kubernetes API server (at which point Tiller can no longer do anything useful, so +it panics and exits). + +Often, this is a result of authentication failing because the Pod in which Tiller +is running does not have the right token. + +To fix this, you will need to change your Kubernetes configuration. Make sure +that `--service-account-private-key-file` from `controller-manager` and +`--service-account-key-file` from apiserver point to the _same_ x509 RSA key. + ## Upgrading