From dca0ca6df23098b63a347e12ceb0dca028a20572 Mon Sep 17 00:00:00 2001 From: David Otto Date: Wed, 7 Jul 2021 12:54:38 +0200 Subject: [PATCH 01/24] Fix load_config: expand ~ --- config/kube_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/kube_config.py b/config/kube_config.py index 584b8a4..040234d 100644 --- a/config/kube_config.py +++ b/config/kube_config.py @@ -45,7 +45,7 @@ pass EXPIRY_SKEW_PREVENTION_DELAY = datetime.timedelta(minutes=5) -KUBE_CONFIG_DEFAULT_LOCATION = os.environ.get('KUBECONFIG', '~/.kube/config') +KUBE_CONFIG_DEFAULT_LOCATION = os.path.expanduser(os.environ.get('KUBECONFIG', '~/.kube/config')) ENV_KUBECONFIG_PATH_SEPARATOR = ';' if platform.system() == 'Windows' else ':' _temp_files = {} From e2ba3fb9fc2cd9dfdb873ab5d48255a1e7c2d26a Mon Sep 17 00:00:00 2001 From: David Otto Date: Mon, 12 Jul 2021 11:21:13 +0200 Subject: [PATCH 02/24] do expanduser in load_config --- config/__init__.py | 5 ++--- config/kube_config.py | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/config/__init__.py b/config/__init__.py index 76297f8..e1bf7f5 100644 --- a/config/__init__.py +++ b/config/__init__.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os +from os.path import exists, expanduser from .config_exception import ConfigException from .incluster_config import load_incluster_config @@ -33,8 +33,7 @@ def load_config(**kwargs): can be passed to either load_kube_config or load_incluster_config functions. """ - if "kube_config_path" in kwargs.keys() or os.path.exists( - KUBE_CONFIG_DEFAULT_LOCATION): + if "kube_config_path" in kwargs.keys() or exists(expanduser(KUBE_CONFIG_DEFAULT_LOCATION)): load_kube_config(**kwargs) else: print( diff --git a/config/kube_config.py b/config/kube_config.py index 040234d..584b8a4 100644 --- a/config/kube_config.py +++ b/config/kube_config.py @@ -45,7 +45,7 @@ pass EXPIRY_SKEW_PREVENTION_DELAY = datetime.timedelta(minutes=5) -KUBE_CONFIG_DEFAULT_LOCATION = os.path.expanduser(os.environ.get('KUBECONFIG', '~/.kube/config')) +KUBE_CONFIG_DEFAULT_LOCATION = os.environ.get('KUBECONFIG', '~/.kube/config') ENV_KUBECONFIG_PATH_SEPARATOR = ';' if platform.system() == 'Windows' else ':' _temp_files = {} From 6f9e3327a8fbdb791a654afffe94840081390189 Mon Sep 17 00:00:00 2001 From: Andrei Marin Date: Sun, 27 Jun 2021 18:26:50 +0300 Subject: [PATCH 03/24] Fix replication controller pods delete in tests --- dynamic/test_client.py | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/dynamic/test_client.py b/dynamic/test_client.py index 54e41bb..78be0cc 100644 --- a/dynamic/test_client.py +++ b/dynamic/test_client.py @@ -318,8 +318,10 @@ def test_replication_controller_apis(self): self.assertEqual(name, resp.metadata.name) self.assertEqual(2, resp.spec.replicas) - resp = api.delete( - name=name, body={}, namespace='default') + api.delete( + name=name, + namespace='default', + propagation_policy='Background') def test_configmap_apis(self): client = DynamicClient(api_client.ApiClient(configuration=self.config)) @@ -357,9 +359,12 @@ def test_configmap_apis(self): resp = api.delete( name=name, body={}, namespace='default') - resp = api.get(namespace='default', pretty=True, label_selector="e2e-test=true") + resp = api.get( + namespace='default', + pretty=True, + label_selector="e2e-test=true") self.assertEqual([], resp.items) - + def test_node_apis(self): client = 
DynamicClient(api_client.ApiClient(configuration=self.config)) api = client.resources.get(api_version='v1', kind='Node') @@ -367,19 +372,23 @@ def test_node_apis(self): for item in api.get().items: node = api.get(name=item.metadata.name) self.assertTrue(len(dict(node.metadata.labels)) > 0) - - # test_node_apis_partial_object_metadata lists all nodes in the cluster, but only retrieves object metadata + + # test_node_apis_partial_object_metadata lists all nodes in the cluster, + # but only retrieves object metadata def test_node_apis_partial_object_metadata(self): client = DynamicClient(api_client.ApiClient(configuration=self.config)) api = client.resources.get(api_version='v1', kind='Node') - - params = {'header_params': {'Accept': 'application/json;as=PartialObjectMetadataList;v=v1;g=meta.k8s.io'}} + + params = { + 'header_params': { + 'Accept': 'application/json;as=PartialObjectMetadataList;v=v1;g=meta.k8s.io'}} resp = api.get(**params) self.assertEqual('PartialObjectMetadataList', resp.kind) self.assertEqual('meta.k8s.io/v1', resp.apiVersion) - params = {'header_params': {'aCcePt': 'application/json;as=PartialObjectMetadataList;v=v1;g=meta.k8s.io'}} + params = { + 'header_params': { + 'aCcePt': 'application/json;as=PartialObjectMetadataList;v=v1;g=meta.k8s.io'}} resp = api.get(**params) self.assertEqual('PartialObjectMetadataList', resp.kind) self.assertEqual('meta.k8s.io/v1', resp.apiVersion) - From cbb71698d775c910d6b18432f48a06dd35449e76 Mon Sep 17 00:00:00 2001 From: Mike Graves Date: Mon, 26 Jul 2021 13:23:57 -0400 Subject: [PATCH 04/24] Add support for dryRun parameter --- dynamic/client.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dynamic/client.py b/dynamic/client.py index f692650..72b3811 100644 --- a/dynamic/client.py +++ b/dynamic/client.py @@ -215,6 +215,8 @@ def request(self, method, path, body=None, **params): query_params.append(('propagationPolicy', params['propagation_policy'])) if params.get('orphan_dependents') is not None: query_params.append(('orphanDependents', params['orphan_dependents'])) + if params.get('dry_run') is not None: + query_params.append(('dryRun', params['dry_run'])) header_params = params.get('header_params', {}) form_params = [] From b0b0ddeedc0338df0aa36c0e16d277ab8165ad1c Mon Sep 17 00:00:00 2001 From: Fabian von Feilitzsch Date: Thu, 29 Jul 2021 16:56:44 -0400 Subject: [PATCH 05/24] Add fabianvf to reviewers --- OWNERS | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/OWNERS b/OWNERS index c331e68..47444bf 100644 --- a/OWNERS +++ b/OWNERS @@ -5,4 +5,5 @@ approvers: - roycaihw emeritus_approvers: - mbohlool - +reviewers: + - fabianvf From 66a45cd081b17041afd62712c5c213d310fa30b3 Mon Sep 17 00:00:00 2001 From: piglei Date: Sun, 22 Aug 2021 11:20:59 +0800 Subject: [PATCH 06/24] Make duck-typing in serialize_body method more restrictive --- dynamic/client.py | 7 ++++++- dynamic/test_client.py | 30 ++++++++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 1 deletion(-) diff --git a/dynamic/client.py b/dynamic/client.py index 9d32770..353a481 100644 --- a/dynamic/client.py +++ b/dynamic/client.py @@ -98,7 +98,12 @@ def ensure_namespace(self, resource, namespace, body): return namespace def serialize_body(self, body): - if hasattr(body, 'to_dict'): + """Serialize body to raw dict so apiserver can handle it + + :param body: kubernetes resource body, current support: Union[Dict, ResourceInstance] + """ + # This should match any `ResourceInstance` instances + if callable(getattr(body, 'to_dict', None)): return 
body.to_dict() return body or {} diff --git a/dynamic/test_client.py b/dynamic/test_client.py index 78be0cc..ab1df93 100644 --- a/dynamic/test_client.py +++ b/dynamic/test_client.py @@ -20,6 +20,7 @@ from kubernetes.client import api_client from . import DynamicClient +from .resource import ResourceInstance, ResourceField from .exceptions import ResourceNotFoundError @@ -392,3 +393,32 @@ def test_node_apis_partial_object_metadata(self): resp = api.get(**params) self.assertEqual('PartialObjectMetadataList', resp.kind) self.assertEqual('meta.k8s.io/v1', resp.apiVersion) + + +class TestDynamicClientSerialization(unittest.TestCase): + + @classmethod + def setUpClass(cls): + config = base.get_e2e_configuration() + cls.client = DynamicClient(api_client.ApiClient(configuration=config)) + cls.pod_manifest = { + 'apiVersion': 'v1', + 'kind': 'Pod', + 'metadata': {'name': 'foo-pod'}, + 'spec': {'containers': [{'name': "main", 'image': "busybox"}]}, + } + + def test_dict_type(self): + self.assertEqual(self.client.serialize_body(self.pod_manifest), self.pod_manifest) + + def test_resource_instance_type(self): + inst = ResourceInstance(self.client, self.pod_manifest) + self.assertEqual(self.client.serialize_body(inst), self.pod_manifest) + + def test_resource_field(self): + """`ResourceField` is a special type which overwrites `__getattr__` method to return `None` + when a non-existent attribute was accessed. which means it can pass any `hasattr(...)` tests. + """ + res = ResourceField(foo='bar') + # method will return original object when it doesn't know how to proceed + self.assertEqual(self.client.serialize_body(res), res) From 70b78cd8488068c014b6d762a0c8d358273865b4 Mon Sep 17 00:00:00 2001 From: Eric Menendez Date: Fri, 27 Aug 2021 15:25:07 -0600 Subject: [PATCH 07/24] Refresh exec-based API credentials when they expire This is a fix for kubernetes-client/python#741. As described in kubernetes-client/python#741, some of the authentication schemes supported by Kubernetes require updating the client's credentials from time to time. The Kubernetes Python client currently does not support this, except for when using the `gcp` auth scheme. This is because the OpenAPI-generated client code does not generally expect credentials to change after the client is configured. However, in OpenAPITools/openapi-generator#3594, the OpenAPI generator added a (undocumented) hook on the `Configuration` object which provides a method for the client credentials to be refreshed as needed. Now that this hook exists, the `load_kube_config()` function, used by the Kubernetes API to set up the `Configuration` object from the client's local k8s config, just needs to be updated to take advantage of this hook. This patch does this for `exec`-based authentication, which should resolve kubernetes-client/python#741. Also, as noted above, `load_kube_config()` already has a special-case monkeypatch to refresh GCP tokens. I presume this functionality was added before the OpenAPI generator added support for the refresh hook. This patch also refactors the GCP token refreshing code to use the new hook instead of the monkeypatch. Tests are also updated. 
--- config/kube_config.py | 38 +++++++--------- config/kube_config_test.py | 91 +++++++++++++++++++++++++------------- 2 files changed, 76 insertions(+), 53 deletions(-) diff --git a/config/kube_config.py b/config/kube_config.py index 584b8a4..f295dbc 100644 --- a/config/kube_config.py +++ b/config/kube_config.py @@ -359,6 +359,8 @@ def _load_gcp_token(self, provider): self._refresh_gcp_token() self.token = "Bearer %s" % provider['config']['access-token'] + if 'expiry' in provider['config']: + self.expiry = parse_rfc3339(provider['config']['expiry']) return self.token def _refresh_gcp_token(self): @@ -483,8 +485,7 @@ def _load_from_exec_plugin(self): status = ExecProvider(self._user['exec']).run() if 'token' in status: self.token = "Bearer %s" % status['token'] - return True - if 'clientCertificateData' in status: + elif 'clientCertificateData' in status: # https://kubernetes.io/docs/reference/access-authn-authz/authentication/#input-and-output-formats # Plugin has provided certificates instead of a token. if 'clientKeyData' not in status: @@ -504,10 +505,13 @@ def _load_from_exec_plugin(self): file_base_path=base_path, base64_file_content=False, temp_file_path=self._temp_file_path).as_file() - return True - logging.error('exec: missing token or clientCertificateData field ' - 'in plugin output') - return None + else: + logging.error('exec: missing token or clientCertificateData ' + 'field in plugin output') + return None + if 'expirationTimestamp' in status: + self.expiry = parse_rfc3339(status['expirationTimestamp']) + return True except Exception as e: logging.error(str(e)) @@ -560,25 +564,15 @@ def _load_cluster_info(self): if 'insecure-skip-tls-verify' in self._cluster: self.verify_ssl = not self._cluster['insecure-skip-tls-verify'] - def _using_gcp_auth_provider(self): - return self._user and \ - 'auth-provider' in self._user and \ - 'name' in self._user['auth-provider'] and \ - self._user['auth-provider']['name'] == 'gcp' - def _set_config(self, client_configuration): - if self._using_gcp_auth_provider(): - # GCP auth tokens must be refreshed regularly, but swagger expects - # a constant token. Replace the swagger-generated client config's - # get_api_key_with_prefix method with our own to allow automatic - # token refresh. - def _gcp_get_api_key(*args): - return self._load_gcp_token(self._user['auth-provider']) - client_configuration.get_api_key_with_prefix = _gcp_get_api_key if 'token' in self.__dict__: - # Note: this line runs for GCP auth tokens as well, but this entry - # will not be updated upon GCP token refresh. 
client_configuration.api_key['authorization'] = self.token + + def _refresh_api_key(client_configuration): + if ('expiry' in self.__dict__ and _is_expired(self.expiry)): + self._load_authentication() + self._set_config(client_configuration) + client_configuration.refresh_api_key_hook = _refresh_api_key # copy these keys directly from self to configuration object keys = ['host', 'ssl_ca_cert', 'cert_file', 'key_file', 'verify_ssl'] for key in keys: diff --git a/config/kube_config_test.py b/config/kube_config_test.py index c33ffed..8151f94 100644 --- a/config/kube_config_test.py +++ b/config/kube_config_test.py @@ -29,7 +29,7 @@ from kubernetes.client import Configuration from .config_exception import ConfigException -from .dateutil import parse_rfc3339 +from .dateutil import format_rfc3339, parse_rfc3339 from .kube_config import (ENV_KUBECONFIG_PATH_SEPARATOR, CommandTokenSource, ConfigNode, FileOrData, KubeConfigLoader, KubeConfigMerger, _cleanup_temp_files, @@ -346,9 +346,12 @@ def test_get_with_name_on_duplicate_name(self): class FakeConfig: FILE_KEYS = ["ssl_ca_cert", "key_file", "cert_file"] + IGNORE_KEYS = ["refresh_api_key_hook"] def __init__(self, token=None, **kwargs): self.api_key = {} + # Provided by the OpenAPI-generated Configuration class + self.refresh_api_key_hook = None if token: self.api_key['authorization'] = token @@ -358,6 +361,8 @@ def __eq__(self, other): if len(self.__dict__) != len(other.__dict__): return for k, v in self.__dict__.items(): + if k in self.IGNORE_KEYS: + continue if k not in other.__dict__: return if k in self.FILE_KEYS: @@ -956,17 +961,15 @@ def test_load_user_token(self): def test_gcp_no_refresh(self): fake_config = FakeConfig() - # swagger-generated config has this, but FakeConfig does not. - self.assertFalse(hasattr(fake_config, 'get_api_key_with_prefix')) + self.assertIsNone(fake_config.refresh_api_key_hook) KubeConfigLoader( config_dict=self.TEST_KUBE_CONFIG, active_context="gcp", get_google_credentials=lambda: _raise_exception( "SHOULD NOT BE CALLED")).load_and_set(fake_config) # Should now be populated with a gcp token fetcher. - self.assertIsNotNone(fake_config.get_api_key_with_prefix) + self.assertIsNotNone(fake_config.refresh_api_key_hook) self.assertEqual(TEST_HOST, fake_config.host) - # For backwards compatibility, authorization field should still be set. self.assertEqual(BEARER_TOKEN_FORMAT % TEST_DATA_BASE64, fake_config.api_key['authorization']) @@ -997,7 +1000,7 @@ def cred(): return None self.assertEqual(BEARER_TOKEN_FORMAT % TEST_ANOTHER_DATA_BASE64, loader.token) - def test_gcp_get_api_key_with_prefix(self): + def test_gcp_refresh_api_key_hook(self): class cred_old: token = TEST_DATA_BASE64 expiry = DATETIME_EXPIRY_PAST @@ -1015,15 +1018,13 @@ class cred_new: get_google_credentials=_get_google_credentials) loader.load_and_set(fake_config) original_expiry = _get_expiry(loader, "expired_gcp_refresh") - # Call GCP token fetcher. - token = fake_config.get_api_key_with_prefix() + # Refresh the GCP token. 
+ fake_config.refresh_api_key_hook(fake_config) new_expiry = _get_expiry(loader, "expired_gcp_refresh") self.assertTrue(new_expiry > original_expiry) self.assertEqual(BEARER_TOKEN_FORMAT % TEST_ANOTHER_DATA_BASE64, loader.token) - self.assertEqual(BEARER_TOKEN_FORMAT % TEST_ANOTHER_DATA_BASE64, - token) def test_oidc_no_refresh(self): loader = KubeConfigLoader( @@ -1383,6 +1384,38 @@ def test_user_exec_auth(self, mock): active_context="exec_cred_user").load_and_set(actual) self.assertEqual(expected, actual) + @mock.patch('kubernetes.config.kube_config.ExecProvider.run') + def test_user_exec_auth_with_expiry(self, mock): + expired_token = "expired" + current_token = "current" + mock.side_effect = [ + { + "token": expired_token, + "expirationTimestamp": format_rfc3339(DATETIME_EXPIRY_PAST) + }, + { + "token": current_token, + "expirationTimestamp": format_rfc3339(DATETIME_EXPIRY_FUTURE) + } + ] + + fake_config = FakeConfig() + self.assertIsNone(fake_config.refresh_api_key_hook) + + KubeConfigLoader( + config_dict=self.TEST_KUBE_CONFIG, + active_context="exec_cred_user").load_and_set(fake_config) + # The kube config should use the first token returned from the + # exec provider. + self.assertEqual(fake_config.api_key["authorization"], + BEARER_TOKEN_FORMAT % expired_token) + # Should now be populated with a method to refresh expired tokens. + self.assertIsNotNone(fake_config.refresh_api_key_hook) + # Refresh the token; the kube config should be updated. + fake_config.refresh_api_key_hook(fake_config) + self.assertEqual(fake_config.api_key["authorization"], + BEARER_TOKEN_FORMAT % current_token) + @mock.patch('kubernetes.config.kube_config.ExecProvider.run') def test_user_exec_auth_certificates(self, mock): mock.return_value = { @@ -1412,7 +1445,6 @@ def test_user_cmd_path(self): KubeConfigLoader( config_dict=self.TEST_KUBE_CONFIG, active_context="contexttestcmdpath").load_and_set(actual) - del actual.get_api_key_with_prefix self.assertEqual(expected, actual) def test_user_cmd_path_empty(self): @@ -1490,31 +1522,28 @@ def test__get_kube_config_loader_dict_no_persist(self): class TestKubernetesClientConfiguration(BaseTestCase): # Verifies properties of kubernetes.client.Configuration. # These tests guard against changes to the upstream configuration class, - # since GCP authorization overrides get_api_key_with_prefix to refresh its - # token regularly. + # since GCP and Exec authorization use refresh_api_key_hook to refresh + # their tokens regularly. 
- def test_get_api_key_with_prefix_exists(self): - self.assertTrue(hasattr(Configuration, 'get_api_key_with_prefix')) + def test_refresh_api_key_hook_exists(self): + self.assertTrue(hasattr(Configuration(), 'refresh_api_key_hook')) - def test_get_api_key_with_prefix_returns_token(self): - expected_token = 'expected_token' - config = Configuration() - config.api_key['authorization'] = expected_token - self.assertEqual(expected_token, - config.get_api_key_with_prefix('authorization')) - - def test_auth_settings_calls_get_api_key_with_prefix(self): + def test_get_api_key_calls_refresh_api_key_hook(self): + identifier = 'authorization' expected_token = 'expected_token' old_token = 'old_token' + config = Configuration( + api_key={identifier: old_token}, + api_key_prefix={identifier: 'Bearer'} + ) + + def refresh_api_key_hook(client_config): + self.assertEqual(client_config, config) + client_config.api_key[identifier] = expected_token + config.refresh_api_key_hook = refresh_api_key_hook - def fake_get_api_key_with_prefix(identifier): - self.assertEqual('authorization', identifier) - return expected_token - config = Configuration() - config.api_key['authorization'] = old_token - config.get_api_key_with_prefix = fake_get_api_key_with_prefix - self.assertEqual(expected_token, - config.auth_settings()['BearerToken']['value']) + self.assertEqual('Bearer ' + expected_token, + config.get_api_key_with_prefix(identifier)) class TestKubeConfigMerger(BaseTestCase): From bd944a58a31f878c5bf4964f458d53512df2ece3 Mon Sep 17 00:00:00 2001 From: jamesgetx Date: Fri, 3 Sep 2021 17:30:56 +0800 Subject: [PATCH 08/24] fix: field extra_args recursive growth caused by Resource and Subresource to_dict method when cache with CacheDecoder --- dynamic/resource.py | 18 ++++++++++-------- dynamic/test_discovery.py | 21 +++++++++++++++++++++ 2 files changed, 31 insertions(+), 8 deletions(-) diff --git a/dynamic/resource.py b/dynamic/resource.py index c83ae9f..6dac1d8 100644 --- a/dynamic/resource.py +++ b/dynamic/resource.py @@ -48,7 +48,7 @@ def __init__(self, prefix=None, group=None, api_version=None, kind=None, self.extra_args = kwargs def to_dict(self): - return { + d = { '_type': 'Resource', 'prefix': self.prefix, 'group': self.group, @@ -58,12 +58,13 @@ def to_dict(self): 'verbs': self.verbs, 'name': self.name, 'preferred': self.preferred, - 'singular_name': self.singular_name, - 'short_names': self.short_names, + 'singularName': self.singular_name, + 'shortNames': self.short_names, 'categories': self.categories, 'subresources': {k: sr.to_dict() for k, sr in self.subresources.items()}, - 'extra_args': self.extra_args, } + d.update(self.extra_args) + return d @property def group_version(self): @@ -236,7 +237,7 @@ def __init__(self, parent, **kwargs): self.api_version = parent.api_version self.kind = kwargs.pop('kind') self.name = kwargs.pop('name') - self.subresource = self.name.split('/')[1] + self.subresource = kwargs.pop('subresource', None) or self.name.split('/')[1] self.namespaced = kwargs.pop('namespaced', False) self.verbs = kwargs.pop('verbs', None) self.extra_args = kwargs @@ -262,14 +263,15 @@ def __getattr__(self, name): return partial(getattr(self.parent.client, name), self) def to_dict(self): - return { + d = { 'kind': self.kind, 'name': self.name, 'subresource': self.subresource, 'namespaced': self.namespaced, - 'verbs': self.verbs, - 'extra_args': self.extra_args, + 'verbs': self.verbs } + d.update(self.extra_args) + return d class ResourceInstance(object): diff --git a/dynamic/test_discovery.py 
b/dynamic/test_discovery.py index 4897f24..639ccdd 100644 --- a/dynamic/test_discovery.py +++ b/dynamic/test_discovery.py @@ -38,3 +38,24 @@ def test_init_cache_from_file(self): # test no Discoverer._write_cache called self.assertTrue(mtime1 == mtime2) + + def test_cache_decoder_resource_and_subresource(self): + client = DynamicClient(api_client.ApiClient(configuration=self.config)) + # first invalidate cache + client.resources.invalidate_cache() + + # do Discoverer.__init__ + client = DynamicClient(api_client.ApiClient(configuration=self.config)) + # the resources of client will use _cache['resources'] in memory + deploy1 = client.resources.get(kind='Deployment') + + # do Discoverer.__init__ + client = DynamicClient(api_client.ApiClient(configuration=self.config)) + # the resources of client will use _cache['resources'] decode from cache file + deploy2 = client.resources.get(kind='Deployment') + + # test Resource is the same + self.assertTrue(deploy1 == deploy2) + + # test Subresource is the same + self.assertTrue(deploy1.status == deploy2.status) From c040d87bd847d5afe480dcc2d39ad46cb6234cc3 Mon Sep 17 00:00:00 2001 From: schneesu Date: Tue, 28 Sep 2021 10:05:17 +0800 Subject: [PATCH 09/24] fix: ignore ResourceNotFoundError in the first call of LazyDiscoverer.__search --- dynamic/discovery.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/dynamic/discovery.py b/dynamic/discovery.py index 5c2f4ac..dbf9410 100644 --- a/dynamic/discovery.py +++ b/dynamic/discovery.py @@ -237,7 +237,11 @@ def api_groups(self): return self.parse_api_groups(request_resources=False, update=True)['apis'].keys() def search(self, **kwargs): - results = self.__search(self.__build_search(**kwargs), self.__resources, []) + # In first call, ignore ResourceNotFoundError and set default value for results + try: + results = self.__search(self.__build_search(**kwargs), self.__resources, []) + except ResourceNotFoundError: + results = [] if not results: self.invalidate_cache() results = self.__search(self.__build_search(**kwargs), self.__resources, []) From 281f17ab237384bc1f5b022555635710b6e6aff3 Mon Sep 17 00:00:00 2001 From: abikouo Date: Thu, 16 Sep 2021 10:52:44 +0200 Subject: [PATCH 10/24] add support for server side apply --- dynamic/client.py | 18 ++++++++++++++++++ dynamic/test_client.py | 24 ++++++++++++++++++++++++ 2 files changed, 42 insertions(+) diff --git a/dynamic/client.py b/dynamic/client.py index 353a481..a81039b 100644 --- a/dynamic/client.py +++ b/dynamic/client.py @@ -149,6 +149,20 @@ def patch(self, resource, body=None, name=None, namespace=None, **kwargs): return self.request('patch', path, body=body, content_type=content_type, **kwargs) + def server_side_apply(self, resource, body=None, name=None, namespace=None, force_conflicts=None, **kwargs): + body = self.serialize_body(body) + name = name or body.get('metadata', {}).get('name') + if not name: + raise ValueError("name is required to patch {}.{}".format(resource.group_version, resource.kind)) + if resource.namespaced: + namespace = self.ensure_namespace(resource, namespace, body) + + # force content type to 'application/apply-patch+yaml' + kwargs.update({'content_type': 'application/apply-patch+yaml'}) + path = resource.path(name=name, namespace=namespace) + + return self.request('patch', path, body=body, force_conflicts=force_conflicts, **kwargs) + def watch(self, resource, namespace=None, name=None, label_selector=None, field_selector=None, resource_version=None, timeout=None, watcher=None): """ Stream events for a 
resource from the Kubernetes API @@ -227,6 +241,10 @@ def request(self, method, path, body=None, **params): query_params.append(('orphanDependents', params['orphan_dependents'])) if params.get('dry_run') is not None: query_params.append(('dryRun', params['dry_run'])) + if params.get('field_manager') is not None: + query_params.append(('fieldManager', params['field_manager'])) + if params.get('force_conflicts') is not None: + query_params.append(('force', params['force_conflicts'])) header_params = params.get('header_params', {}) form_params = [] diff --git a/dynamic/test_client.py b/dynamic/test_client.py index ab1df93..c31270b 100644 --- a/dynamic/test_client.py +++ b/dynamic/test_client.py @@ -15,6 +15,7 @@ import time import unittest import uuid +import json from kubernetes.e2e_test import base from kubernetes.client import api_client @@ -394,6 +395,29 @@ def test_node_apis_partial_object_metadata(self): self.assertEqual('PartialObjectMetadataList', resp.kind) self.assertEqual('meta.k8s.io/v1', resp.apiVersion) + def test_server_side_apply_api(self): + client = DynamicClient(api_client.ApiClient(configuration=self.config)) + api = client.resources.get( + api_version='v1', kind='Pod') + + name = 'pod-' + short_uuid() + pod_manifest = { + 'apiVersion': 'v1', + 'kind': 'Pod', + 'metadata': {'labels': {'name': name}, + 'name': name}, + 'spec': {'containers': [{ + 'image': 'nginx', + 'name': 'nginx', + 'ports': [{'containerPort': 80, + 'protocol': 'TCP'}]}]}} + + body = json.dumps(pod_manifest).encode() + resp = api.server_side_apply( + name=name, namespace='default', body=body, + field_manager='kubernetes-unittests', dry_run="All") + self.assertEqual('kubernetes-unittests', resp.metadata.managedFields[0].manager) + class TestDynamicClientSerialization(unittest.TestCase): From 769bc57ec7b0271a7cb018becee8ad156cf82704 Mon Sep 17 00:00:00 2001 From: itaru2622 Date: Wed, 29 Sep 2021 09:18:55 +0900 Subject: [PATCH 11/24] add proxy authentication supporting for websocket (stream/ws_client.py) --- stream/ws_client.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/stream/ws_client.py b/stream/ws_client.py index 356440c..2a60a8b 100644 --- a/stream/ws_client.py +++ b/stream/ws_client.py @@ -29,6 +29,7 @@ from six import StringIO from websocket import WebSocket, ABNF, enableTrace +from base64 import b64decode STDIN_CHANNEL = 0 STDOUT_CHANNEL = 1 @@ -445,11 +446,20 @@ def create_websocket(configuration, url, headers=None): ssl_opts['keyfile'] = configuration.key_file websocket = WebSocket(sslopt=ssl_opts, skip_utf8_validation=False) + connect_opt = { + 'header': header + } if configuration.proxy: proxy_url = urlparse(configuration.proxy) - websocket.connect(url, header=header, http_proxy_host=proxy_url.hostname, http_proxy_port=proxy_url.port) - else: - websocket.connect(url, header=header) + connect_opt.update({'http_proxy_host': proxy_url.hostname, 'http_proxy_port': proxy_url.port}) + if configuration.proxy_headers: + for key,value in configuration.proxy_headers.items(): + if key == 'proxy-authorization' and value.startswith('Basic'): + b64value = value.split()[1] + auth = b64decode(b64value).decode().split(':') + connect_opt.update({'http_proxy_auth': (auth[0], auth[1]) }) + + websocket.connect(url, **connect_opt) return websocket From 877727110956253be05e45dfb0e18bd094c54e90 Mon Sep 17 00:00:00 2001 From: itaru2622 Date: Tue, 5 Oct 2021 20:50:01 +0900 Subject: [PATCH 12/24] proxy authentication supporting for websocket (stream/ws_client.py), with unittest --- 
stream/ws_client.py | 22 ++++++++++++++-------- stream/ws_client_test.py | 29 +++++++++++++++++++++++++++++ 2 files changed, 43 insertions(+), 8 deletions(-) diff --git a/stream/ws_client.py b/stream/ws_client.py index 2a60a8b..419d28b 100644 --- a/stream/ws_client.py +++ b/stream/ws_client.py @@ -449,18 +449,24 @@ def create_websocket(configuration, url, headers=None): connect_opt = { 'header': header } + + if configuration.proxy or coniguration.proxy_headers: + connect_opt = websocket_proxycare(connect_opt, configuration, url, headers) + + websocket.connect(url, **connect_opt) + return websocket + +def websocket_proxycare(connect_opt, configuration, url, headers): if configuration.proxy: proxy_url = urlparse(configuration.proxy) connect_opt.update({'http_proxy_host': proxy_url.hostname, 'http_proxy_port': proxy_url.port}) if configuration.proxy_headers: - for key,value in configuration.proxy_headers.items(): - if key == 'proxy-authorization' and value.startswith('Basic'): - b64value = value.split()[1] - auth = b64decode(b64value).decode().split(':') - connect_opt.update({'http_proxy_auth': (auth[0], auth[1]) }) - - websocket.connect(url, **connect_opt) - return websocket + for key,value in configuration.proxy_headers.items(): + if key == 'proxy-authorization' and value.startswith('Basic'): + b64value = value.split()[1] + auth = b64decode(b64value).decode().split(':') + connect_opt.update({'http_proxy_auth': (auth[0], auth[1]) }) + return(connect_opt) def websocket_call(configuration, _method, url, **kwargs): diff --git a/stream/ws_client_test.py b/stream/ws_client_test.py index a8f4049..bfcd64d 100644 --- a/stream/ws_client_test.py +++ b/stream/ws_client_test.py @@ -15,7 +15,21 @@ import unittest from .ws_client import get_websocket_url +from .ws_client import websocket_proxycare +from kubernetes.client.configuration import Configuration +try: + import urllib3 + urllib3.disable_warnings() +except ImportError: + pass + +def dictval(dict, key, default=None): + try: + val = dict[key] + except KeyError: + val = default + return val class WSClientTest(unittest.TestCase): @@ -32,6 +46,21 @@ def test_websocket_client(self): ]: self.assertEqual(get_websocket_http://github.com/url(url), ws_url) + def test_websocket_proxycare(self): + for proxy, idpass, expect_host, expect_port, expect_auth in [ + ( None, None, None, None, None ), + ( 'http://proxy.example.com:8080/', None, 'proxy.example.com', 8080, None ), + ( 'http://proxy.example.com:8080/', 'user:pass', 'proxy.example.com', 8080, ('user','pass')) + ]: + config = Configuration() + if proxy is not None: + setattr(config, 'proxy', proxy) + if idpass is not None: + setattr(config, 'proxy_headers', urllib3.util.make_headers(proxy_basic_auth=idpass)) + connect_opt = websocket_proxycare( {}, config, None, None) + self.assertEqual( dictval(connect_opt,'http_proxy_host'), expect_host) + self.assertEqual( dictval(connect_opt,'http_proxy_port'), expect_port) + self.assertEqual( dictval(connect_opt,'http_proxy_auth'), expect_auth) if __name__ == '__main__': unittest.main() From 59e7d115b22bcc2f640949ab880da39da5a0c046 Mon Sep 17 00:00:00 2001 From: itaru2622 Date: Sat, 9 Oct 2021 08:48:00 +0900 Subject: [PATCH 13/24] change base64decode to urlsafe_b64decode --- stream/ws_client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stream/ws_client.py b/stream/ws_client.py index 419d28b..4b26ddd 100644 --- a/stream/ws_client.py +++ b/stream/ws_client.py @@ -29,7 +29,7 @@ from six import StringIO from websocket 
import WebSocket, ABNF, enableTrace -from base64 import b64decode +from base64 import urlsafe_b64decode STDIN_CHANNEL = 0 STDOUT_CHANNEL = 1 @@ -464,7 +464,7 @@ def websocket_proxycare(connect_opt, configuration, url, headers): for key,value in configuration.proxy_headers.items(): if key == 'proxy-authorization' and value.startswith('Basic'): b64value = value.split()[1] - auth = b64decode(b64value).decode().split(':') + auth = urlsafe_b64decode(b64value).decode().split(':') connect_opt.update({'http_proxy_auth': (auth[0], auth[1]) }) return(connect_opt) From f23b2840f88ee51d96089555fae6596d77242112 Mon Sep 17 00:00:00 2001 From: itaru2622 Date: Sun, 10 Oct 2021 11:46:38 +0900 Subject: [PATCH 14/24] fix typo in proxy auth (stream/ws_client.py) --- stream/ws_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stream/ws_client.py b/stream/ws_client.py index 4b26ddd..732ac47 100644 --- a/stream/ws_client.py +++ b/stream/ws_client.py @@ -450,7 +450,7 @@ def create_websocket(configuration, url, headers=None): 'header': header } - if configuration.proxy or coniguration.proxy_headers: + if configuration.proxy or configuration.proxy_headers: connect_opt = websocket_proxycare(connect_opt, configuration, url, headers) websocket.connect(url, **connect_opt) From 95e2e85af5928546b92b9fe06554b48db7f3baaf Mon Sep 17 00:00:00 2001 From: DiptoChakrabarty Date: Fri, 15 Oct 2021 19:14:37 +0530 Subject: [PATCH 15/24] closes open file descriptors to prevent leaks --- config/kube_config.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/config/kube_config.py b/config/kube_config.py index 584b8a4..e5368f4 100644 --- a/config/kube_config.py +++ b/config/kube_config.py @@ -70,7 +70,8 @@ def _create_temp_file_with_content(content, temp_file_path=None): return _temp_files[content_key] if temp_file_path and not os.path.isdir(temp_file_path): os.makedirs(name=temp_file_path) - _, name = tempfile.mkstemp(dir=temp_file_path) + fd, name = tempfile.mkstemp(dir=temp_file_path) + os.close(fd) _temp_files[content_key] = name with open(name, 'wb') as fd: fd.write(content.encode() if isinstance(content, str) else content) From 4ef4139e77eb435faf74944be90ce7f8bbe2e58f Mon Sep 17 00:00:00 2001 From: itaru2622 Date: Mon, 18 Oct 2021 09:30:09 +0900 Subject: [PATCH 16/24] add no_proxy support to stream/ws_client.py --- stream/ws_client.py | 7 +++++++ stream/ws_client_test.py | 18 ++++++++++++++---- 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/stream/ws_client.py b/stream/ws_client.py index 732ac47..6884059 100644 --- a/stream/ws_client.py +++ b/stream/ws_client.py @@ -30,6 +30,7 @@ from websocket import WebSocket, ABNF, enableTrace from base64 import urlsafe_b64decode +from requests.utils import should_bypass_proxies STDIN_CHANNEL = 0 STDOUT_CHANNEL = 1 @@ -457,6 +458,12 @@ def create_websocket(configuration, url, headers=None): return websocket def websocket_proxycare(connect_opt, configuration, url, headers): + """ An internal function to be called in api-client when a websocket + create is requested. 
+ """ + if configuration.no_proxy: + connect_opt.update({ 'http_no_proxy': configuration.no_proxy.split(',') }) + if configuration.proxy: proxy_url = urlparse(configuration.proxy) connect_opt.update({'http_proxy_host': proxy_url.hostname, 'http_proxy_port': proxy_url.port}) diff --git a/stream/ws_client_test.py b/stream/ws_client_test.py index bfcd64d..a7a11f5 100644 --- a/stream/ws_client_test.py +++ b/stream/ws_client_test.py @@ -47,20 +47,30 @@ def test_websocket_client(self): self.assertEqual(get_websocket_http://github.com/url(url), ws_url) def test_websocket_proxycare(self): - for proxy, idpass, expect_host, expect_port, expect_auth in [ - ( None, None, None, None, None ), - ( 'http://proxy.example.com:8080/', None, 'proxy.example.com', 8080, None ), - ( 'http://proxy.example.com:8080/', 'user:pass', 'proxy.example.com', 8080, ('user','pass')) + for proxy, idpass, no_proxy, expect_host, expect_port, expect_auth, expect_noproxy in [ + ( None, None, None, None, None, None, None ), + ( 'http://proxy.example.com:8080/', None, None, 'proxy.example.com', 8080, None, None ), + ( 'http://proxy.example.com:8080/', 'user:pass', None, 'proxy.example.com', 8080, ('user','pass'), None), + ( 'http://proxy.example.com:8080/', 'user:pass', '', 'proxy.example.com', 8080, ('user','pass'), None), + ( 'http://proxy.example.com:8080/', 'user:pass', '*', 'proxy.example.com', 8080, ('user','pass'), ['*']), + ( 'http://proxy.example.com:8080/', 'user:pass', '.example.com', 'proxy.example.com', 8080, ('user','pass'), ['.example.com']), + ( 'http://proxy.example.com:8080/', 'user:pass', 'localhost,.local,.example.com', 'proxy.example.com', 8080, ('user','pass'), ['localhost','.local','.example.com']), ]: + # setup input config = Configuration() if proxy is not None: setattr(config, 'proxy', proxy) if idpass is not None: setattr(config, 'proxy_headers', urllib3.util.make_headers(proxy_basic_auth=idpass)) + if no_proxy is not None: + setattr(config, 'no_proxy', no_proxy) + # setup done + # test starts connect_opt = websocket_proxycare( {}, config, None, None) self.assertEqual( dictval(connect_opt,'http_proxy_host'), expect_host) self.assertEqual( dictval(connect_opt,'http_proxy_port'), expect_port) self.assertEqual( dictval(connect_opt,'http_proxy_auth'), expect_auth) + self.assertEqual( dictval(connect_opt,'http_no_proxy'), expect_noproxy) if __name__ == '__main__': unittest.main() From d47030ac835e00b1bc315349dffa66c252967b1e Mon Sep 17 00:00:00 2001 From: aagten Date: Tue, 9 Nov 2021 21:41:53 +0100 Subject: [PATCH 17/24] Make socket Windows-proof --- stream/ws_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stream/ws_client.py b/stream/ws_client.py index 6884059..4e164e8 100644 --- a/stream/ws_client.py +++ b/stream/ws_client.py @@ -280,7 +280,7 @@ def __init__(self, ix, port_number): # between the python application and the kubernetes websocket. The self.python # half of the socket pair is used by the _proxy method to receive and send data # to the running python application. - s, self.python = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM) + s, self.python = socket.socketpair() # The self.socket half of the pair is used by the python application to send # and receive data to the eventual pod port. It is wrapped in the _Socket class # because a socket pair is an AF_UNIX socket, not a AF_INET socket. 
This allows From 8b306c0f570152d8bbf65736a74b7895d20cf246 Mon Sep 17 00:00:00 2001 From: WalkerWang731 Date: Wed, 17 Nov 2021 16:53:22 +0800 Subject: [PATCH 18/24] add a new method of config.kube_config.new_client_from_config_dict Signed-off-by: WalkerWang731 --- config/__init__.py | 2 +- config/kube_config.py | 18 ++++++++++++++++++ config/kube_config_test.py | 9 ++++++++- 3 files changed, 27 insertions(+), 2 deletions(-) diff --git a/config/__init__.py b/config/__init__.py index e1bf7f5..69ed7f1 100644 --- a/config/__init__.py +++ b/config/__init__.py @@ -18,7 +18,7 @@ from .incluster_config import load_incluster_config from .kube_config import (KUBE_CONFIG_DEFAULT_LOCATION, list_kube_config_contexts, load_kube_config, - load_kube_config_from_dict, new_client_from_config) + load_kube_config_from_dict, new_client_from_config, new_client_from_config_dict) def load_config(**kwargs): diff --git a/config/kube_config.py b/config/kube_config.py index e5368f4..0b6fe56 100644 --- a/config/kube_config.py +++ b/config/kube_config.py @@ -871,3 +871,21 @@ def new_client_from_config( client_configuration=client_config, persist_config=persist_config) return ApiClient(configuration=client_config) + + +def new_client_from_config_dict( + config_dict=None, + context=None, + persist_config=True, + temp_file_path=None): + """ + Loads configuration the same as load_kube_config_from_dict but returns an ApiClient + to be used with any API object. This will allow the caller to concurrently + talk with multiple clusters. + """ + client_config = type.__call__(Configuration) + load_kube_config_from_dict(config_dict=config_dict, context=context, + client_configuration=client_config, + persist_config=persist_config, + temp_file_path=temp_file_path) + return ApiClient(configuration=client_config) diff --git a/config/kube_config_test.py b/config/kube_config_test.py index c33ffed..b903075 100644 --- a/config/kube_config_test.py +++ b/config/kube_config_test.py @@ -37,7 +37,7 @@ _get_kube_config_loader, _get_kube_config_loader_for_yaml_file, list_kube_config_contexts, load_kube_config, - load_kube_config_from_dict, new_client_from_config) + load_kube_config_from_dict, new_client_from_config, new_client_from_config_dict) BEARER_TOKEN_FORMAT = "Bearer %s" @@ -1351,6 +1351,13 @@ def test_new_client_from_config(self): self.assertEqual(BEARER_TOKEN_FORMAT % TEST_DATA_BASE64, client.configuration.api_key['authorization']) + def test_new_client_from_config_dict(self): + client = new_client_from_config_dict( + config_dict=self.TEST_KUBE_CONFIG, context="simple_token") + self.assertEqual(TEST_HOST, client.configuration.host) + self.assertEqual(BEARER_TOKEN_FORMAT % TEST_DATA_BASE64, + client.configuration.api_key['authorization']) + def test_no_users_section(self): expected = FakeConfig(host=TEST_HOST) actual = FakeConfig() From bc697ae8f089b048a8feed0b73b0afc0be3435cf Mon Sep 17 00:00:00 2001 From: Ping He Date: Wed, 24 Nov 2021 15:14:10 +0800 Subject: [PATCH 19/24] Fix leaderelection/example.py, now works in package. 
Signed-off-by: Ping He --- leaderelection/example.py | 6 +++--- leaderelection/resourcelock/configmaplock.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/leaderelection/example.py b/leaderelection/example.py index b8d8e61..3b3336c 100644 --- a/leaderelection/example.py +++ b/leaderelection/example.py @@ -14,9 +14,9 @@ import uuid from kubernetes import client, config -from leaderelection import leaderelection -from leaderelection.resourcelock.configmaplock import ConfigMapLock -from leaderelection import electionconfig +from kubernetes.leaderelection import leaderelection +from kubernetes.leaderelection.resourcelock.configmaplock import ConfigMapLock +from kubernetes.leaderelection import electionconfig # Authenticate using config file diff --git a/leaderelection/resourcelock/configmaplock.py b/leaderelection/resourcelock/configmaplock.py index 8d155e2..54a7bb4 100644 --- a/leaderelection/resourcelock/configmaplock.py +++ b/leaderelection/resourcelock/configmaplock.py @@ -15,7 +15,7 @@ from kubernetes.client.rest import ApiException from kubernetes import client, config from kubernetes.client.api_client import ApiClient -from leaderelection.leaderelectionrecord import LeaderElectionRecord +from ..leaderelectionrecord import LeaderElectionRecord import json import logging logging.basicConfig(level=logging.INFO) From 18828d92cca7e9736d310aab5b2c1f22f0d7f9e7 Mon Sep 17 00:00:00 2001 From: John Sun Date: Mon, 29 Nov 2021 17:33:52 +1100 Subject: [PATCH 20/24] Use select.poll() for exec on linux/darwin --- stream/ws_client.py | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/stream/ws_client.py b/stream/ws_client.py index 356440c..9a9442e 100644 --- a/stream/ws_client.py +++ b/stream/ws_client.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import sys from kubernetes.client.rest import ApiException, ApiValueError @@ -165,8 +166,25 @@ def update(self, timeout=0): if not self.sock.connected: self._connected = False return - r, _, _ = select.select( - (self.sock.sock, ), (), (), timeout) + + # The options here are: + # select.select() - this will work on most OS, however, it has a + # limitation of only able to read fd numbers up to 1024. + # i.e. does not scale well. This was the original + # implementation. + # select.poll() - this will work on most unix based OS, but not as + # efficient as epoll. Will work for fd numbers above 1024. + # select.epoll() - newest and most efficient way of polling. + # However, only works on linux. 
+ if sys.platform.startswith('linux') or sys.platform in ['darwin']: + poll = select.poll() + poll.register(self.sock.sock, select.POLLIN) + r = poll.poll(timeout) + poll.unregister(self.sock.sock) + else: + r, _, _ = select.select( + (self.sock.sock, ), (), (), timeout) + if r: op_code, frame = self.sock.recv_data_frame(True) if op_code == ABNF.OPCODE_CLOSE: From 79e066a0d46a8e7b84366fdd1903965d60ca92a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20Gasser?= Date: Mon, 13 Dec 2021 19:32:32 -0500 Subject: [PATCH 21/24] fix: WSClient.returncode not idempotent --- stream/ws_client.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/stream/ws_client.py b/stream/ws_client.py index 4e164e8..89ad5c2 100644 --- a/stream/ws_client.py +++ b/stream/ws_client.py @@ -63,6 +63,7 @@ def __init__(self, configuration, url, headers, capture_all): self._all = _IgnoredIO() self.sock = create_websocket(configuration, url, headers) self._connected = True + self._returncode = None def peek_channel(self, channel, timeout=0): """Peek a channel and return part of the input, @@ -210,12 +211,14 @@ def returncode(self): if self.is_open(): return None else: - err = self.read_channel(ERROR_CHANNEL) - err = yaml.safe_load(err) - if err['status'] == "Success": - return 0 - return int(err['details']['causes'][0]['message']) - + if self._returncode is None: + err = self.read_channel(ERROR_CHANNEL) + err = yaml.safe_load(err) + if err['status'] == "Success": + self._returncode = 0 + else: + self._returncode = int(err['details']['causes'][0]['message']) + return self._returncode def close(self, **kwargs): """ From 1c5bf586f0882c81c03181588830887345703ca5 Mon Sep 17 00:00:00 2001 From: April Schleck Date: Thu, 23 Dec 2021 14:46:23 -0800 Subject: [PATCH 22/24] Run kubeconfig exec commands in the correct directory. This fixes configs that rely on relative paths. --- config/exec_provider.py | 4 +++- config/exec_provider_test.py | 21 +++++++++++++++------ config/kube_config.py | 4 ++-- 3 files changed, 20 insertions(+), 9 deletions(-) diff --git a/config/exec_provider.py b/config/exec_provider.py index 4008f2e..ef3fac6 100644 --- a/config/exec_provider.py +++ b/config/exec_provider.py @@ -31,7 +31,7 @@ class ExecProvider(object): * caching """ - def __init__(self, exec_config): + def __init__(self, exec_config, cwd): """ exec_config must be of type ConfigNode because we depend on safe_get(self, key) to correctly handle optional exec provider @@ -53,6 +53,7 @@ def __init__(self, exec_config): value = item['value'] additional_vars[name] = value self.env.update(additional_vars) + self.cwd = cwd def run(self, previous_response=None): kubernetes_exec_info = { @@ -69,6 +70,7 @@ def run(self, previous_response=None): self.args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, + cwd=self.cwd, env=self.env, universal_newlines=True) (stdout, stderr) = process.communicate() diff --git a/config/exec_provider_test.py b/config/exec_provider_test.py index 44579be..a545b55 100644 --- a/config/exec_provider_test.py +++ b/config/exec_provider_test.py @@ -47,7 +47,7 @@ def test_missing_input_keys(self): ConfigNode('test3', {'apiVersion': ''})] for exec_config in exec_configs: with self.assertRaises(ConfigException) as context: - ExecProvider(exec_config) + ExecProvider(exec_config, None) self.assertIn('exec: malformed request. 
missing key', context.exception.args[0]) @@ -57,7 +57,7 @@ def test_error_code_returned(self, mock): instance.wait.return_value = 1 instance.communicate.return_value = ('', '') with self.assertRaises(ConfigException) as context: - ep = ExecProvider(self.input_ok) + ep = ExecProvider(self.input_ok, None) ep.run() self.assertIn('exec: process returned %d' % instance.wait.return_value, context.exception.args[0]) @@ -68,7 +68,7 @@ def test_nonjson_output_returned(self, mock): instance.wait.return_value = 0 instance.communicate.return_value = ('', '') with self.assertRaises(ConfigException) as context: - ep = ExecProvider(self.input_ok) + ep = ExecProvider(self.input_ok, None) ep.run() self.assertIn('exec: failed to decode process output', context.exception.args[0]) @@ -102,7 +102,7 @@ def test_missing_output_keys(self, mock): for output in outputs: instance.communicate.return_value = (output, '') with self.assertRaises(ConfigException) as context: - ep = ExecProvider(self.input_ok) + ep = ExecProvider(self.input_ok, None) ep.run() self.assertIn('exec: malformed response. missing key', context.exception.args[0]) @@ -123,7 +123,7 @@ def test_mismatched_api_version(self, mock): """ % wrong_api_version instance.communicate.return_value = (output, '') with self.assertRaises(ConfigException) as context: - ep = ExecProvider(self.input_ok) + ep = ExecProvider(self.input_ok, None) ep.run() self.assertIn( 'exec: plugin api version %s does not match' % @@ -135,11 +135,20 @@ def test_ok_01(self, mock): instance = mock.return_value instance.wait.return_value = 0 instance.communicate.return_value = (self.output_ok, '') - ep = ExecProvider(self.input_ok) + ep = ExecProvider(self.input_ok, None) result = ep.run() self.assertTrue(isinstance(result, dict)) self.assertTrue('token' in result) + @mock.patch('subprocess.Popen') + def test_run_in_dir(self, mock): + instance = mock.return_value + instance.wait.return_value = 0 + instance.communicate.return_value = (self.output_ok, '') + ep = ExecProvider(self.input_ok, '/some/directory') + ep.run() + self.assertEqual(mock.call_args.kwargs['cwd'], '/some/directory') + if __name__ == '__main__': unittest.main() diff --git a/config/kube_config.py b/config/kube_config.py index a04a6e3..f37ed43 100644 --- a/config/kube_config.py +++ b/config/kube_config.py @@ -483,7 +483,8 @@ def _load_from_exec_plugin(self): if 'exec' not in self._user: return try: - status = ExecProvider(self._user['exec']).run() + base_path = self._get_base_path(self._cluster.path) + status = ExecProvider(self._user['exec'], base_path).run() if 'token' in status: self.token = "Bearer %s" % status['token'] elif 'clientCertificateData' in status: @@ -493,7 +494,6 @@ def _load_from_exec_plugin(self): logging.error('exec: missing clientKeyData field in ' 'plugin output') return None - base_path = self._get_base_path(self._cluster.path) self.cert_file = FileOrData( status, None, data_key_name='clientCertificateData', From 6efd33d5c16243929d32139d3b0d0bc34820ea7b Mon Sep 17 00:00:00 2001 From: April Schleck Date: Wed, 5 Jan 2022 17:56:07 -0800 Subject: [PATCH 23/24] Add a test to kube_config_test to check the cwd of the ExecProvider --- config/kube_config_test.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/config/kube_config_test.py b/config/kube_config_test.py index 6ac3db2..02127d1 100644 --- a/config/kube_config_test.py +++ b/config/kube_config_test.py @@ -1441,6 +1441,20 @@ def test_user_exec_auth_certificates(self, mock): 
active_context="exec_cred_user_certificate").load_and_set(actual) self.assertEqual(expected, actual) + @mock.patch('kubernetes.config.kube_config.ExecProvider.run', autospec=True) + def test_user_exec_cwd(self, mock): + capture = {} + def capture_cwd(exec_provider): + capture['cwd'] = exec_provider.cwd + mock.side_effect = capture_cwd + + expected = "/some/random/path" + KubeConfigLoader( + config_dict=self.TEST_KUBE_CONFIG, + active_context="exec_cred_user", + config_base_path=expected).load_and_set(FakeConfig()) + self.assertEqual(expected, capture['cwd']) + def test_user_cmd_path(self): A = namedtuple('A', ['token', 'expiry']) token = "dummy" From 4539902540c19bd824944e6aebad7c0998b648b2 Mon Sep 17 00:00:00 2001 From: Yu Liao Date: Tue, 1 Feb 2022 09:44:14 -0800 Subject: [PATCH 24/24] Update README.md --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index f916e34..9804e0d 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,8 @@ [![Build Status](https://travis-ci.org/kubernetes-client/python-base.svg?branch=master)](https://travis-ci.org/kubernetes-client/python-base) +**This repo has been merged into the [python client](https://github.com/kubernetes-client/python/tree/master/kubernetes/base). Please file issues, contribute PRs there. This repo is kept open to provide the history of issues and PRs.** + This is the utility part of the [python client](https://github.com/kubernetes-client/python). It has been added to the main repo using git submodules. This structure allow other developers to create their own kubernetes client and still use standard kubernetes python utilities.