Merge "add internal/lib/android_build_client.py and internal/lib/android_build_client_test.py"
diff --git a/internal/lib/android_compute_client.py b/internal/lib/android_compute_client.py
index 0aef253..a3167fd 100755
--- a/internal/lib/android_compute_client.py
+++ b/internal/lib/android_compute_client.py
@@ -51,12 +51,15 @@
class AndroidComputeClient(gcompute_client.ComputeClient):
"""Client that manages Anadroid Virtual Device."""
- INSTANCE_NAME_FMT = "{build_target}-{build_id}-{uuid}"
- IMAGE_NAME_FMT = "image-{build_target}-{build_id}-{uuid}"
- DATA_DISK_NAME_FMT = "{instance}-data"
+ INSTANCE_NAME_FMT = "ins-{uuid}-{build_id}-{build_target}"
+ IMAGE_NAME_FMT = "img-{uuid}-{build_id}-{build_target}"
+ DATA_DISK_NAME_FMT = "data-{instance}"
BOOT_COMPLETED_MSG = "VIRTUAL_DEVICE_BOOT_COMPLETED"
BOOT_TIMEOUT_SECS = 5 * 60 # 5 mins, usually it should take ~2 mins
BOOT_CHECK_INTERVAL_SECS = 10
+ NAME_LENGTH_LIMIT = 63
+ # If the generated name ends with '-', replace it with REPLACER.
+ REPLACER = "e"
def __init__(self, acloud_config, oauth2_credentials):
"""Initialize.
@@ -75,6 +78,28 @@
self._resolution = acloud_config.resolution
self._metadata = acloud_config.metadata_variable.copy()
+ @classmethod
+ def _FormalizeName(cls, name):
+ """Formalize the name to comply with RFC1035.
+
+ The name must be 1-63 characters long and match the regular expression
+ [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must be a
+ lowercase letter, and all following characters must be a dash,
+ lowercase letter, or digit, except the last character, which cannot be
+ a dash.
+
+ Args:
+ name: A string.
+
+ Returns:
+ name: A string that complies with RFC1035.
+ """
+ name = name.replace("_", "-").lower()
+ name = name[:cls.NAME_LENGTH_LIMIT]
+ if name[-1] == "-":
+ name = name[:-1] + cls.REPLACER
+ return name
+
def _CheckMachineSize(self):
"""Check machine size.
@@ -107,7 +132,7 @@
name = cls.IMAGE_NAME_FMT.format(build_target=build_target,
build_id=build_id,
uuid=uuid.uuid4().hex[:8])
- return name.replace("_", "-").lower()
+ return cls._FormalizeName(name)
@classmethod
def GetDataDiskName(cls, instance):
@@ -119,7 +144,8 @@
Returns:
The corresponding data disk name.
"""
- return cls.DATA_DISK_NAME_FMT.format(instance=instance)
+ name = cls.DATA_DISK_NAME_FMT.format(instance=instance)
+ return cls._FormalizeName(name)
@classmethod
def GenerateInstanceName(cls, build_target=None, build_id=None):
@@ -136,10 +162,11 @@
"""
if not build_target and not build_id:
return "instance-" + uuid.uuid4().hex
- return cls.INSTANCE_NAME_FMT.format(
+ name = cls.INSTANCE_NAME_FMT.format(
build_target=build_target,
build_id=build_id,
uuid=uuid.uuid4().hex[:8]).replace("_", "-").lower()
+ return cls._FormalizeName(name)
def CreateDisk(self, disk_name, source_image, size_gb):
"""Create a gce disk.
diff --git a/internal/lib/android_compute_client_test.py b/internal/lib/android_compute_client_test.py
new file mode 100644
index 0000000..88a2d60
--- /dev/null
+++ b/internal/lib/android_compute_client_test.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for android_compute_client."""
+
+import uuid
+
+import mock
+
+import unittest
+from acloud.internal.lib import android_compute_client
+from acloud.internal.lib import driver_test_lib
+from acloud.internal.lib import gcompute_client
+from acloud.public import errors
+
+
+class AndroidComputeClientTest(driver_test_lib.BaseDriverTest):
+ """Test AndroidComputeClient."""
+
+ PROJECT = "fake-project"
+ SERVICE_ACCOUNT_NAME = "fake@fake.com"
+ PRIVATE_KEY_PATH = "/fake/key/path"
+ IMAGE = "fake-image"
+ GS_IMAGE_SOURCE_URI = "https://storage.googleapis.com/fake-bucket/fake.tar.gz"
+ MACHINE_TYPE = "fake-machine-type"
+ MIN_MACHINE_SIZE = "fake-min-machine-size"
+ METADATA = ("metadata_key", "metadata_value")
+ NETWORK = "fake-network"
+ ZONE = "fake-zone"
+ ORIENTATION = "portrait"
+ DEVICE_RESOLUTION = "1200x1200x1200x1200"
+ # NOTE: METADATA is already defined above; duplicate assignment removed.
+ TARGET = "gce_x86-userdebug"
+ BUILD_ID = "2263051"
+
+ def _GetFakeConfig(self):
+ """Create a fake configuration object.
+
+ Returns:
+ A fake configuration mock object.
+ """
+ fake_cfg = mock.MagicMock()
+ fake_cfg.project = self.PROJECT
+ fake_cfg.service_account_name = self.SERVICE_ACCOUNT_NAME
+ fake_cfg.service_account_private_key_path = self.PRIVATE_KEY_PATH
+ fake_cfg.zone = self.ZONE
+ fake_cfg.machine_type = self.MACHINE_TYPE
+ fake_cfg.min_machine_size = self.MIN_MACHINE_SIZE
+ fake_cfg.network = self.NETWORK
+ fake_cfg.orientation = self.ORIENTATION
+ fake_cfg.resolution = self.DEVICE_RESOLUTION
+ fake_cfg.metadata_variable = {self.METADATA[0]: self.METADATA[1]}
+ return fake_cfg
+
+ def setUp(self):
+ """Set up the test."""
+ super(AndroidComputeClientTest, self).setUp()
+ self.Patch(android_compute_client.AndroidComputeClient,
+ "InitResourceHandle")
+ self.android_compute_client = android_compute_client.AndroidComputeClient(
+ self._GetFakeConfig(), mock.MagicMock())
+
+ def testCreateImage(self):
+ """Test CreateImage."""
+ self.Patch(gcompute_client.ComputeClient, "CreateImage")
+ self.Patch(
+ gcompute_client.ComputeClient,
+ "CheckImageExists",
+ return_value=False)
+ unique_id = uuid.uuid4()
+ image_name = "image-gce-x86-userdebug-2345-abcd"
+ self.android_compute_client.CreateImage(image_name,
+ self.GS_IMAGE_SOURCE_URI)
+ super(android_compute_client.AndroidComputeClient,
+ self.android_compute_client).CreateImage.assert_called_with(
+ image_name, self.GS_IMAGE_SOURCE_URI)
+ self.android_compute_client.CheckImageExists.assert_called_with(
+ image_name)
+
+ def testCreateInstance(self):
+ """Test CreateInstance."""
+ self.Patch(
+ gcompute_client.ComputeClient,
+ "CompareMachineSize",
+ return_value=1)
+ self.Patch(gcompute_client.ComputeClient, "CreateInstance")
+ self.Patch(
+ gcompute_client.ComputeClient,
+ "_GetDiskArgs",
+ return_value=[{"fake_arg": "fake_value"}])
+ self.Patch(
+ self.android_compute_client,
+ "_GetExtraDiskArgs",
+ return_value=[{"fake_extra_arg": "fake_extra_value"}])
+ instance_name = "gce-x86-userdebug-2345-abcd"
+ extra_disk_name = "gce-x86-userdebug-2345-abcd-data"
+ expected_metadata = {
+ self.METADATA[0]: self.METADATA[1],
+ "cfg_sta_display_resolution": self.DEVICE_RESOLUTION,
+ "t_force_orientation": self.ORIENTATION,
+ }
+
+ expected_disk_args = [
+ {"fake_arg": "fake_value"}, {"fake_extra_arg": "fake_extra_value"}
+ ]
+
+ self.android_compute_client.CreateInstance(instance_name, self.IMAGE,
+ extra_disk_name)
+ super(android_compute_client.AndroidComputeClient,
+ self.android_compute_client).CreateInstance.assert_called_with(
+ instance_name, self.IMAGE, self.MACHINE_TYPE,
+ expected_metadata, self.NETWORK, self.ZONE,
+ expected_disk_args)
+
+ def testCheckMachineSizeMeetsRequirement(self):
+ """Test CheckMachineSize when machine size meets requirement."""
+ self.Patch(
+ gcompute_client.ComputeClient,
+ "CompareMachineSize",
+ return_value=1)
+ self.android_compute_client._CheckMachineSize()
+ self.android_compute_client.CompareMachineSize.assert_called_with(
+ self.MACHINE_TYPE, self.MIN_MACHINE_SIZE, self.ZONE)
+
+ def testCheckMachineSizeDoesNotMeetRequirement(self):
+ """Test CheckMachineSize when machine size does not meet requirement."""
+ self.Patch(
+ gcompute_client.ComputeClient,
+ "CompareMachineSize",
+ return_value=-1)
+ self.assertRaisesRegexp(
+ errors.DriverError,
+ ".*does not meet the minimum required machine size.*",
+ self.android_compute_client._CheckMachineSize)
+ self.android_compute_client.CompareMachineSize.assert_called_with(
+ self.MACHINE_TYPE, self.MIN_MACHINE_SIZE, self.ZONE)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/internal/lib/auth.py b/internal/lib/auth.py
new file mode 100644
index 0000000..66d1c2c
--- /dev/null
+++ b/internal/lib/auth.py
@@ -0,0 +1,176 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module for handling Authentication.
+
+Possible cases of authentication are noted below.
+
+--------------------------------------------------------
+ account | authentication
+--------------------------------------------------------
+
+google account (e.g. gmail)* | normal oauth2
+
+
+service account* | oauth2 + private key
+
+--------------------------------------------------------
+
+ * For now, non-google employees (i.e. non-@google.com accounts) or
+ non-google-owned service accounts cannot access Android Build API.
+ Only local build artifact can be used.
+
+* Google-owned service account, if used, needs to be whitelisted by
+ Android Build team so that acloud can access build api.
+"""
+
+import logging
+import os
+import sys
+
+import httplib2
+
+from oauth2client import client as oauth2_client
+from oauth2client.contrib import multistore_file
+from oauth2client import tools as oauth2_tools
+
+from acloud.public import errors
+
+logger = logging.getLogger(__name__)
+HOME_FOLDER = os.path.expanduser("~")
+
+
+def _CreateOauthServiceAccountCreds(email, private_key_path, scopes):
+ """Create credentials with a normal service account.
+
+ Args:
+ email: email address as the account.
+ private_key_path: Path to the service account P12 key.
+ scopes: string, multiple scopes should be separated by space.
+ Api scopes to request for the oauth token.
+
+ Returns:
+ An oauth2client.OAuth2Credentials instance.
+
+ Raises:
+ errors.AuthentcationError: if failed to authenticate.
+ """
+ try:
+ with open(private_key_path) as f:
+ private_key = f.read()
+ credentials = oauth2_client.SignedJwtAssertionCredentials(
+ email, private_key, scopes)
+ except EnvironmentError as e:
+ raise errors.AuthentcationError(
+ "Could not authenticate using private key file %s, error message: %s"
+ % (private_key_path, str(e)))
+ return credentials
+
+
+class RunFlowFlags(object):
+ """Flags for oauth2client.tools.run_flow."""
+
+ def __init__(self, browser_auth):
+ self.auth_host_port = [8080, 8090]
+ self.auth_host_name = "localhost"
+ self.logging_level = "ERROR"
+ self.noauth_local_webserver = not browser_auth
+
+
+def _RunAuthFlow(storage, client_id, client_secret, user_agent, scopes):
+ """Get user oauth2 credentials.
+
+ Args:
+ client_id: String, client id from the cloud project.
+ client_secret: String, client secret for the client_id.
+ user_agent: The user agent for the credential, e.g. "acloud"
+ scopes: String, scopes separated by space.
+
+ Returns:
+ An oauth2client.OAuth2Credentials instance.
+ """
+ flags = RunFlowFlags(browser_auth=False)
+ flow = oauth2_client.OAuth2WebServerFlow(
+ client_id=client_id,
+ client_secret=client_secret,
+ scope=scopes,
+ user_agent=user_agent)
+ credentials = oauth2_tools.run_flow(
+ flow=flow, storage=storage, flags=flags)
+ return credentials
+
+
+def _CreateOauthUserCreds(creds_cache_file, client_id, client_secret,
+ user_agent, scopes):
+ """Get user oauth2 credentials.
+
+ Args:
+ creds_cache_file: String, file name for the credential cache.
+ e.g. .acloud_oauth2.dat
+ Will be created at home folder.
+ client_id: String, client id from the cloud project.
+ client_secret: String, client secret for the client_id.
+ user_agent: The user agent for the credential, e.g. "acloud"
+ scopes: String, scopes separated by space.
+
+ Returns:
+ An oauth2client.OAuth2Credentials instance.
+ """
+ if not client_id or not client_secret:
+ raise errors.AuthentcationError(
+ "Could not authenticate using Oauth2 flow, please set client_id "
+ "and client_secret in your config file. Contact the cloud project's "
+ "admin if you don't have the client_id and client_secret.")
+ storage = multistore_file.get_credential_storage(
+ filename=os.path.abspath(creds_cache_file),
+ client_id=client_id,
+ user_agent=user_agent,
+ scope=scopes)
+ credentials = storage.get()
+ if credentials is not None:
+ try:
+ credentials.refresh(httplib2.Http())
+ except oauth2_client.AccessTokenRefreshError:
+ pass
+ if not credentials.invalid:
+ return credentials
+ return _RunAuthFlow(storage, client_id, client_secret, user_agent, scopes)
+
+
+def CreateCredentials(acloud_config, scopes):
+ """Create credentials.
+
+ Args:
+ acloud_config: An AcloudConfig object.
+ scopes: A string representing scopes, separated by space,
+ like "SCOPE_1 SCOPE_2 SCOPE_3"
+
+ Returns:
+ An oauth2client.OAuth2Credentials instance.
+ """
+ if acloud_config.service_account_private_key_path:
+ return _CreateOauthServiceAccountCreds(
+ acloud_config.service_account_name,
+ acloud_config.service_account_private_key_path,
+ scopes=scopes)
+
+ creds_cache_file = os.path.join(HOME_FOLDER,
+ acloud_config.creds_cache_file)
+ return _CreateOauthUserCreds(
+ creds_cache_file=creds_cache_file,
+ client_id=acloud_config.client_id,
+ client_secret=acloud_config.client_secret,
+ user_agent=acloud_config.user_agent,
+ scopes=scopes)
diff --git a/internal/lib/base_cloud_client.py b/internal/lib/base_cloud_client.py
index 5566793..f876118 100755
--- a/internal/lib/base_cloud_client.py
+++ b/internal/lib/base_cloud_client.py
@@ -24,8 +24,6 @@
import socket
import ssl
-import google3
-
from apiclient import errors as gerrors
from apiclient.discovery import build
import apiclient.http
diff --git a/internal/lib/base_cloud_client_test.py b/internal/lib/base_cloud_client_test.py
new file mode 100644
index 0000000..96eb3c3
--- /dev/null
+++ b/internal/lib/base_cloud_client_test.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for acloud.internal.lib.base_cloud_client."""
+
+import time
+import apiclient
+import mock
+
+import unittest
+from acloud.internal.lib import base_cloud_client
+from acloud.internal.lib import driver_test_lib
+from acloud.public import errors
+
+
+class FakeError(Exception):
+ """Fake Error for testing retries."""
+
+
+class BaseCloudApiClientTest(driver_test_lib.BaseDriverTest):
+ """Test BaseCloudApiClient."""
+
+ def setUp(self):
+ """Set up test."""
+ super(BaseCloudApiClientTest, self).setUp()
+
+ def testInitResourceHandle(self):
+ """Test InitResourceHandle."""
+ # Setup mocks
+ mock_credentials = mock.MagicMock()
+ self.Patch(base_cloud_client, "build")
+ # Call the method
+ base_cloud_client.BaseCloudApiClient(mock.MagicMock())
+ base_cloud_client.build.assert_called_once_with(
+ serviceName=base_cloud_client.BaseCloudApiClient.API_NAME,
+ version=base_cloud_client.BaseCloudApiClient.API_VERSION,
+ http=mock.ANY)
+
+ def _SetupInitMocks(self):
+ """Setup mocks required to initialize a base cloud client.
+
+ Returns:
+ A base_cloud_client.BaseCloudApiClient mock.
+ """
+ self.Patch(
+ base_cloud_client.BaseCloudApiClient,
+ "InitResourceHandle",
+ return_value=mock.MagicMock())
+ return base_cloud_client.BaseCloudApiClient(mock.MagicMock())
+
+ def _SetupBatchHttpRequestMock(self, rid_to_responses, rid_to_exceptions):
+ """Setup BatchHttpRequest mock."""
+
+ rid_to_exceptions = rid_to_exceptions or {}
+ rid_to_responses = rid_to_responses or {}
+
+ def _CreatMockBatchHttpRequest():
+ """Create a mock BatchHttpRequest object."""
+ requests = {}
+
+ def _Add(request, callback, request_id):
+ requests[request_id] = (request, callback)
+
+ def _Execute():
+ for rid in requests:
+ requests[rid][0].execute()
+ _, callback = requests[rid]
+ callback(
+ request_id=rid,
+ response=rid_to_responses.get(rid),
+ exception=rid_to_exceptions.get(rid))
+
+ mock_batch = mock.MagicMock()
+ mock_batch.add = _Add
+ mock_batch.execute = _Execute
+ return mock_batch
+
+ self.Patch(
+ apiclient.http,
+ "BatchHttpRequest",
+ side_effect=_CreatMockBatchHttpRequest)
+
+ def testBatchExecute(self):
+ """Test BatchExecute."""
+ self.Patch(time, "sleep")
+ client = self._SetupInitMocks()
+ requests = {"r1": mock.MagicMock(),
+ "r2": mock.MagicMock(),
+ "r3": mock.MagicMock()}
+ response = {"name": "fake_response"}
+ error_1 = errors.HttpError(503, "fake retriable error.")
+ error_2 = FakeError("fake retriable error.")
+ responses = {"r1": response, "r2": None, "r3": None}
+ exceptions = {"r1": None, "r2": error_1, "r3": error_2}
+ self._SetupBatchHttpRequestMock(responses, exceptions)
+ results = client.BatchExecute(
+ requests, other_retriable_errors=(FakeError, ))
+ expected_results = {
+ "r1": (response, None),
+ "r2": (None, error_1),
+ "r3": (None, error_2)
+ }
+ self.assertEqual(results, expected_results)
+ self.assertEqual(requests["r1"].execute.call_count, 1)
+ self.assertEqual(requests["r2"].execute.call_count,
+ client.RETRY_COUNT + 1)
+ self.assertEqual(requests["r3"].execute.call_count,
+ client.RETRY_COUNT + 1)
+
+ def testListWithMultiPages(self):
+ """Test ListWithMultiPages."""
+ fake_token = "fake_next_page_token"
+ item_1 = "item_1"
+ item_2 = "item_2"
+ response_1 = {"items": [item_1], "nextPageToken": fake_token}
+ response_2 = {"items": [item_2]}
+
+ api_mock = mock.MagicMock()
+ api_mock.execute.side_effect = [response_1, response_2]
+ resource_mock = mock.MagicMock(return_value=api_mock)
+ client = self._SetupInitMocks()
+ items = client.ListWithMultiPages(
+ api_resource=resource_mock, fake_arg="fake_arg")
+ self.assertEqual(items, [item_1, item_2])
+
+ def testExecuteWithRetry(self):
+ """Test Execute is called and retries are triggered."""
+ self.Patch(time, "sleep")
+ client = self._SetupInitMocks()
+ api_mock = mock.MagicMock()
+ error = errors.HttpError(503, "fake retriable error.")
+ api_mock.execute.side_effect = error
+ self.assertRaises(errors.HttpError, client.Execute, api_mock)
+
+ api_mock = mock.MagicMock()
+ api_mock.execute.side_effect = FakeError("fake retriable error.")
+ self.assertRaises(
+ FakeError,
+ client.Execute,
+ api_mock,
+ other_retriable_errors=(FakeError, ))
+ self.assertEqual(api_mock.execute.call_count, client.RETRY_COUNT + 1)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/internal/lib/gcompute_client.py b/internal/lib/gcompute_client.py
index 76e3bf9..8cd32b3 100755
--- a/internal/lib/gcompute_client.py
+++ b/internal/lib/gcompute_client.py
@@ -30,8 +30,6 @@
import logging
import os
-import google3
-
from acloud.internal.lib import base_cloud_client
from acloud.internal.lib import utils
from acloud.public import errors
diff --git a/internal/lib/gcompute_client_test.py b/internal/lib/gcompute_client_test.py
new file mode 100644
index 0000000..2382eb1
--- /dev/null
+++ b/internal/lib/gcompute_client_test.py
@@ -0,0 +1,730 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for acloud.internal.lib.gcompute_client."""
+
+import os
+
+import apiclient.http
+import mock
+
+import unittest
+from acloud.internal.lib import driver_test_lib
+from acloud.internal.lib import gcompute_client
+from acloud.internal.lib import utils
+from acloud.public import errors
+
+
+class ComputeClientTest(driver_test_lib.BaseDriverTest):
+ """Test ComputeClient."""
+
+ PROJECT = "fake-project"
+ INSTANCE = "fake-instance"
+ IMAGE = "fake-image"
+ IMAGE_URL = "http://fake-image-url"
+ GS_IMAGE_SOURCE_URI = "https://storage.googleapis.com/fake-bucket/fake.tar.gz"
+ MACHINE_TYPE = "fake-machine-type"
+ MACHINE_TYPE_URL = "http://fake-machine-type-url"
+ METADATA = ("metadata_key", "metadata_value")
+ NETWORK = "fake-network"
+ NETWORK_URL = "http://fake-network-url"
+ ZONE = "fake-zone"
+ REGION = "fake-region"
+ OPERATION_NAME = "fake-op"
+
+ def setUp(self):
+ """Set up test."""
+ super(ComputeClientTest, self).setUp()
+ self.Patch(gcompute_client.ComputeClient, "InitResourceHandle")
+ fake_cfg = mock.MagicMock()
+ fake_cfg.project = self.PROJECT
+ self.compute_client = gcompute_client.ComputeClient(fake_cfg,
+ mock.MagicMock())
+ self.compute_client._service = mock.MagicMock()
+
+ def _SetupMocksForGetOperationStatus(self, mock_result, operation_scope):
+ """A helper class for setting up mocks for testGetOperationStatus*.
+
+ Args:
+ mock_result: The result to return by _GetOperationStatus.
+ operation_scope: A value of OperationScope.
+
+ Returns:
+ A mock for Resource object.
+ """
+ resource_mock = mock.MagicMock()
+ mock_api = mock.MagicMock()
+ if operation_scope == gcompute_client.OperationScope.GLOBAL:
+ self.compute_client._service.globalOperations = mock.MagicMock(
+ return_value=resource_mock)
+ elif operation_scope == gcompute_client.OperationScope.ZONE:
+ self.compute_client._service.zoneOperations = mock.MagicMock(
+ return_value=resource_mock)
+ elif operation_scope == gcompute_client.OperationScope.REGION:
+ self.compute_client._service.regionOperations = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.get = mock.MagicMock(return_value=mock_api)
+ mock_api.execute = mock.MagicMock(return_value=mock_result)
+ return resource_mock
+
+ def testGetOperationStatusGlobal(self):
+ """Test _GetOperationStatus for global."""
+ resource_mock = self._SetupMocksForGetOperationStatus(
+ {"status": "GOOD"}, gcompute_client.OperationScope.GLOBAL)
+ status = self.compute_client._GetOperationStatus(
+ {"name": self.OPERATION_NAME},
+ gcompute_client.OperationScope.GLOBAL)
+ self.assertEqual(status, "GOOD")
+ resource_mock.get.assert_called_with(
+ project=self.PROJECT, operation=self.OPERATION_NAME)
+
+ def testGetOperationStatusZone(self):
+ """Test _GetOperationStatus for zone."""
+ resource_mock = self._SetupMocksForGetOperationStatus(
+ {"status": "GOOD"}, gcompute_client.OperationScope.ZONE)
+ status = self.compute_client._GetOperationStatus(
+ {"name": self.OPERATION_NAME}, gcompute_client.OperationScope.ZONE,
+ self.ZONE)
+ self.assertEqual(status, "GOOD")
+ resource_mock.get.assert_called_with(
+ project=self.PROJECT,
+ operation=self.OPERATION_NAME,
+ zone=self.ZONE)
+
+ def testGetOperationStatusRegion(self):
+ """Test _GetOperationStatus for region."""
+ resource_mock = self._SetupMocksForGetOperationStatus(
+ {"status": "GOOD"}, gcompute_client.OperationScope.REGION)
+ self.compute_client._GetOperationStatus(
+ {"name": self.OPERATION_NAME},
+ gcompute_client.OperationScope.REGION, self.REGION)
+ resource_mock.get.assert_called_with(
+ project=self.PROJECT,
+ operation=self.OPERATION_NAME,
+ region=self.REGION)
+
+ def testGetOperationStatusError(self):
+ """Test _GetOperationStatus failed."""
+ self._SetupMocksForGetOperationStatus(
+ {"error": {"errors": ["error1", "error2"]}},
+ gcompute_client.OperationScope.GLOBAL)
+ self.assertRaisesRegexp(errors.DriverError,
+ "Get operation state failed.*error1.*error2",
+ self.compute_client._GetOperationStatus,
+ {"name": self.OPERATION_NAME},
+ gcompute_client.OperationScope.GLOBAL)
+
+ def testWaitOnOperation(self):
+ """Test WaitOnOperation."""
+ mock_error = mock.MagicMock()
+ self.Patch(utils, "PollAndWait")
+ self.Patch(errors, "GceOperationTimeoutError", return_value=mock_error)
+ self.compute_client.WaitOnOperation(
+ operation={"name": self.OPERATION_NAME},
+ operation_scope=gcompute_client.OperationScope.REGION,
+ scope_name=self.REGION)
+ utils.PollAndWait.assert_called_with(
+ func=self.compute_client._GetOperationStatus,
+ expected_return="DONE",
+ timeout_exception=mock_error,
+ timeout_secs=self.compute_client.OPERATION_TIMEOUT_SECS,
+ sleep_interval_secs=self.compute_client.
+ OPERATION_POLL_INTERVAL_SECS,
+ operation={"name": self.OPERATION_NAME},
+ operation_scope=gcompute_client.OperationScope.REGION,
+ scope_name=self.REGION)
+
+ def testCreateImage(self):
+ """Test CreateImage."""
+ self.Patch(gcompute_client.ComputeClient, "WaitOnOperation")
+ resource_mock = mock.MagicMock()
+ self.compute_client._service.images = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.insert = mock.MagicMock()
+
+ expected_body = {
+ "name": self.IMAGE,
+ "rawDisk": {
+ "source": self.GS_IMAGE_SOURCE_URI,
+ },
+ }
+ self.compute_client.CreateImage(
+ image_name=self.IMAGE, source_uri=self.GS_IMAGE_SOURCE_URI)
+ resource_mock.insert.assert_called_with(
+ project=self.PROJECT, body=expected_body)
+ self.compute_client.WaitOnOperation.assert_called_with(
+ operation=mock.ANY,
+ operation_scope=gcompute_client.OperationScope.GLOBAL)
+
+ def testCreateImageFail(self):
+ """Test CreateImage fails."""
+ self.Patch(
+ gcompute_client.ComputeClient,
+ "WaitOnOperation",
+ side_effect=errors.DriverError("Expected fake error"))
+ self.Patch(
+ gcompute_client.ComputeClient,
+ "CheckImageExists",
+ return_value=True)
+ self.Patch(gcompute_client.ComputeClient, "DeleteImage")
+
+ resource_mock = mock.MagicMock()
+ self.compute_client._service.images = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.insert = mock.MagicMock()
+
+ expected_body = {
+ "name": self.IMAGE,
+ "rawDisk": {
+ "source": self.GS_IMAGE_SOURCE_URI,
+ },
+ }
+ self.assertRaisesRegexp(
+ errors.DriverError,
+ "Expected fake error",
+ self.compute_client.CreateImage,
+ image_name=self.IMAGE,
+ source_uri=self.GS_IMAGE_SOURCE_URI)
+ resource_mock.insert.assert_called_with(
+ project=self.PROJECT, body=expected_body)
+ self.compute_client.WaitOnOperation.assert_called_with(
+ operation=mock.ANY,
+ operation_scope=gcompute_client.OperationScope.GLOBAL)
+ self.compute_client.CheckImageExists.assert_called_with(self.IMAGE)
+ self.compute_client.DeleteImage.assert_called_with(self.IMAGE)
+
+ def testCheckImageExistsTrue(self):
+ """Test CheckImageExists return True."""
+ resource_mock = mock.MagicMock()
+ mock_api = mock.MagicMock()
+ self.compute_client._service.images = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.get = mock.MagicMock(return_value=mock_api)
+ mock_api.execute = mock.MagicMock(return_value={"name": self.IMAGE})
+ self.assertTrue(self.compute_client.CheckImageExists(self.IMAGE))
+
+ def testCheckImageExistsFalse(self):
+ """Test CheckImageExists return False."""
+ resource_mock = mock.MagicMock()
+ mock_api = mock.MagicMock()
+ self.compute_client._service.images = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.get = mock.MagicMock(return_value=mock_api)
+ mock_api.execute = mock.MagicMock(
+ side_effect=errors.ResourceNotFoundError(404, "no image"))
+ self.assertFalse(self.compute_client.CheckImageExists(self.IMAGE))
+
+ def testDeleteImage(self):
+ """Test DeleteImage."""
+ self.Patch(gcompute_client.ComputeClient, "WaitOnOperation")
+ resource_mock = mock.MagicMock()
+ self.compute_client._service.images = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.delete = mock.MagicMock()
+ self.compute_client.DeleteImage(self.IMAGE)
+ resource_mock.delete.assert_called_with(
+ project=self.PROJECT, image=self.IMAGE)
+ self.assertTrue(self.compute_client.WaitOnOperation.called)
+
+ def _SetupBatchHttpRequestMock(self):
+ """Setup BatchHttpRequest mock."""
+ requests = {}
+
+ def _Add(request, callback, request_id):
+ requests[request_id] = (request, callback)
+
+ def _Execute():
+ for rid in requests:
+ _, callback = requests[rid]
+ callback(
+ request_id=rid, response=mock.MagicMock(), exception=None)
+
+ mock_batch = mock.MagicMock()
+ mock_batch.add = _Add
+ mock_batch.execute = _Execute
+ self.Patch(apiclient.http, "BatchHttpRequest", return_value=mock_batch)
+
+ def testDeleteImages(self):
+ """Test DeleteImages."""
+ self._SetupBatchHttpRequestMock()
+ self.Patch(gcompute_client.ComputeClient, "WaitOnOperation")
+ fake_images = ["fake_image_1", "fake_image_2"]
+ mock_api = mock.MagicMock()
+ resource_mock = mock.MagicMock()
+ self.compute_client._service.images = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.delete = mock.MagicMock(return_value=mock_api)
+ # Call the API.
+ deleted, failed, error_msgs = self.compute_client.DeleteImages(
+ fake_images)
+ # Verify
+ calls = [mock.call(
+ project=self.PROJECT, image="fake_image_1"), mock.call(
+ project=self.PROJECT, image="fake_image_2")]
+ resource_mock.delete.assert_has_calls(calls, any_order=True)
+ self.assertEqual(
+ gcompute_client.ComputeClient.WaitOnOperation.call_count, 2)
+ self.assertEqual(error_msgs, [])
+ self.assertEqual(failed, [])
+ self.assertEqual(set(deleted), set(fake_images))
+
+ def testListImages(self):
+ """Test ListImages."""
+ fake_token = "fake_next_page_token"
+ image_1 = "image_1"
+ image_2 = "image_2"
+ response_1 = {"items": [image_1], "nextPageToken": fake_token}
+ response_2 = {"items": [image_2]}
+ self.Patch(
+ gcompute_client.ComputeClient,
+ "Execute",
+ side_effect=[response_1, response_2])
+ resource_mock = mock.MagicMock()
+ self.compute_client._service.images = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.list = mock.MagicMock()
+ images = self.compute_client.ListImages()
+ calls = [
+ mock.call(
+ project=self.PROJECT, filter=None, pageToken=None), mock.call(
+ project=self.PROJECT, filter=None, pageToken=fake_token)
+ ]
+ resource_mock.list.assert_has_calls(calls)
+ self.assertEqual(images, [image_1, image_2])
+
+ def testGetInstance(self):
+ """Test GetInstance."""
+ resource_mock = mock.MagicMock()
+ mock_api = mock.MagicMock()
+ self.compute_client._service.instances = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.get = mock.MagicMock(return_value=mock_api)
+ mock_api.execute = mock.MagicMock(return_value={"name": self.INSTANCE})
+ result = self.compute_client.GetInstance(self.INSTANCE, self.ZONE)
+ self.assertEqual(result, {"name": self.INSTANCE})
+ resource_mock.get.assert_called_with(
+ project=self.PROJECT, zone=self.ZONE, instance=self.INSTANCE)
+
+ def testListInstances(self):
+ """Test ListInstances."""
+ fake_token = "fake_next_page_token"
+ instance_1 = "instance_1"
+ instance_2 = "instance_2"
+ response_1 = {"items": [instance_1], "nextPageToken": fake_token}
+ response_2 = {"items": [instance_2]}
+ self.Patch(
+ gcompute_client.ComputeClient,
+ "Execute",
+ side_effect=[response_1, response_2])
+ resource_mock = mock.MagicMock()
+ self.compute_client._service.instances = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.list = mock.MagicMock()
+ instances = self.compute_client.ListInstances(self.ZONE)
+ calls = [
+ mock.call(
+ project=self.PROJECT,
+ zone=self.ZONE,
+ filter=None,
+ pageToken=None),
+ mock.call(
+ project=self.PROJECT,
+ zone=self.ZONE,
+ filter=None,
+ pageToken=fake_token),
+ ]
+ resource_mock.list.assert_has_calls(calls)
+ self.assertEqual(instances, [instance_1, instance_2])
+
+ def testCreateInstance(self):
+ """Test CreateInstance."""
+ self.Patch(gcompute_client.ComputeClient, "WaitOnOperation")
+ self.Patch(
+ gcompute_client.ComputeClient,
+ "GetMachineType",
+ return_value={"selfLink": self.MACHINE_TYPE_URL})
+ self.Patch(
+ gcompute_client.ComputeClient,
+ "GetNetworkUrl",
+ return_value=self.NETWORK_URL)
+ self.Patch(
+ gcompute_client.ComputeClient,
+ "GetImage",
+ return_value={"selfLink": self.IMAGE_URL})
+
+ resource_mock = mock.MagicMock()
+ self.compute_client._service.instances = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.insert = mock.MagicMock()
+
+ expected_body = {
+ "machineType": self.MACHINE_TYPE_URL,
+ "name": self.INSTANCE,
+ "networkInterfaces": [
+ {
+ "network": self.NETWORK_URL,
+ "accessConfigs": [
+ {"name": "External NAT",
+ "type": "ONE_TO_ONE_NAT"}
+ ],
+ }
+ ],
+ "disks": [
+ {
+ "type": "PERSISTENT",
+ "boot": True,
+ "mode": "READ_WRITE",
+ "autoDelete": True,
+ "initializeParams": {
+ "diskName": self.INSTANCE,
+ "sourceImage": self.IMAGE_URL,
+ },
+ }
+ ],
+ "serviceAccounts": [
+ {"email": "default",
+ "scopes": self.compute_client.DEFAULT_INSTANCE_SCOPE}
+ ],
+ "metadata": {
+ "items": [{"key": self.METADATA[0],
+ "value": self.METADATA[1]}],
+ },
+ }
+
+ self.compute_client.CreateInstance(
+ instance=self.INSTANCE,
+ image_name=self.IMAGE,
+ machine_type=self.MACHINE_TYPE,
+ metadata={self.METADATA[0]: self.METADATA[1]},
+ network=self.NETWORK,
+ zone=self.ZONE)
+
+ resource_mock.insert.assert_called_with(
+ project=self.PROJECT, zone=self.ZONE, body=expected_body)
+ self.compute_client.WaitOnOperation.assert_called_with(
+ mock.ANY,
+ operation_scope=gcompute_client.OperationScope.ZONE,
+ scope_name=self.ZONE)
+
+ def testDeleteInstance(self):
+ """Test DeleteInstance."""
+ self.Patch(gcompute_client.ComputeClient, "WaitOnOperation")
+ resource_mock = mock.MagicMock()
+ self.compute_client._service.instances = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.delete = mock.MagicMock()
+ self.compute_client.DeleteInstance(
+ instance=self.INSTANCE, zone=self.ZONE)
+ resource_mock.delete.assert_called_with(
+ project=self.PROJECT, zone=self.ZONE, instance=self.INSTANCE)
+ self.compute_client.WaitOnOperation.assert_called_with(
+ mock.ANY,
+ operation_scope=gcompute_client.OperationScope.ZONE,
+ scope_name=self.ZONE)
+
+ def testDeleteInstances(self):
+ """Test DeleteInstances."""
+ self._SetupBatchHttpRequestMock()
+ self.Patch(gcompute_client.ComputeClient, "WaitOnOperation")
+ fake_instances = ["fake_instance_1", "fake_instance_2"]
+ mock_api = mock.MagicMock()
+ resource_mock = mock.MagicMock()
+ self.compute_client._service.instances = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.delete = mock.MagicMock(return_value=mock_api)
+ deleted, failed, error_msgs = self.compute_client.DeleteInstances(
+ fake_instances, self.ZONE)
+ calls = [
+ mock.call(
+ project=self.PROJECT,
+ instance="fake_instance_1",
+ zone=self.ZONE), mock.call(
+ project=self.PROJECT,
+ instance="fake_instance_2",
+ zone=self.ZONE)
+ ]
+ resource_mock.delete.assert_has_calls(calls, any_order=True)
+ self.assertEqual(
+ gcompute_client.ComputeClient.WaitOnOperation.call_count, 2)
+ self.assertEqual(error_msgs, [])
+ self.assertEqual(failed, [])
+ self.assertEqual(set(deleted), set(fake_instances))
+
+ def testBatchExecuteOnInstances(self):
+ self._SetupBatchHttpRequestMock()
+ self.Patch(gcompute_client.ComputeClient, "WaitOnOperation")
+ action = mock.MagicMock(return_value=mock.MagicMock())
+ fake_instances = ["fake_instance_1", "fake_instance_2"]
+ done, failed, error_msgs = self.compute_client._BatchExecuteOnInstances(
+ fake_instances, self.ZONE, action)
+ calls = [mock.call(instance="fake_instance_1"),
+ mock.call(instance="fake_instance_2")]
+ action.assert_has_calls(calls, any_order=True)
+ self.assertEqual(
+ gcompute_client.ComputeClient.WaitOnOperation.call_count, 2)
+ self.assertEqual(set(done), set(fake_instances))
+ self.assertEqual(error_msgs, [])
+ self.assertEqual(failed, [])
+
+ def testResetInstance(self):
+ """Test ResetInstance."""
+ self.Patch(gcompute_client.ComputeClient, "WaitOnOperation")
+ resource_mock = mock.MagicMock()
+ self.compute_client._service.instances = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.reset = mock.MagicMock()
+ self.compute_client.ResetInstance(
+ instance=self.INSTANCE, zone=self.ZONE)
+ resource_mock.reset.assert_called_with(
+ project=self.PROJECT, zone=self.ZONE, instance=self.INSTANCE)
+ self.compute_client.WaitOnOperation.assert_called_with(
+ mock.ANY,
+ operation_scope=gcompute_client.OperationScope.ZONE,
+ scope_name=self.ZONE)
+
+ def _CompareMachineSizeTestHelper(self,
+ machine_info_1,
+ machine_info_2,
+ expected_result=None,
+ expected_error_type=None):
+        """Helper method for testing CompareMachineSize.
+
+ Args:
+ machine_info_1: A dictionary representing the first machine size.
+ machine_info_2: A dictionary representing the second machine size.
+ expected_result: An integer, 0, 1 or -1, or None if not set.
+ expected_error_type: An exception type, if set will check for exception.
+ """
+ self.Patch(
+ gcompute_client.ComputeClient,
+ "GetMachineType",
+ side_effect=[machine_info_1, machine_info_2])
+ if expected_error_type:
+ self.assertRaises(expected_error_type,
+ self.compute_client.CompareMachineSize, "name1",
+ "name2", self.ZONE)
+ else:
+ result = self.compute_client.CompareMachineSize("name1", "name2",
+ self.ZONE)
+ self.assertEqual(result, expected_result)
+
+ self.compute_client.GetMachineType.assert_has_calls(
+ [mock.call("name1", self.ZONE), mock.call("name2", self.ZONE)])
+
+ def testCompareMachineSizeSmall(self):
+ """Test CompareMachineSize where the first one is smaller."""
+ machine_info_1 = {"guestCpus": 10, "memoryMb": 100}
+ machine_info_2 = {"guestCpus": 10, "memoryMb": 200}
+ self._CompareMachineSizeTestHelper(machine_info_1, machine_info_2, -1)
+
+ def testCompareMachineSizeLarge(self):
+ """Test CompareMachineSize where the first one is larger."""
+ machine_info_1 = {"guestCpus": 10, "memoryMb": 200}
+ machine_info_2 = {"guestCpus": 10, "memoryMb": 100}
+ self._CompareMachineSizeTestHelper(machine_info_1, machine_info_2, 1)
+
+ def testCompareMachineSizeEqual(self):
+ """Test CompareMachineSize where two machine sizes are equal."""
+ machine_info = {"guestCpus": 10, "memoryMb": 100}
+ self._CompareMachineSizeTestHelper(machine_info, machine_info, 0)
+
+ def testCompareMachineSizeBadMetric(self):
+ """Test CompareMachineSize with bad metric."""
+ machine_info = {"unkown_metric": 10, "memoryMb": 100}
+ self._CompareMachineSizeTestHelper(
+ machine_info, machine_info, expected_error_type=errors.DriverError)
+
+ def testGetMachineType(self):
+ """Test GetMachineType."""
+ resource_mock = mock.MagicMock()
+ mock_api = mock.MagicMock()
+ self.compute_client._service.machineTypes = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.get = mock.MagicMock(return_value=mock_api)
+ mock_api.execute = mock.MagicMock(
+ return_value={"name": self.MACHINE_TYPE})
+ result = self.compute_client.GetMachineType(self.MACHINE_TYPE,
+ self.ZONE)
+ self.assertEqual(result, {"name": self.MACHINE_TYPE})
+ resource_mock.get.assert_called_with(
+ project=self.PROJECT,
+ zone=self.ZONE,
+ machineType=self.MACHINE_TYPE)
+
+ def _GetSerialPortOutputTestHelper(self, response):
+ """Helper function for testing GetSerialPortOutput.
+
+ Args:
+ response: A dictionary representing a fake response.
+ """
+ resource_mock = mock.MagicMock()
+ mock_api = mock.MagicMock()
+ self.compute_client._service.instances = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.getSerialPortOutput = mock.MagicMock(
+ return_value=mock_api)
+ mock_api.execute = mock.MagicMock(return_value=response)
+
+ if "contents" in response:
+ result = self.compute_client.GetSerialPortOutput(
+ instance=self.INSTANCE, zone=self.ZONE)
+ self.assertEqual(result, "fake contents")
+ else:
+ self.assertRaisesRegexp(
+ errors.DriverError,
+ "Malformed response.*",
+ self.compute_client.GetSerialPortOutput,
+ instance=self.INSTANCE,
+ zone=self.ZONE)
+ resource_mock.getSerialPortOutput.assert_called_with(
+ project=self.PROJECT,
+ zone=self.ZONE,
+ instance=self.INSTANCE,
+ port=1)
+
+ def testGetSerialPortOutput(self):
+ response = {"contents": "fake contents"}
+ self._GetSerialPortOutputTestHelper(response)
+
+ def testGetSerialPortOutputFail(self):
+ response = {"malformed": "fake contents"}
+ self._GetSerialPortOutputTestHelper(response)
+
+ def testGetInstanceNamesByIPs(self):
+ """Test GetInstanceNamesByIPs."""
+ good_instance = {
+ "name": "instance_1",
+ "networkInterfaces": [
+ {
+ "accessConfigs": [
+ {"natIP": "172.22.22.22"},
+ ],
+ },
+ ],
+ }
+ bad_instance = {"name": "instance_2"}
+ self.Patch(
+ gcompute_client.ComputeClient,
+ "ListInstances",
+ return_value=[good_instance, bad_instance])
+ ip_name_map = self.compute_client.GetInstanceNamesByIPs(
+ ips=["172.22.22.22", "172.22.22.23"], zone=self.ZONE)
+ self.assertEqual(ip_name_map, {"172.22.22.22": "instance_1",
+ "172.22.22.23": None})
+
+ def testAddSshRsa(self):
+        """Test AddSshRsa."""
+ fake_user = "fake_user"
+ sshkey = (
+ "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDBkTOTRze9v2VOqkkf7RG"
+ "jSkg6Z2kb9Q9UHsDGatvend3fmjIw1Tugg0O7nnjlPkskmlgyd4a/j99WOeLL"
+ "CPk6xPyoVjrPUVBU/pAk09ORTC4Zqk6YjlW7LOfzvqmXhmIZfYu6Q4Yt50pZzhl"
+ "lllfu26nYjY7Tg12D019nJi/kqPX5+NKgt0LGXTu8T1r2Gav/q4V7QRWQrB8Eiu"
+ "pxXR7I2YhynqovkEt/OXG4qWgvLEXGsWtSQs0CtCzqEVxz0Y9ECr7er4VdjSQxV"
+ "AaeLAsQsK9ROae8hMBFZ3//8zLVapBwpuffCu+fUoql9qeV9xagZcc9zj8XOUOW"
+ "ApiihqNL1111 test@test1.org")
+ project = {
+ "commonInstanceMetadata": {
+ "kind": "compute#metadata",
+ "fingerprint": "a-23icsyx4E=",
+ "items": [
+ {
+ "key": "sshKeys",
+ "value": "user:key"
+ }
+ ]
+ }
+ }
+ expected = {
+ "kind": "compute#metadata",
+ "fingerprint": "a-23icsyx4E=",
+ "items": [
+ {
+ "key": "sshKeys",
+ "value": "user:key\n%s:%s" % (fake_user, sshkey)
+ }
+ ]
+ }
+
+ self.Patch(os.path, "exists", return_value=True)
+ m = mock.mock_open(read_data=sshkey)
+ self.Patch(__builtins__, "open", m, create=True)
+ self.Patch(gcompute_client.ComputeClient, "WaitOnOperation")
+ self.Patch(
+ gcompute_client.ComputeClient, "GetProject", return_value=project)
+ resource_mock = mock.MagicMock()
+ self.compute_client._service.projects = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.setCommonInstanceMetadata = mock.MagicMock()
+
+ self.compute_client.AddSshRsa(fake_user, "/path/to/test_rsa.pub")
+ resource_mock.setCommonInstanceMetadata.assert_called_with(
+ project=self.PROJECT, body=expected)
+
+ def testAddSshRsaInvalidKey(self):
+        """Test AddSshRsa with an invalid ssh key."""
+ fake_user = "fake_user"
+ sshkey = "ssh-rsa v2VOqkkf7RGL1111 test@test1.org"
+ project = {
+ "commonInstanceMetadata": {
+ "kind": "compute#metadata",
+ "fingerprint": "a-23icsyx4E=",
+ "items": [
+ {
+ "key": "sshKeys",
+ "value": "user:key"
+ }
+ ]
+ }
+ }
+ self.Patch(os.path, "exists", return_value=True)
+ m = mock.mock_open(read_data=sshkey)
+ self.Patch(__builtins__, "open", m, create=True)
+ self.Patch(gcompute_client.ComputeClient, "WaitOnOperation")
+ self.Patch(
+ gcompute_client.ComputeClient, "GetProject", return_value=project)
+ self.assertRaisesRegexp(errors.DriverError, "rsa key is invalid:*",
+ self.compute_client.AddSshRsa, fake_user,
+ "/path/to/test_rsa.pub")
+
+ def testDeleteDisks(self):
+ """Test DeleteDisks."""
+ self._SetupBatchHttpRequestMock()
+ self.Patch(gcompute_client.ComputeClient, "WaitOnOperation")
+ fake_disks = ["fake_disk_1", "fake_disk_2"]
+ mock_api = mock.MagicMock()
+ resource_mock = mock.MagicMock()
+ self.compute_client._service.disks = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.delete = mock.MagicMock(return_value=mock_api)
+ # Call the API.
+ deleted, failed, error_msgs = self.compute_client.DeleteDisks(
+ fake_disks, zone=self.ZONE)
+ # Verify
+ calls = [mock.call(
+ project=self.PROJECT, disk="fake_disk_1", zone=self.ZONE),
+ mock.call(
+ project=self.PROJECT, disk="fake_disk_2", zone=self.ZONE)]
+ resource_mock.delete.assert_has_calls(calls, any_order=True)
+ self.assertEqual(
+ gcompute_client.ComputeClient.WaitOnOperation.call_count, 2)
+ self.assertEqual(error_msgs, [])
+ self.assertEqual(failed, [])
+ self.assertEqual(set(deleted), set(fake_disks))
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/internal/lib/gstorage_client.py b/internal/lib/gstorage_client.py
index 2dbaaf6..686d8af 100755
--- a/internal/lib/gstorage_client.py
+++ b/internal/lib/gstorage_client.py
@@ -20,11 +20,10 @@
import logging
import os
-import google3
-
import apiclient
from acloud.internal.lib import base_cloud_client
+from acloud.internal.lib import utils
from acloud.public import errors
logger = logging.getLogger(__name__)
diff --git a/internal/lib/gstorage_client_test.py b/internal/lib/gstorage_client_test.py
new file mode 100644
index 0000000..8577dab
--- /dev/null
+++ b/internal/lib/gstorage_client_test.py
@@ -0,0 +1,148 @@
+"""Tests for acloud.internal.lib.gstorage_client."""
+
+import io
+import time
+
+import apiclient
+import mock
+
+import unittest
+from acloud.internal.lib import driver_test_lib
+from acloud.internal.lib import gstorage_client
+from acloud.public import errors
+
+
+class StorageClientTest(driver_test_lib.BaseDriverTest):
+ """Test StorageClient."""
+
+ LOCAL_SRC = "/fake/local/path"
+ BUCKET = "fake_bucket"
+ OBJECT = "fake_obj"
+ MIME_TYPE = "fake_mimetype"
+
+ def setUp(self):
+ """Set up test."""
+ super(StorageClientTest, self).setUp()
+ self.Patch(gstorage_client.StorageClient, "InitResourceHandle")
+ self.client = gstorage_client.StorageClient(mock.MagicMock())
+ self.client._service = mock.MagicMock()
+
+ def testGet(self):
+ """Test Get."""
+ mock_api = mock.MagicMock()
+ resource_mock = mock.MagicMock()
+ self.client._service.objects = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.get = mock.MagicMock(return_value=mock_api)
+ self.client.Get(self.BUCKET, self.OBJECT)
+ resource_mock.get.assert_called_with(
+ bucket=self.BUCKET, object=self.OBJECT)
+ self.assertTrue(mock_api.execute.called)
+
+ def testList(self):
+ """Test List."""
+ mock_items = ["fake/return"]
+ self.Patch(
+ gstorage_client.StorageClient,
+ "ListWithMultiPages",
+ return_value=mock_items)
+ resource_mock = mock.MagicMock()
+ self.client._service.objects = mock.MagicMock(
+ return_value=resource_mock)
+ items = self.client.List(self.BUCKET, self.OBJECT)
+ self.client.ListWithMultiPages.assert_called_once_with(
+ api_resource=resource_mock.list,
+ bucket=self.BUCKET,
+ prefix=self.OBJECT)
+ self.assertEqual(mock_items, items)
+
+ def testUpload(self):
+ """Test Upload."""
+ # Create mocks
+ mock_file = mock.MagicMock()
+ mock_file_io = mock.MagicMock()
+ mock_file_io.__enter__.return_value = mock_file
+ mock_media = mock.MagicMock()
+ mock_api = mock.MagicMock()
+ mock_response = mock.MagicMock()
+
+ self.Patch(io, "FileIO", return_value=mock_file_io)
+ self.Patch(
+ apiclient.http, "MediaIoBaseUpload", return_value=mock_media)
+ resource_mock = mock.MagicMock()
+ self.client._service.objects = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.insert = mock.MagicMock(return_value=mock_api)
+ mock_api.execute = mock.MagicMock(return_value=mock_response)
+
+ # Make the call to the api
+ response = self.client.Upload(self.LOCAL_SRC, self.BUCKET, self.OBJECT,
+ self.MIME_TYPE)
+
+ # Verify
+ self.assertEqual(response, mock_response)
+ io.FileIO.assert_called_with(self.LOCAL_SRC, mode="rb")
+ apiclient.http.MediaIoBaseUpload.assert_called_with(mock_file,
+ self.MIME_TYPE)
+ resource_mock.insert.assert_called_with(
+ bucket=self.BUCKET, name=self.OBJECT, media_body=mock_media)
+
+ def testUploadOSError(self):
+ """Test Upload when OSError is raised."""
+ self.Patch(io, "FileIO", side_effect=OSError("fake OSError"))
+ self.assertRaises(errors.DriverError, self.client.Upload,
+ self.LOCAL_SRC, self.BUCKET, self.OBJECT,
+ self.MIME_TYPE)
+
+ def testDelete(self):
+ """Test Delete."""
+ mock_api = mock.MagicMock()
+ resource_mock = mock.MagicMock()
+ self.client._service.objects = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.delete = mock.MagicMock(return_value=mock_api)
+ self.client.Delete(self.BUCKET, self.OBJECT)
+ resource_mock.delete.assert_called_with(
+ bucket=self.BUCKET, object=self.OBJECT)
+ self.assertTrue(mock_api.execute.called)
+
+ def testDeleteMultipleFiles(self):
+ """Test Delete multiple files."""
+ fake_objs = ["fake_obj1", "fake_obj2"]
+ mock_api = mock.MagicMock()
+ resource_mock = mock.MagicMock()
+ self.client._service.objects = mock.MagicMock(
+ return_value=resource_mock)
+ resource_mock.delete = mock.MagicMock(return_value=mock_api)
+ deleted, failed, error_msgs = self.client.DeleteFiles(self.BUCKET,
+ fake_objs)
+ self.assertEqual(deleted, fake_objs)
+ self.assertEqual(failed, [])
+ self.assertEqual(error_msgs, [])
+ calls = [mock.call(
+ bucket=self.BUCKET, object="fake_obj1"), mock.call(
+ bucket=self.BUCKET, object="fake_obj2")]
+ resource_mock.delete.assert_has_calls(calls)
+ self.assertEqual(mock_api.execute.call_count, 2)
+
+ def testGetUrl(self):
+ """Test GetUrl."""
+ fake_item = {"name": "fake-item-1", "selfLink": "link1"}
+ self.Patch(
+ gstorage_client.StorageClient, "Get", return_value=fake_item)
+ self.assertEqual(
+ self.client.GetUrl("fake_bucket", "fake-item-1"), "link1")
+
+ def testGetUrlNotFound(self):
+ """Test GetUrl when object is not found."""
+ self.Patch(
+ gstorage_client.StorageClient,
+ "Get",
+ side_effect=errors.ResourceNotFoundError(404, "expected error"))
+ self.Patch(time, "sleep")
+ self.assertRaises(errors.ResourceNotFoundError, self.client.GetUrl,
+ "fake_bucket", "fake-item-1")
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/internal/proto/internal_config_pb2.py b/internal/proto/internal_config_pb2.py
new file mode 100644
index 0000000..1fcb955
--- /dev/null
+++ b/internal/proto/internal_config_pb2.py
@@ -0,0 +1,466 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: internal_config.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='internal_config.proto',
+ package='acloud.internal.proto',
+ syntax='proto2',
+  serialized_pb=_b('\n\x15internal_config.proto\x12\x15\x61\x63loud.internal.proto\"\xef\x01\n\x11\x44\x65\x66\x61ultUserConfig\x12\x14\n\x0cmachine_type\x18\x01 \x01(\t\x12\x0f\n\x07network\x18\x02 \x01(\t\x12\x1f\n\x17\x65xtra_data_disk_size_gb\x18\x03 \x01(\x05\x12Y\n\x11metadata_variable\x18\x04 \x03(\x0b\x32>.acloud.internal.proto.DefaultUserConfig.MetadataVariableEntry\x1a\x37\n\x15MetadataVariableEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xee\x07\n\x0eInternalConfig\x12\x41\n\x0f\x64\x65\x66\x61ult_usr_cfg\x18\x01 \x01(\x0b\x32(.acloud.internal.proto.DefaultUserConfig\x12]\n\x15\x64\x65vice_resolution_map\x18\x02 \x03(\x0b\x32>.acloud.internal.proto.InternalConfig.DeviceResolutionMapEntry\x12n\n\x1e\x64\x65vice_default_orientation_map\x18\x03 \x03(\x0b\x32\x46.acloud.internal.proto.InternalConfig.DeviceDefaultOrientationMapEntry\x12\x18\n\x10min_machine_size\x18\x04 \x01(\t\x12\x17\n\x0f\x64isk_image_name\x18\x05 \x01(\t\x12\x1c\n\x14\x64isk_image_mime_type\x18\x06 \x01(\t\x12\x1c\n\x14\x64isk_image_extension\x18\x07 \x01(\t\x12\x1b\n\x13\x64isk_raw_image_name\x18\x08 \x01(\t\x12 \n\x18\x64isk_raw_image_extension\x18\t \x01(\t\x12&\n\x1e\x64\x65\x66\x61ult_extra_data_disk_device\x18\n \x01(\t\x12]\n\x15precreated_data_image\x18\x0b \x03(\x0b\x32>.acloud.internal.proto.InternalConfig.PrecreatedDataImageEntry\x12j\n\x1dvalid_branch_and_min_build_id\x18\x0c \x03(\x0b\x32\x43.acloud.internal.proto.InternalConfig.ValidBranchAndMinBuildIdEntry\x12\x18\n\x10\x63reds_cache_file\x18\r \x01(\t\x12\x12\n\nuser_agent\x18\x0e \x01(\t\x1a:\n\x18\x44\x65viceResolutionMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x42\n DeviceDefaultOrientationMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a:\n\x18PrecreatedDataImageEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a?\n\x1dValidBranchAndMinBuildIdEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+
+_DEFAULTUSERCONFIG_METADATAVARIABLEENTRY = _descriptor.Descriptor(
+ name='MetadataVariableEntry',
+ full_name='acloud.internal.proto.DefaultUserConfig.MetadataVariableEntry',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='key', full_name='acloud.internal.proto.DefaultUserConfig.MetadataVariableEntry.key', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='acloud.internal.proto.DefaultUserConfig.MetadataVariableEntry.value', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=233,
+ serialized_end=288,
+)
+
+_DEFAULTUSERCONFIG = _descriptor.Descriptor(
+ name='DefaultUserConfig',
+ full_name='acloud.internal.proto.DefaultUserConfig',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='machine_type', full_name='acloud.internal.proto.DefaultUserConfig.machine_type', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='network', full_name='acloud.internal.proto.DefaultUserConfig.network', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='extra_data_disk_size_gb', full_name='acloud.internal.proto.DefaultUserConfig.extra_data_disk_size_gb', index=2,
+ number=3, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='metadata_variable', full_name='acloud.internal.proto.DefaultUserConfig.metadata_variable', index=3,
+ number=4, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[_DEFAULTUSERCONFIG_METADATAVARIABLEENTRY, ],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=49,
+ serialized_end=288,
+)
+
+
+_INTERNALCONFIG_DEVICERESOLUTIONMAPENTRY = _descriptor.Descriptor(
+ name='DeviceResolutionMapEntry',
+ full_name='acloud.internal.proto.InternalConfig.DeviceResolutionMapEntry',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='key', full_name='acloud.internal.proto.InternalConfig.DeviceResolutionMapEntry.key', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='acloud.internal.proto.InternalConfig.DeviceResolutionMapEntry.value', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1046,
+ serialized_end=1104,
+)
+
+_INTERNALCONFIG_DEVICEDEFAULTORIENTATIONMAPENTRY = _descriptor.Descriptor(
+ name='DeviceDefaultOrientationMapEntry',
+ full_name='acloud.internal.proto.InternalConfig.DeviceDefaultOrientationMapEntry',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='key', full_name='acloud.internal.proto.InternalConfig.DeviceDefaultOrientationMapEntry.key', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='acloud.internal.proto.InternalConfig.DeviceDefaultOrientationMapEntry.value', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1106,
+ serialized_end=1172,
+)
+
+_INTERNALCONFIG_PRECREATEDDATAIMAGEENTRY = _descriptor.Descriptor(
+ name='PrecreatedDataImageEntry',
+ full_name='acloud.internal.proto.InternalConfig.PrecreatedDataImageEntry',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='key', full_name='acloud.internal.proto.InternalConfig.PrecreatedDataImageEntry.key', index=0,
+ number=1, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='acloud.internal.proto.InternalConfig.PrecreatedDataImageEntry.value', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1174,
+ serialized_end=1232,
+)
+
+_INTERNALCONFIG_VALIDBRANCHANDMINBUILDIDENTRY = _descriptor.Descriptor(
+ name='ValidBranchAndMinBuildIdEntry',
+ full_name='acloud.internal.proto.InternalConfig.ValidBranchAndMinBuildIdEntry',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='key', full_name='acloud.internal.proto.InternalConfig.ValidBranchAndMinBuildIdEntry.key', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='acloud.internal.proto.InternalConfig.ValidBranchAndMinBuildIdEntry.value', index=1,
+ number=2, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1234,
+ serialized_end=1297,
+)
+
+_INTERNALCONFIG = _descriptor.Descriptor(
+ name='InternalConfig',
+ full_name='acloud.internal.proto.InternalConfig',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='default_usr_cfg', full_name='acloud.internal.proto.InternalConfig.default_usr_cfg', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='device_resolution_map', full_name='acloud.internal.proto.InternalConfig.device_resolution_map', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='device_default_orientation_map', full_name='acloud.internal.proto.InternalConfig.device_default_orientation_map', index=2,
+ number=3, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='min_machine_size', full_name='acloud.internal.proto.InternalConfig.min_machine_size', index=3,
+ number=4, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='disk_image_name', full_name='acloud.internal.proto.InternalConfig.disk_image_name', index=4,
+ number=5, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='disk_image_mime_type', full_name='acloud.internal.proto.InternalConfig.disk_image_mime_type', index=5,
+ number=6, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='disk_image_extension', full_name='acloud.internal.proto.InternalConfig.disk_image_extension', index=6,
+ number=7, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='disk_raw_image_name', full_name='acloud.internal.proto.InternalConfig.disk_raw_image_name', index=7,
+ number=8, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='disk_raw_image_extension', full_name='acloud.internal.proto.InternalConfig.disk_raw_image_extension', index=8,
+ number=9, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='default_extra_data_disk_device', full_name='acloud.internal.proto.InternalConfig.default_extra_data_disk_device', index=9,
+ number=10, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='precreated_data_image', full_name='acloud.internal.proto.InternalConfig.precreated_data_image', index=10,
+ number=11, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='valid_branch_and_min_build_id', full_name='acloud.internal.proto.InternalConfig.valid_branch_and_min_build_id', index=11,
+ number=12, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='creds_cache_file', full_name='acloud.internal.proto.InternalConfig.creds_cache_file', index=12,
+ number=13, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='user_agent', full_name='acloud.internal.proto.InternalConfig.user_agent', index=13,
+ number=14, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[_INTERNALCONFIG_DEVICERESOLUTIONMAPENTRY, _INTERNALCONFIG_DEVICEDEFAULTORIENTATIONMAPENTRY, _INTERNALCONFIG_PRECREATEDDATAIMAGEENTRY, _INTERNALCONFIG_VALIDBRANCHANDMINBUILDIDENTRY, ],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=291,
+ serialized_end=1297,
+)
+
+_DEFAULTUSERCONFIG_METADATAVARIABLEENTRY.containing_type = _DEFAULTUSERCONFIG
+_DEFAULTUSERCONFIG.fields_by_name['metadata_variable'].message_type = _DEFAULTUSERCONFIG_METADATAVARIABLEENTRY
+_INTERNALCONFIG_DEVICERESOLUTIONMAPENTRY.containing_type = _INTERNALCONFIG
+_INTERNALCONFIG_DEVICEDEFAULTORIENTATIONMAPENTRY.containing_type = _INTERNALCONFIG
+_INTERNALCONFIG_PRECREATEDDATAIMAGEENTRY.containing_type = _INTERNALCONFIG
+_INTERNALCONFIG_VALIDBRANCHANDMINBUILDIDENTRY.containing_type = _INTERNALCONFIG
+_INTERNALCONFIG.fields_by_name['default_usr_cfg'].message_type = _DEFAULTUSERCONFIG
+_INTERNALCONFIG.fields_by_name['device_resolution_map'].message_type = _INTERNALCONFIG_DEVICERESOLUTIONMAPENTRY
+_INTERNALCONFIG.fields_by_name['device_default_orientation_map'].message_type = _INTERNALCONFIG_DEVICEDEFAULTORIENTATIONMAPENTRY
+_INTERNALCONFIG.fields_by_name['precreated_data_image'].message_type = _INTERNALCONFIG_PRECREATEDDATAIMAGEENTRY
+_INTERNALCONFIG.fields_by_name['valid_branch_and_min_build_id'].message_type = _INTERNALCONFIG_VALIDBRANCHANDMINBUILDIDENTRY
+DESCRIPTOR.message_types_by_name['DefaultUserConfig'] = _DEFAULTUSERCONFIG
+DESCRIPTOR.message_types_by_name['InternalConfig'] = _INTERNALCONFIG
+
+DefaultUserConfig = _reflection.GeneratedProtocolMessageType('DefaultUserConfig', (_message.Message,), dict(
+
+ MetadataVariableEntry = _reflection.GeneratedProtocolMessageType('MetadataVariableEntry', (_message.Message,), dict(
+ DESCRIPTOR = _DEFAULTUSERCONFIG_METADATAVARIABLEENTRY,
+ __module__ = 'internal_config_pb2'
+ # @@protoc_insertion_point(class_scope:acloud.internal.proto.DefaultUserConfig.MetadataVariableEntry)
+ ))
+ ,
+ DESCRIPTOR = _DEFAULTUSERCONFIG,
+ __module__ = 'internal_config_pb2'
+ # @@protoc_insertion_point(class_scope:acloud.internal.proto.DefaultUserConfig)
+ ))
+_sym_db.RegisterMessage(DefaultUserConfig)
+_sym_db.RegisterMessage(DefaultUserConfig.MetadataVariableEntry)
+
+InternalConfig = _reflection.GeneratedProtocolMessageType('InternalConfig', (_message.Message,), dict(
+
+ DeviceResolutionMapEntry = _reflection.GeneratedProtocolMessageType('DeviceResolutionMapEntry', (_message.Message,), dict(
+ DESCRIPTOR = _INTERNALCONFIG_DEVICERESOLUTIONMAPENTRY,
+ __module__ = 'internal_config_pb2'
+ # @@protoc_insertion_point(class_scope:acloud.internal.proto.InternalConfig.DeviceResolutionMapEntry)
+ ))
+ ,
+
+ DeviceDefaultOrientationMapEntry = _reflection.GeneratedProtocolMessageType('DeviceDefaultOrientationMapEntry', (_message.Message,), dict(
+ DESCRIPTOR = _INTERNALCONFIG_DEVICEDEFAULTORIENTATIONMAPENTRY,
+ __module__ = 'internal_config_pb2'
+ # @@protoc_insertion_point(class_scope:acloud.internal.proto.InternalConfig.DeviceDefaultOrientationMapEntry)
+ ))
+ ,
+
+ PrecreatedDataImageEntry = _reflection.GeneratedProtocolMessageType('PrecreatedDataImageEntry', (_message.Message,), dict(
+ DESCRIPTOR = _INTERNALCONFIG_PRECREATEDDATAIMAGEENTRY,
+ __module__ = 'internal_config_pb2'
+ # @@protoc_insertion_point(class_scope:acloud.internal.proto.InternalConfig.PrecreatedDataImageEntry)
+ ))
+ ,
+
+ ValidBranchAndMinBuildIdEntry = _reflection.GeneratedProtocolMessageType('ValidBranchAndMinBuildIdEntry', (_message.Message,), dict(
+ DESCRIPTOR = _INTERNALCONFIG_VALIDBRANCHANDMINBUILDIDENTRY,
+ __module__ = 'internal_config_pb2'
+ # @@protoc_insertion_point(class_scope:acloud.internal.proto.InternalConfig.ValidBranchAndMinBuildIdEntry)
+ ))
+ ,
+ DESCRIPTOR = _INTERNALCONFIG,
+ __module__ = 'internal_config_pb2'
+ # @@protoc_insertion_point(class_scope:acloud.internal.proto.InternalConfig)
+ ))
+_sym_db.RegisterMessage(InternalConfig)
+_sym_db.RegisterMessage(InternalConfig.DeviceResolutionMapEntry)
+_sym_db.RegisterMessage(InternalConfig.DeviceDefaultOrientationMapEntry)
+_sym_db.RegisterMessage(InternalConfig.PrecreatedDataImageEntry)
+_sym_db.RegisterMessage(InternalConfig.ValidBranchAndMinBuildIdEntry)
+
+
+_DEFAULTUSERCONFIG_METADATAVARIABLEENTRY.has_options = True
+_DEFAULTUSERCONFIG_METADATAVARIABLEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
+_INTERNALCONFIG_DEVICERESOLUTIONMAPENTRY.has_options = True
+_INTERNALCONFIG_DEVICERESOLUTIONMAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
+_INTERNALCONFIG_DEVICEDEFAULTORIENTATIONMAPENTRY.has_options = True
+_INTERNALCONFIG_DEVICEDEFAULTORIENTATIONMAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
+_INTERNALCONFIG_PRECREATEDDATAIMAGEENTRY.has_options = True
+_INTERNALCONFIG_PRECREATEDDATAIMAGEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
+_INTERNALCONFIG_VALIDBRANCHANDMINBUILDIDENTRY.has_options = True
+_INTERNALCONFIG_VALIDBRANCHANDMINBUILDIDENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
+# @@protoc_insertion_point(module_scope)
diff --git a/internal/proto/user_config_pb2.py b/internal/proto/user_config_pb2.py
new file mode 100644
index 0000000..1e69cf3
--- /dev/null
+++ b/internal/proto/user_config_pb2.py
@@ -0,0 +1,209 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: user_config.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='user_config.proto',
+ package='acloud.internal.proto',
+ syntax='proto2',
+ serialized_pb=_b('\n\x11user_config.proto\x12\x15\x61\x63loud.internal.proto\"\xd6\x03\n\nUserConfig\x12\x1c\n\x14service_account_name\x18\x01 \x01(\t\x12(\n service_account_private_key_path\x18\x02 \x01(\t\x12\x0f\n\x07project\x18\x03 \x01(\t\x12\x0c\n\x04zone\x18\x04 \x01(\t\x12\x14\n\x0cmachine_type\x18\x05 \x01(\t\x12\x0f\n\x07network\x18\x06 \x01(\t\x12\x1c\n\x14ssh_private_key_path\x18\x07 \x01(\t\x12\x1b\n\x13storage_bucket_name\x18\x08 \x01(\t\x12\x13\n\x0borientation\x18\t \x01(\t\x12\x12\n\nresolution\x18\n \x01(\t\x12\x1f\n\x17\x65xtra_data_disk_size_gb\x18\x0b \x01(\x05\x12R\n\x11metadata_variable\x18\x0c \x03(\x0b\x32\x37.acloud.internal.proto.UserConfig.MetadataVariableEntry\x12\x11\n\tclient_id\x18\r \x01(\t\x12\x15\n\rclient_secret\x18\x0e \x01(\t\x1a\x37\n\x15MetadataVariableEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+
+_USERCONFIG_METADATAVARIABLEENTRY = _descriptor.Descriptor(
+ name='MetadataVariableEntry',
+ full_name='acloud.internal.proto.UserConfig.MetadataVariableEntry',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='key', full_name='acloud.internal.proto.UserConfig.MetadataVariableEntry.key', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='acloud.internal.proto.UserConfig.MetadataVariableEntry.value', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=460,
+ serialized_end=515,
+)
+
+_USERCONFIG = _descriptor.Descriptor(
+ name='UserConfig',
+ full_name='acloud.internal.proto.UserConfig',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='service_account_name', full_name='acloud.internal.proto.UserConfig.service_account_name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='service_account_private_key_path', full_name='acloud.internal.proto.UserConfig.service_account_private_key_path', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='project', full_name='acloud.internal.proto.UserConfig.project', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='zone', full_name='acloud.internal.proto.UserConfig.zone', index=3,
+ number=4, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='machine_type', full_name='acloud.internal.proto.UserConfig.machine_type', index=4,
+ number=5, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='network', full_name='acloud.internal.proto.UserConfig.network', index=5,
+ number=6, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='ssh_private_key_path', full_name='acloud.internal.proto.UserConfig.ssh_private_key_path', index=6,
+ number=7, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='storage_bucket_name', full_name='acloud.internal.proto.UserConfig.storage_bucket_name', index=7,
+ number=8, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='orientation', full_name='acloud.internal.proto.UserConfig.orientation', index=8,
+ number=9, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='resolution', full_name='acloud.internal.proto.UserConfig.resolution', index=9,
+ number=10, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='extra_data_disk_size_gb', full_name='acloud.internal.proto.UserConfig.extra_data_disk_size_gb', index=10,
+ number=11, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='metadata_variable', full_name='acloud.internal.proto.UserConfig.metadata_variable', index=11,
+ number=12, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='client_id', full_name='acloud.internal.proto.UserConfig.client_id', index=12,
+ number=13, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='client_secret', full_name='acloud.internal.proto.UserConfig.client_secret', index=13,
+ number=14, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[_USERCONFIG_METADATAVARIABLEENTRY, ],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=45,
+ serialized_end=515,
+)
+
+_USERCONFIG_METADATAVARIABLEENTRY.containing_type = _USERCONFIG
+_USERCONFIG.fields_by_name['metadata_variable'].message_type = _USERCONFIG_METADATAVARIABLEENTRY
+DESCRIPTOR.message_types_by_name['UserConfig'] = _USERCONFIG
+
+UserConfig = _reflection.GeneratedProtocolMessageType('UserConfig', (_message.Message,), dict(
+
+ MetadataVariableEntry = _reflection.GeneratedProtocolMessageType('MetadataVariableEntry', (_message.Message,), dict(
+ DESCRIPTOR = _USERCONFIG_METADATAVARIABLEENTRY,
+ __module__ = 'user_config_pb2'
+ # @@protoc_insertion_point(class_scope:acloud.internal.proto.UserConfig.MetadataVariableEntry)
+ ))
+ ,
+ DESCRIPTOR = _USERCONFIG,
+ __module__ = 'user_config_pb2'
+ # @@protoc_insertion_point(class_scope:acloud.internal.proto.UserConfig)
+ ))
+_sym_db.RegisterMessage(UserConfig)
+_sym_db.RegisterMessage(UserConfig.MetadataVariableEntry)
+
+
+_USERCONFIG_METADATAVARIABLEENTRY.has_options = True
+_USERCONFIG_METADATAVARIABLEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
+# @@protoc_insertion_point(module_scope)
diff --git a/public/acloud_kernel/kernel_swapper_test.py b/public/acloud_kernel/kernel_swapper_test.py
new file mode 100644
index 0000000..bddb722
--- /dev/null
+++ b/public/acloud_kernel/kernel_swapper_test.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests acloud.public.acloud_kernel.kernel_swapper."""
+
+import subprocess
+import mock
+
+import unittest
+from acloud.internal.lib import android_compute_client
+from acloud.internal.lib import auth
+from acloud.internal.lib import driver_test_lib
+from acloud.public.acloud_kernel import kernel_swapper
+
+
+class KernelSwapperTest(driver_test_lib.BaseDriverTest):
+ """Test kernel_swapper."""
+
+ def setUp(self):
+ """Set up the test."""
+ super(KernelSwapperTest, self).setUp()
+ self.cfg = mock.MagicMock()
+ self.credentials = mock.MagicMock()
+ self.Patch(auth, 'CreateCredentials', return_value=self.credentials)
+ self.compute_client = mock.MagicMock()
+ self.Patch(
+ android_compute_client,
+ 'AndroidComputeClient',
+ return_value=self.compute_client)
+ self.subprocess_call = self.Patch(subprocess, 'check_call')
+
+ self.fake_ip = '123.456.789.000'
+ self.fake_instance = 'fake-instance'
+ self.compute_client.GetInstanceIP.return_value = self.fake_ip
+
+ self.kswapper = kernel_swapper.KernelSwapper(self.cfg,
+ self.fake_instance)
+ self.ssh_cmd_prefix = 'ssh %s root@%s' % (
+ ' '.join(kernel_swapper.SSH_FLAGS), self.fake_ip)
+ self.scp_cmd_prefix = 'scp %s' % ' '.join(kernel_swapper.SSH_FLAGS)
+
+ def testPushFile(self):
+        """Test PushFile."""
+ fake_src_path = 'fake-src'
+ fake_dest_path = 'fake-dest'
+ scp_cmd = ' '.join([self.scp_cmd_prefix, '%s root@%s:%s' %
+ (fake_src_path, self.fake_ip, fake_dest_path)])
+
+ self.kswapper.PushFile(fake_src_path, fake_dest_path)
+ self.subprocess_call.assert_called_once_with(scp_cmd, shell=True)
+
+ def testRebootTarget(self):
+ """Test RebootTarget."""
+ self.kswapper.RebootTarget()
+ reboot_cmd = ' '.join([
+ self.ssh_cmd_prefix, '"%s"' % kernel_swapper.REBOOT_CMD
+ ])
+
+ self.subprocess_call.assert_called_once_with(reboot_cmd, shell=True)
+ self.compute_client.WaitForBoot.assert_called_once_with(
+ self.fake_instance)
+
+ def testSwapKernel(self):
+ """Test SwapKernel."""
+ fake_local_kernel_image = 'fake-kernel'
+ mount_cmd = ' '.join([
+ self.ssh_cmd_prefix, '"%s"' % kernel_swapper.MOUNT_CMD
+ ])
+ scp_cmd = ' '.join([self.scp_cmd_prefix, '%s root@%s:%s' %
+ (fake_local_kernel_image, self.fake_ip, '/boot')])
+ reboot_cmd = ' '.join([
+ self.ssh_cmd_prefix, '"%s"' % kernel_swapper.REBOOT_CMD
+ ])
+
+ self.kswapper.SwapKernel(fake_local_kernel_image)
+ self.subprocess_call.assert_has_calls([
+ mock.call(
+ mount_cmd, shell=True), mock.call(
+ scp_cmd, shell=True), mock.call(
+ reboot_cmd, shell=True)
+ ])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/public/acloud.py b/public/acloud_main.py
similarity index 98%
rename from public/acloud.py
rename to public/acloud_main.py
index 5badcef..bd89929 100755
--- a/public/acloud.py
+++ b/public/acloud_main.py
@@ -74,7 +74,7 @@
from acloud.public import errors
LOGGING_FMT = "%(asctime)s |%(levelname)s| %(module)s:%(lineno)s| %(message)s"
-LOGGER_NAME = "google3.cloud.android.driver"
+LOGGER_NAME = "acloud_main"
# Commands
CMD_CREATE = "create"
@@ -349,3 +349,7 @@
sys.stderr.write("Encountered the following errors:\n%s\n" % msg)
return 1
return 0
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/public/config_test.py b/public/config_test.py
new file mode 100644
index 0000000..554e751
--- /dev/null
+++ b/public/config_test.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for acloud.public.config."""
+
+import mock
+
+import unittest
+from acloud.internal.proto import internal_config_pb2
+from acloud.internal.proto import user_config_pb2
+from acloud.public import config
+from acloud.public import errors
+
+
+class AcloudConfigManagerTest(unittest.TestCase):
+ """Test acloud configuration manager."""
+
+ USER_CONFIG = """
+service_account_name: "fake@developer.gserviceaccount.com"
+service_account_private_key_path: "/path/to/service/account/key"
+project: "fake-project"
+zone: "us-central1-f"
+machine_type: "n1-standard-1"
+network: "default"
+ssh_private_key_path: "/path/to/ssh/key"
+storage_bucket_name: "fake_bucket"
+orientation: "portrait"
+resolution: "1200x1200x1200x1200"
+client_id: "fake_client_id"
+client_secret: "fake_client_secret"
+metadata_variable {
+ key: "metadata_1"
+ value: "metadata_value_1"
+}
+"""
+
+ INTERNAL_CONFIG = """
+min_machine_size: "n1-standard-1"
+disk_image_name: "avd-system.tar.gz"
+disk_image_mime_type: "application/x-tar"
+disk_image_extension: ".tar.gz"
+disk_raw_image_name: "disk.raw"
+disk_raw_image_extension: ".img"
+creds_cache_file: ".fake_oauth2.dat"
+user_agent: "fake_user_agent"
+
+default_usr_cfg {
+ machine_type: "n1-standard-1"
+ network: "default"
+ metadata_variable {
+ key: "metadata_1"
+ value: "metadata_value_1"
+ }
+
+ metadata_variable {
+ key: "metadata_2"
+ value: "metadata_value_2"
+ }
+}
+
+device_resolution_map {
+ key: "nexus5"
+ value: "1080x1920x32x480"
+}
+
+device_default_orientation_map {
+ key: "nexus5"
+ value: "portrait"
+}
+
+valid_branch_and_min_build_id {
+ key: "git_jb-gce-dev"
+ value: 0
+}
+"""
+
+ def setUp(self):
+ self.config_file = mock.MagicMock()
+
+ def testLoadUserConfig(self):
+ """Test loading user config."""
+ self.config_file.read.return_value = self.USER_CONFIG
+ cfg = config.AcloudConfigManager.LoadConfigFromProtocolBuffer(
+ self.config_file, user_config_pb2.UserConfig)
+ self.assertEquals(cfg.service_account_name,
+ "fake@developer.gserviceaccount.com")
+ self.assertEquals(cfg.service_account_private_key_path,
+ "/path/to/service/account/key")
+ self.assertEquals(cfg.project, "fake-project")
+ self.assertEquals(cfg.zone, "us-central1-f")
+ self.assertEquals(cfg.machine_type, "n1-standard-1")
+ self.assertEquals(cfg.network, "default")
+ self.assertEquals(cfg.ssh_private_key_path, "/path/to/ssh/key")
+ self.assertEquals(cfg.storage_bucket_name, "fake_bucket")
+ self.assertEquals(cfg.orientation, "portrait")
+ self.assertEquals(cfg.resolution, "1200x1200x1200x1200")
+ self.assertEquals(cfg.client_id, "fake_client_id")
+ self.assertEquals(cfg.client_secret, "fake_client_secret")
+ self.assertEquals({key: val
+ for key, val in cfg.metadata_variable.iteritems()},
+ {"metadata_1": "metadata_value_1"})
+
+ def testLoadInternalConfig(self):
+ """Test loading internal config."""
+ self.config_file.read.return_value = self.INTERNAL_CONFIG
+ cfg = config.AcloudConfigManager.LoadConfigFromProtocolBuffer(
+ self.config_file, internal_config_pb2.InternalConfig)
+ self.assertEquals(cfg.min_machine_size, "n1-standard-1")
+ self.assertEquals(cfg.disk_image_name, "avd-system.tar.gz")
+ self.assertEquals(cfg.disk_image_mime_type, "application/x-tar")
+ self.assertEquals(cfg.disk_image_extension, ".tar.gz")
+ self.assertEquals(cfg.disk_raw_image_name, "disk.raw")
+ self.assertEquals(cfg.disk_raw_image_extension, ".img")
+ self.assertEquals(cfg.creds_cache_file, ".fake_oauth2.dat")
+ self.assertEquals(cfg.user_agent, "fake_user_agent")
+ self.assertEquals(cfg.default_usr_cfg.machine_type, "n1-standard-1")
+ self.assertEquals(cfg.default_usr_cfg.network, "default")
+ self.assertEquals({
+ key: val
+ for key, val in cfg.default_usr_cfg.metadata_variable.iteritems()
+ }, {"metadata_1": "metadata_value_1",
+ "metadata_2": "metadata_value_2"})
+ self.assertEquals(
+ {key: val
+ for key, val in cfg.device_resolution_map.iteritems()},
+ {"nexus5": "1080x1920x32x480"})
+ device_resolution = {
+ key: val
+ for key, val in cfg.device_default_orientation_map.iteritems()
+ }
+ self.assertEquals(device_resolution, {"nexus5": "portrait"})
+ valid_branch_and_min_build_id = {
+ key: val
+ for key, val in cfg.valid_branch_and_min_build_id.iteritems()
+ }
+ self.assertEquals(valid_branch_and_min_build_id, {"git_jb-gce-dev": 0})
+
+ def testLoadConfigFails(self):
+ """Test loading a bad file."""
+ self.config_file.read.return_value = "malformed text"
+ with self.assertRaises(errors.ConfigError):
+ config.AcloudConfigManager.LoadConfigFromProtocolBuffer(
+ self.config_file, internal_config_pb2.InternalConfig)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/public/data/default.config b/public/data/default.config
new file mode 100644
index 0000000..88ee324
--- /dev/null
+++ b/public/data/default.config
@@ -0,0 +1,119 @@
+min_machine_size: "n1-standard-1"
+disk_image_name: "avd-system.tar.gz"
+disk_image_mime_type: "application/x-tar"
+disk_image_extension: ".tar.gz"
+disk_raw_image_name: "disk.raw"
+disk_raw_image_extension: ".img"
+default_extra_data_disk_device: "/dev/block/sdb"
+creds_cache_file: ".acloud_oauth2.dat"
+user_agent: "acloud"
+
+default_usr_cfg {
+ machine_type: "n1-standard-1"
+ network: "default"
+ extra_data_disk_size_gb: 0
+
+ metadata_variable {
+ key: "camera_front"
+ value: "1,32,24,checker-sliding"
+ }
+
+ metadata_variable {
+ key: "camera_back"
+ value: "1,640,480,checker-fixed"
+ }
+
+ metadata_variable {
+ key: "cfg_sta_ephemeral_cache_size_mb"
+ value: "512"
+ }
+
+ metadata_variable {
+ key: "cfg_sta_ephemeral_data_size_mb"
+ value: "2048"
+ }
+
+ metadata_variable {
+ key: "cfg_sta_persistent_data_device"
+ value: "default"
+ }
+
+ metadata_variable {
+ key: "gps_coordinates"
+ value: "37.422,122.084,100,0,1,1"
+ }
+}
+
+# Device resolution
+device_resolution_map {
+ key: "nexus5"
+ value: "1080x1920x32x480"
+}
+
+device_resolution_map {
+ key: "nexus6"
+ value: "1440x2560x32x560"
+}
+
+# nexus7 (2012)
+device_resolution_map {
+ key: "nexus7_2012"
+ value: "800x1280x32x213"
+}
+
+device_resolution_map {
+ key: "nexus7_2013"
+ value: "1200x1920x32x320"
+}
+
+device_resolution_map {
+ key: "nexus9"
+ value: "1536x2048x32x320"
+}
+
+device_resolution_map {
+ key: "nexus10"
+ value: "1600x2560x32x320"
+}
+
+# Default orientation
+
+device_default_orientation_map {
+ key: "nexus5"
+ value: "portrait"
+}
+
+device_default_orientation_map {
+ key: "nexus6"
+ value: "landscape"
+}
+
+device_default_orientation_map {
+ key: "nexus7_2012"
+ value: "landscape"
+}
+
+device_default_orientation_map {
+ key: "nexus7_2013"
+ value: "landscape"
+}
+
+device_default_orientation_map {
+ key: "nexus9"
+ value: "landscape"
+}
+
+device_default_orientation_map {
+ key: "nexus10"
+ value: "landscape"
+}
+
+# Precreated data images.
+precreated_data_image {
+ key: 4
+ value: "extradisk-image-4gb"
+}
+precreated_data_image {
+ key: 10
+ value: "extradisk-image-10gb"
+}
diff --git a/public/device_driver.py b/public/device_driver.py
index b0fb139..d0bdf35 100755
--- a/public/device_driver.py
+++ b/public/device_driver.py
@@ -31,8 +31,6 @@
import logging
import os
-import google3
-
import dateutil.parser
import dateutil.tz
@@ -348,9 +346,8 @@
cleanup: boolean, if True clean up compute engine image and
disk image in storage after creating the instance.
serial_log_file: A path to a file where serial output should
- be saved to. Logs will be fetch only on boot failure.
+ be saved to.
logcat_file: A path to a file where logcat logs should be saved.
- Logs will be fetch only on boot failure.
Returns:
A Report instance.
@@ -388,15 +385,17 @@
# Dump serial and logcat logs.
if serial_log_file:
- _FetchSerialLogsFromDevices(compute_client,
- instance_names=failures.keys(),
- port=constants.DEFAULT_SERIAL_PORT,
- output_file=serial_log_file)
+ _FetchSerialLogsFromDevices(
+ compute_client,
+ instance_names=[d.instance_name for d in device_pool.devices],
+ port=constants.DEFAULT_SERIAL_PORT,
+ output_file=serial_log_file)
if logcat_file:
- _FetchSerialLogsFromDevices(compute_client,
- instance_names=failures.keys(),
- port=constants.LOGCAT_SERIAL_PORT,
- output_file=logcat_file)
+ _FetchSerialLogsFromDevices(
+ compute_client,
+ instance_names=[d.instance_name for d in device_pool.devices],
+ port=constants.LOGCAT_SERIAL_PORT,
+ output_file=logcat_file)
except errors.DriverError as e:
r.AddError(str(e))
r.SetStatus(report.Status.FAIL)
diff --git a/public/device_driver_test.py b/public/device_driver_test.py
new file mode 100644
index 0000000..eb0a64f
--- /dev/null
+++ b/public/device_driver_test.py
@@ -0,0 +1,249 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for acloud.public.device_driver."""
+
+import datetime
+import uuid
+
+import dateutil.parser
+import mock
+
+import unittest
+from acloud.internal.lib import auth
+from acloud.internal.lib import android_build_client
+from acloud.internal.lib import android_compute_client
+from acloud.internal.lib import driver_test_lib
+from acloud.internal.lib import gstorage_client
+from acloud.public import device_driver
+
+
+class DeviceDriverTest(driver_test_lib.BaseDriverTest):
+ """Test device_driver."""
+
+ def setUp(self):
+ """Set up the test."""
+ super(DeviceDriverTest, self).setUp()
+ self.build_client = mock.MagicMock()
+ self.Patch(android_build_client, "AndroidBuildClient",
+ return_value=self.build_client)
+ self.storage_client = mock.MagicMock()
+ self.Patch(
+ gstorage_client, "StorageClient", return_value=self.storage_client)
+ self.compute_client = mock.MagicMock()
+ self.Patch(
+ android_compute_client,
+ "AndroidComputeClient",
+ return_value=self.compute_client)
+ self.Patch(auth, "CreateCredentials", return_value=mock.MagicMock())
+
+ def _CreateCfg(self):
+ """A helper method that creates a mock configuration object."""
+ cfg = mock.MagicMock()
+ cfg.service_account_name = "fake@service.com"
+ cfg.service_account_private_key_path = "/fake/path/to/key"
+ cfg.zone = "fake_zone"
+ cfg.disk_image_name = "fake_image.tar.gz"
+ cfg.disk_image_mime_type = "fake/type"
+ cfg.storage_bucket_name = "fake_bucket"
+ cfg.extra_data_disk_size_gb = 4
+ cfg.precreated_data_image_map = {
+ 4: "extradisk-image-4gb",
+ 10: "extradisk-image-10gb"
+ }
+
+ return cfg
+
+ def testCreateAndroidVirtualDevices(self):
+ """Test CreateAndroidVirtualDevices."""
+ cfg = self._CreateCfg()
+ fake_gs_url = "fake_gs_url"
+ fake_ip = "140.1.1.1"
+ fake_instance = "fake-instance"
+ fake_image = "fake-image"
+ fake_build_target = "fake_target"
+ fake_build_id = "12345"
+
+ # Mock uuid
+ fake_uuid = mock.MagicMock(hex="1234")
+ self.Patch(uuid, "uuid4", return_value=fake_uuid)
+ fake_gs_object = fake_uuid.hex + "-" + cfg.disk_image_name
+ self.storage_client.GetUrl.return_value = fake_gs_url
+
+ # Mock compute client methods
+ disk_name = "extradisk-image-4gb"
+ self.compute_client.GetInstanceIP.return_value = fake_ip
+ self.compute_client.GenerateImageName.return_value = fake_image
+ self.compute_client.GenerateInstanceName.return_value = fake_instance
+ self.compute_client.GetDataDiskName.return_value = disk_name
+
+ # Verify
+ r = device_driver.CreateAndroidVirtualDevices(
+ cfg, fake_build_target, fake_build_id)
+ self.build_client.CopyTo.assert_called_with(
+ fake_build_target, fake_build_id, artifact_name=cfg.disk_image_name,
+ destination_bucket=cfg.storage_bucket_name,
+ destination_path=fake_gs_object)
+ self.compute_client.CreateImage.assert_called_with(
+ image_name=fake_image, source_uri=fake_gs_url)
+ self.compute_client.CreateInstance.assert_called_with(
+ fake_instance, fake_image, disk_name)
+ self.compute_client.DeleteImage.assert_called_with(fake_image)
+        self.storage_client.Delete.assert_called_with(cfg.storage_bucket_name, fake_gs_object)
+
+ self.assertEquals(
+ r.data,
+ {
+ "devices": [
+ {
+ "instance_name": fake_instance,
+ "ip": fake_ip,
+ },
+ ],
+ }
+ )
+ self.assertEquals(r.command, "create")
+ self.assertEquals(r.status, "SUCCESS")
+
+
+ def testDeleteAndroidVirtualDevices(self):
+ """Test DeleteAndroidVirtualDevices."""
+ instance_names = ["fake-instance-1", "fake-instance-2"]
+ self.compute_client.DeleteInstances.return_value = (instance_names, [],
+ [])
+ cfg = self._CreateCfg()
+ r = device_driver.DeleteAndroidVirtualDevices(cfg, instance_names)
+ self.compute_client.DeleteInstances.assert_called_once_with(
+ instance_names, cfg.zone)
+ self.assertEquals(r.data, {
+ "deleted": [
+ {
+ "name": instance_names[0],
+ "type": "instance",
+ },
+ {
+ "name": instance_names[1],
+ "type": "instance",
+ },
+ ],
+ })
+ self.assertEquals(r.command, "delete")
+ self.assertEquals(r.status, "SUCCESS")
+
+ def testCleanup(self):
+ expiration_mins = 30
+ before_deadline = "2015-10-29T12:00:30.018-07:00"
+ after_deadline = "2015-10-29T12:45:30.018-07:00"
+ now = "2015-10-29T13:00:30.018-07:00"
+ self.Patch(device_driver, "datetime")
+ device_driver.datetime.datetime.now.return_value = dateutil.parser.parse(
+ now)
+ device_driver.datetime.timedelta.return_value = datetime.timedelta(
+ minutes=expiration_mins)
+ fake_instances = [
+ {
+ "name": "fake_instance_1",
+ "creationTimestamp": before_deadline,
+ }, {
+ "name": "fake_instance_2",
+ "creationTimestamp": after_deadline,
+ }
+ ]
+ fake_images = [
+ {
+ "name": "extradisk-image-4gb",
+ "creationTimestamp": before_deadline,
+ }, {
+ "name": "fake_image_1",
+ "creationTimestamp": before_deadline,
+ }, {
+ "name": "fake_image_2",
+ "creationTimestamp": after_deadline,
+ }
+ ]
+ fake_disks = [
+ {
+ "name": "fake_disk_1",
+ "creationTimestamp": before_deadline,
+ }, {
+ "name": "fake_disk_2",
+ "creationTimestamp": before_deadline,
+ "users": ["some-instance-using-the-disk"]
+ }, {
+ "name": "fake_disk_3",
+ "creationTimestamp": after_deadline,
+ }
+ ]
+ fake_objects = [
+ {
+ "name": "fake_object_1",
+ "timeCreated": before_deadline,
+ }, {
+ "name": "fake_object_2",
+ "timeCreated": after_deadline,
+ }
+ ]
+ self.compute_client.ListInstances.return_value = fake_instances
+ self.compute_client.ListImages.return_value = fake_images
+ self.compute_client.ListDisks.return_value = fake_disks
+ self.storage_client.List.return_value = fake_objects
+ self.compute_client.DeleteInstances.return_value = (
+ ["fake_instance_1"], [], [])
+ self.compute_client.DeleteImages.return_value = (["fake_image_1"], [],
+ [])
+ self.compute_client.DeleteDisks.return_value = (["fake_disk_1"], [],
+ [])
+ self.storage_client.DeleteFiles.return_value = (["fake_object_1"], [],
+ [])
+ cfg = self._CreateCfg()
+ r = device_driver.Cleanup(cfg, expiration_mins)
+ self.assertEqual(r.errors, [])
+ expected_report_data = {
+ "deleted": [
+ {"name": "fake_instance_1",
+ "type": "instance"},
+ {"name": "fake_image_1",
+ "type": "image"},
+ {"name": "fake_disk_1",
+ "type": "disk"},
+ {"name": "fake_object_1",
+ "type": "cached_build_artifact"},
+ ]
+ }
+ self.assertEqual(r.data, expected_report_data)
+
+ self.compute_client.ListInstances.assert_called_once_with(
+ zone=cfg.zone)
+ self.compute_client.DeleteInstances.assert_called_once_with(
+ instances=["fake_instance_1"], zone=cfg.zone)
+
+ self.compute_client.ListImages.assert_called_once_with()
+ self.compute_client.DeleteImages.assert_called_once_with(
+ image_names=["fake_image_1"])
+
+ self.compute_client.ListDisks.assert_called_once_with(zone=cfg.zone)
+ self.compute_client.DeleteDisks.assert_called_once_with(
+ disk_names=["fake_disk_1"], zone=cfg.zone)
+
+ self.storage_client.List.assert_called_once_with(
+ bucket_name=cfg.storage_bucket_name)
+ self.storage_client.DeleteFiles.assert_called_once_with(
+ bucket_name=cfg.storage_bucket_name,
+ object_names=["fake_object_1"])
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/public/report_test.py b/public/report_test.py
new file mode 100644
index 0000000..e1fdc55
--- /dev/null
+++ b/public/report_test.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for acloud.public.report."""
+
+import unittest
+from acloud.public import report
+
+
+class ReportTest(unittest.TestCase):
+ """Test Report class."""
+
+ def testAddData(self):
+ """test AddData."""
+ r = report.Report("create")
+ r.AddData("devices", {"instance_name": "instance_1"})
+ r.AddData("devices", {"instance_name": "instance_2"})
+ expected = {
+ "devices": [
+ {"instance_name": "instance_1"},
+ {"instance_name": "instance_2"}
+ ]
+ }
+ self.assertEqual(r.data, expected)
+
+ def testAddError(self):
+ """test AddError."""
+ r = report.Report("create")
+ r.errors.append("some errors")
+ r.errors.append("some errors")
+ self.assertEqual(r.errors, ["some errors", "some errors"])
+
+ def testSetStatus(self):
+ """test SetStatus."""
+ r = report.Report("create")
+ r.SetStatus(report.Status.SUCCESS)
+ self.assertEqual(r.status, "SUCCESS")
+
+ r.SetStatus(report.Status.FAIL)
+ self.assertEqual(r.status, "FAIL")
+
+ r.SetStatus(report.Status.BOOT_FAIL)
+ self.assertEqual(r.status, "BOOT_FAIL")
+
+        # Test that more severe status won't get overridden.
+ r.SetStatus(report.Status.FAIL)
+ self.assertEqual(r.status, "BOOT_FAIL")
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/regen_proto.sh b/regen_proto.sh
new file mode 100644
index 0000000..9a38eab
--- /dev/null
+++ b/regen_proto.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+## Compiles proto files to .py files
+## Note that proto version 3.0.0 is needed for successful compilation.
+protoc -I=internal/proto --python_out=internal/proto internal/proto/internal_config.proto
+protoc -I=internal/proto --python_out=internal/proto internal/proto/user_config.proto