Server: Apache/2.4.59 (Debian)
System: Linux keymana 4.19.0-21-cloud-amd64 #1 SMP Debian 4.19.249-2 (2022-06-30) x86_64
User: lijunjie (1003)
PHP: 7.4.33
Disabled: pcntl_alarm,pcntl_fork,pcntl_waitpid,pcntl_wait,pcntl_wifexited,pcntl_wifstopped,pcntl_wifsignaled,pcntl_wifcontinued,pcntl_wexitstatus,pcntl_wtermsig,pcntl_wstopsig,pcntl_signal,pcntl_signal_get_handler,pcntl_signal_dispatch,pcntl_get_last_error,pcntl_strerror,pcntl_sigprocmask,pcntl_sigwaitinfo,pcntl_sigtimedwait,pcntl_exec,pcntl_getpriority,pcntl_setpriority,pcntl_async_signals,pcntl_unshare,
File: //lib/google-cloud-sdk/lib/googlecloudsdk/api_lib/dataproc/compute_helpers.py
# -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Constants for the dataproc tool."""

from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

from googlecloudsdk.api_lib.compute import base_classes as compute_base
from googlecloudsdk.api_lib.compute import constants as compute_constants
from googlecloudsdk.api_lib.compute import utils as compute_utils
from googlecloudsdk.command_lib.compute import flags
from googlecloudsdk.command_lib.compute import scope as compute_scope
from googlecloudsdk.command_lib.compute import scope_prompter
from googlecloudsdk.core import properties


# Copied into dataproc for cleaner separation from the compute libraries.
SCOPE_ALIASES = compute_constants.SCOPES
SCOPES_HELP = compute_constants.ScopesHelp()
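
# Editorial note (assumption, not from the SDK source): compute_constants.SCOPES
# is treated here as a dict mapping each short alias to a *list* of full OAuth
# scope URIs, roughly:
#
#   {'storage-rw': ['https://www.googleapis.com/auth/devstorage.read_write'],
#    ...}
#
# That shape is why ExpandScopeAliases below extends the result list when an
# alias matches instead of appending the alias itself.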


def ExpandScopeAliases(scopes):
  """Replace known aliases in the list of scopes provided by the user."""
  scopes = scopes or []
  expanded_scopes = []
  for scope in scopes:
    if scope in SCOPE_ALIASES:
      expanded_scopes += SCOPE_ALIASES[scope]
    else:
      # Pass unrecognized scopes through unchanged; the server validates them.
      expanded_scopes.append(scope)
  return sorted(expanded_scopes)
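
# Usage sketch (editorial, hedged): assumes 'storage-rw' is one of the aliases
# defined in compute_constants.SCOPES; unrecognized strings pass through
# unchanged for server-side validation.
#
#   ExpandScopeAliases(
#       ['storage-rw', 'https://www.googleapis.com/auth/bigquery'])
#   # -> ['https://www.googleapis.com/auth/bigquery',
#   #     'https://www.googleapis.com/auth/devstorage.read_write']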


def GetComputeResources(release_track, cluster_name, dataproc_region):
  """Returns a resources object with resolved GCE zone and region."""
  holder = compute_base.ComputeApiHolder(release_track)
  region_prop = properties.VALUES.compute.region
  zone_prop = properties.VALUES.compute.zone
  resources = holder.resources

  # Prompt for the zone if necessary. When the Dataproc regional stack is
  # used (i.e. dataproc_region is not 'global'), omitting the zone lets the
  # server pick one.
  zone = zone_prop.Get()
  if not zone and dataproc_region == 'global':
    _, zone = scope_prompter.PromptForScope(
        resource_name='cluster',
        underspecified_names=[cluster_name],
        scopes=[compute_scope.ScopeEnum.ZONE],
        default_scope=None,
        scope_lister=flags.GetDefaultScopeLister(holder.client))
    if not zone:
      # Still no zone; let the property raise its standard required-value
      # error.
      zone = zone_prop.GetOrFail()

  if zone:
    zone_ref = resources.Parse(
        zone,
        params={
            'project': properties.VALUES.core.project.GetOrFail,
        },
        collection='compute.zones')

    zone_name = zone_ref.Name()
    zone_prop.Set(zone_name)
    region_name = compute_utils.ZoneNameToRegionName(zone_name)
    region_prop.Set(region_name)
  else:
    # Auto zone: leave the zone unset so the server picks one.
    zone_prop.Set('')
    # Set the GCE region to the Dataproc region (a 1:1 mapping).
    region_prop.Set(dataproc_region)

  return resources
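
# Usage sketch (editorial, hedged): a typical call site. 'my-cluster' and
# 'us-central1' are placeholder values; the release track comes from calliope.
#
#   from googlecloudsdk.calliope import base
#
#   resources = GetComputeResources(
#       base.ReleaseTrack.GA, 'my-cluster', 'us-central1')
#   # With no zone set and a non-global region, the compute zone property is
#   # now '' (server-chosen zone), the compute region property is
#   # 'us-central1', and `resources` can parse compute URIs against those
#   # defaults.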