4 changes: 2 additions & 2 deletions perfkitbenchmarker/archive.py
@@ -28,7 +28,7 @@ def ArchiveRun(
run_temp_directory,
target_bucket,
prefix='',
-gsutil_path='gsutil',
+gsutil_path='gcloud',
aws_path=AWS_PATH,
):
"""Archive a run directory to GCS or S3.
@@ -52,7 +52,7 @@
)

prefix_len = 5
-prefixes = {'s3://': [aws_path, 's3', 'cp'], 'gs://': [gsutil_path, 'cp']}
+prefixes = {'s3://': [aws_path, 's3', 'cp'], 'gs://': [gsutil_path, 'storage', 'cp']}

assert all(len(key) == prefix_len for key in prefixes), prefixes

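The prefixes table is the heart of this first change: the URL scheme of the target bucket selects which CLI performs the copy, and the gs:// entry now expands to gcloud storage cp. A minimal standalone sketch of the same dispatch (the helper name and paths are illustrative, not from the PR; it assumes aws and gcloud are on PATH):

```bash
#!/bin/bash
# Pick the cloud CLI by destination URL scheme, then copy.
copy_to_bucket() {
  local src="$1" dst="$2"
  case "$dst" in
    gs://*) gcloud storage cp "$src" "$dst" ;;
    s3://*) aws s3 cp "$src" "$dst" ;;
    *) echo "unsupported destination: $dst" >&2; return 1 ;;
  esac
}

copy_to_bucket /tmp/run-logs.tar.gz gs://my-archive-bucket/runs/
```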
2 changes: 1 addition & 1 deletion perfkitbenchmarker/data/beam/flink-presubmit.sh
@@ -1,5 +1,5 @@
#!/bin/bash
echo 'Executing $0'
echo 'Copying job jar file $1 into VM...'
gsutil cp "$1" ./job.jar
gcloud storage cp "$1" ./job.jar
echo 'Finished copying job jar file.'
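A pre-existing nit this diff leaves alone: both echo lines use single quotes, so $0 and $1 print literally rather than expanding. If the expansion is intended, the updated script would read:

```bash
#!/bin/bash
echo "Executing $0"
echo "Copying job jar file $1 into VM..."
gcloud storage cp "$1" ./job.jar
echo 'Finished copying job jar file.'
```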
2 changes: 1 addition & 1 deletion perfkitbenchmarker/linux_benchmarks/gcsfuse_benchmark.py
@@ -118,7 +118,7 @@ def _ReadThroughputTest(vm, mountpoint):
"""Read the files in the directory via tf.io.gfile or gcsfuse."""
data_dir = FLAGS.gcsfuse_data_dir
options = f'--mountpoint="{mountpoint}"'
-cmd = f'gsutil ls "{data_dir}" | {_DLVM_PYTHON} {_REMOTE_SCRIPT} {options}'
+cmd = f'gcloud storage ls "{data_dir}" | {_DLVM_PYTHON} {_REMOTE_SCRIPT} {options}'
logging.info(cmd)
stdout, stderr = vm.RemoteCommand(cmd)
logging.info(stdout)
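The command under test pipes an object listing, one gs:// URL per line, into the remote read script. The same pipeline can be run by hand; bucket and mountpoint here are placeholders:

```bash
gcloud storage ls "gs://my-bucket/10M/" | python read.py --mountpoint="/gcs/"
```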
2 changes: 1 addition & 1 deletion perfkitbenchmarker/linux_benchmarks/resnet_benchmark.py
@@ -188,7 +188,7 @@ def Prepare(benchmark_spec):
vm.RemoteCommand(
'sudo mkdir -p {data_path} && '
'sudo chmod a+w {data_path} && '
-'gsutil -m cp -r {data_dir}/* {data_path}'.format(
+'gcloud storage cp --recursive {data_dir}/* {data_path}'.format(
data_dir=benchmark_spec.data_dir, data_path=local_data_path
)
)
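Note the dropped -m: gsutil needed that top-level flag for parallel transfers, while gcloud storage parallelizes by default, so only the recursion flag carries over. Side by side, with placeholder paths:

```bash
# Old: '-m' enables parallel transfers, '-r' recurses into the source.
gsutil -m cp -r gs://my-bucket/imagenet/* /scratch/imagenet
# New: parallel by default; only '--recursive' is needed.
gcloud storage cp --recursive gs://my-bucket/imagenet/* /scratch/imagenet
```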
2 changes: 1 addition & 1 deletion perfkitbenchmarker/linux_packages/tcmalloc.py
@@ -86,7 +86,7 @@ def AptInstall(vm):
vm.Install('google_cloud_sdk')
local_path = '/tmp/libtcmalloc.so'
vm.RemoteCommand(
-'gsutil cp {url} {path} '
+'gcloud storage cp {url} {path} '
'&& echo "export LD_PRELOAD={path}" | sudo tee -a {tmp}'.format(
url=FLAGS.tcmalloc_experimental_url,
path=local_path,
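For context, the remote command built here does two things: fetch a prebuilt allocator and register it for preloading. Expanded with placeholder values (the real URL comes from --tcmalloc_experimental_url, and the environment file is an assumption, not the PR's actual target):

```bash
# Download the experimental allocator, then have the dynamic loader inject
# it into every new process via LD_PRELOAD.
gcloud storage cp gs://my-bucket/libtcmalloc.so /tmp/libtcmalloc.so
echo "export LD_PRELOAD=/tmp/libtcmalloc.so" | sudo tee -a /etc/profile.d/tcmalloc.sh
```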
16 changes: 8 additions & 8 deletions perfkitbenchmarker/log_collector.py
@@ -65,13 +65,13 @@ def CollectPKBLogs(run_uri: str, log_local_path: str) -> None:
# Generate the log path to the cloud bucket based on the invocation date of
# this function.
gcs_log_path = GetLogCloudPath(PKB_LOG_BUCKET.value, f'{run_uri}-pkb.log')
+# The gsutil top-level flag '-h' is not supported by 'gcloud storage cp' or 'gcloud storage mv'.
vm_util.IssueCommand(
[
-'gsutil',
-'-h',
-'Content-Type:text/plain',
+'gcloud',
+'storage',
_SAVE_LOG_TO_BUCKET_OPERATION.value,
-'-Z',
+'--gzip-local-all',
log_local_path,
gcs_log_path,
],
@@ -94,13 +94,13 @@ def CollectVMLogs(run_uri: str, source_path: str) -> None:
source_filename = source_path.split('/')[-1]
gcs_directory_path = GetLogCloudPath(VM_LOG_BUCKET.value, run_uri)
gcs_path = f'{gcs_directory_path}/{source_filename}'
+# The gsutil top-level flag '-h' is not supported by 'gcloud storage mv'.
vm_util.IssueCommand(
[
-'gsutil',
-'-h',
-'Content-Type:text/plain',
+'gcloud',
+'storage',
'mv',
-'-Z',
+'--gzip-local-all',
source_path,
gcs_path,
],
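The mapping here is not one-to-one: gsutil's -Z becomes --gzip-local-all, but the top-level -h header flag has no counterpart, so the explicit text/plain content type is simply dropped. Old and new invocations side by side, with a placeholder bucket; the follow-up objects update step is a suggested option, not part of the PR:

```bash
# Old: set an explicit content type and gzip the file on upload.
gsutil -h 'Content-Type:text/plain' cp -Z pkb.log gs://my-log-bucket/run-pkb.log
# New: '-Z' maps to '--gzip-local-all'; there is no top-level header flag.
gcloud storage cp --gzip-local-all pkb.log gs://my-log-bucket/run-pkb.log
# If text/plain is still required, one option is to set it after upload:
gcloud storage objects update gs://my-log-bucket/run-pkb.log --content-type=text/plain
```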
2 changes: 1 addition & 1 deletion perfkitbenchmarker/object_storage_service.py
@@ -185,7 +185,7 @@ def MakeBucket(self, bucket, raise_on_failure=True, tag_bucket=True):
def Copy(self, src_url, dst_url, recursive=False):
"""Copy files, objects and directories.

-Note: Recursive copy behavior mimics gsutil cp -r where:
+Note: Recursive copy behavior mimics gcloud storage cp --recursive where:
Copy(/foo/bar, /baz, True) copies the directory bar into /baz/bar whereas
aws s3 cp --recursive would copy the contents of bar into /baz.

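The docstring's distinction is easy to trip over, so concretely (bucket names assumed):

```bash
# gcloud storage cp --recursive, like gsutil cp -r, nests the source
# directory itself under the destination:
gcloud storage cp --recursive /foo/bar gs://bucket/baz  # -> gs://bucket/baz/bar/...
# aws s3 cp --recursive copies the directory's *contents* instead:
aws s3 cp --recursive /foo/bar s3://bucket/baz          # -> s3://bucket/baz/...
```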
1 change: 1 addition & 0 deletions perfkitbenchmarker/providers/README.md
@@ -427,6 +427,7 @@ To create a new `~/.boto` file, issue the following command and follow the
instructions given by this command:

```bash
# "gsutil config" has no direct "gcloud storage" equivalent. Its functionality is handled by "gcloud auth" and "gcloud config".
$ gsutil config
```

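As the new comment notes, there is no single replacement command. A rough equivalent flow under gcloud looks like the following; which subcommands apply depends on the credential type, and the project ID is a placeholder:

```bash
$ gcloud auth login
$ gcloud config set project my-project-id
```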
@@ -27,7 +27,7 @@
from perfkitbenchmarker.providers.gcp import gcs
from perfkitbenchmarker.providers.gcp import util

-_WINDOWS_SHUTDOWN_SCRIPT_PS1 = 'Write-Host | gsutil cp - {preempt_marker}'
+_WINDOWS_SHUTDOWN_SCRIPT_PS1 = 'Write-Host | gcloud storage cp - {preempt_marker}'

_METADATA_PREEMPT_CMD_WIN = (
'Invoke-RestMethod -Uri'
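The interesting detail is the trailing dash: cp - streams stdin into the destination object, which lets the shutdown script drop a preemption marker without touching local disk. The same pattern works from any shell (marker path is a placeholder):

```bash
hostname | gcloud storage cp - gs://my-bucket/markers/preempted
```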
4 changes: 2 additions & 2 deletions perfkitbenchmarker/scripts/gcsfuse_scripts/read.py
@@ -3,11 +3,11 @@
Example usages:

(1) Specify `--mountpoint` to read from gcsfuse.
-> gsutil ls gs://gcsfuse-benchmark/10M/ | python read.py --mountpoint=/gcs/
+> gcloud storage ls gs://gcsfuse-benchmark/10M/ | python read.py --mountpoint=/gcs/

(2) Omit `--mountpoint` to read from GCS using tf.io.gfile; specify
`--iterations` to run it multiple times.
-> gsutil ls gs://gcsfuse-benchmark/10M/ | python read.py --iterations=3
+> gcloud storage ls gs://gcsfuse-benchmark/10M/ | python read.py --iterations=3
"""

import concurrent.futures