Upgrade usage of encoding in docs and examples
Signed-off-by: Pablo Sichert <[email protected]>
pablosichert committed Jul 12, 2022
1 parent 2d0cd6b commit 2065569
Showing 59 changed files with 164 additions and 152 deletions.
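Every change below follows the same pattern: the old single-key `encoding` option becomes the nested `encoding.codec` option, and newline-delimited formats gain an explicit framing step. A minimal before/after sketch of the sink-side change, mirroring the `aws_s3` example in the first file:

# before: one key covered both framing and serialization
encoding = "ndjson"                   # new line delimited JSON

# after: framing and serialization are configured separately
framing.method = "newline_delimited"  # new line delimited...
encoding.codec = "json"               # ...JSON

Plain codecs migrate by nesting alone, e.g. `encoding = "text"` becomes `encoding.codec = "text"`.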
17 changes: 9 additions & 8 deletions config/examples/docs_example.toml
@@ -32,12 +32,13 @@ index = "vector-%Y-%m-%d" # daily indices

# Send structured data to a cost-effective long-term storage
[sinks.s3_archives]
inputs = ["apache_parser"] # don't sample for S3
type = "aws_s3"
region = "us-east-1"
bucket = "my-log-archives"
key_prefix = "date=%Y-%m-%d" # daily partitions, hive friendly format
compression = "gzip" # compress final objects
encoding = "ndjson" # new line delimited JSON
inputs = ["apache_parser"] # don't sample for S3
type = "aws_s3"
region = "us-east-1"
bucket = "my-log-archives"
key_prefix = "date=%Y-%m-%d" # daily partitions, hive friendly format
compression = "gzip" # compress final objects
framing.method = "newline_delimited" # new line delimited...
encoding.codec = "json" # ...JSON
[sinks.s3_archives.batch]
-max_bytes = 10000000 # 10mb uncompressed
+max_bytes = 10000000 # 10mb uncompressed
6 changes: 3 additions & 3 deletions config/examples/environment_variables.toml
@@ -28,6 +28,6 @@ data_dir = "/var/lib/vector"
# Print the data to STDOUT for inspection
# Docs: https://vector.dev/docs/reference/sinks/console
[sinks.out]
inputs = ["add_host"]
type = "console"
encoding = "json"
inputs = ["add_host"]
type = "console"
encoding.codec = "json"
15 changes: 8 additions & 7 deletions config/examples/es_s3_hybrid.toml
@@ -26,11 +26,12 @@ data_dir = "/var/lib/vector"

# Send structured data to S3, a durable long-term storage
[sinks.s3_archives]
inputs = ["apache_logs"] # don't sample
type = "aws_s3"
region = "us-east-1"
bucket = "my_log_archives"
encoding = "ndjson"
compression = "gzip"
inputs = ["apache_logs"] # don't sample
type = "aws_s3"
region = "us-east-1"
bucket = "my_log_archives"
framing.method = "newline_delimited"
encoding.codec = "json"
compression = "gzip"
[sinks.s3_archives.batch]
-max_size = 10000000 # 10mb uncompressed
+max_size = 10000000 # 10mb uncompressed
4 changes: 2 additions & 2 deletions config/examples/file_to_cloudwatch_metrics.toml
@@ -34,12 +34,12 @@ tags = {method = "{{method}}", status = "{{status}}"}
[sinks.console_metrics]
inputs = ["log_to_metric"]
type = "console"
encoding = "json"
encoding.codec = "json"

[sinks.console_logs]
inputs = ["remap"]
type = "console"
encoding = "json"
encoding.codec = "json"

[sinks.cloudwatch]
inputs = ["log_to_metric"]
4 changes: 2 additions & 2 deletions config/examples/file_to_prometheus.toml
@@ -51,12 +51,12 @@ name = "bytes_out_histogram"
[sinks.console_metrics]
inputs = ["log_to_metric"]
type = "console"
encoding = "json"
encoding.codec = "json"

[sinks.console_logs]
inputs = ["remap"]
type = "console"
encoding = "text"
encoding.codec = "text"

[sinks.prometheus]
inputs = ["log_to_metric"]
17 changes: 9 additions & 8 deletions config/examples/namespacing/sinks/s3_archives.toml
@@ -1,10 +1,11 @@
# Send structured data to a cost-effective long-term storage
inputs = ["apache_parser"] # don't sample for S3
type = "aws_s3"
region = "us-east-1"
bucket = "my-log-archives"
key_prefix = "date=%Y-%m-%d" # daily partitions, hive friendly format
compression = "gzip" # compress final objects
encoding = "ndjson" # new line delimited JSON
inputs = ["apache_parser"] # don't sample for S3
type = "aws_s3"
region = "us-east-1"
bucket = "my-log-archives"
key_prefix = "date=%Y-%m-%d" # daily partitions, hive friendly format
compression = "gzip" # compress final objects
framing.method = "newline_delimited" # new line delimited...
encoding.codec = "json" # ...JSON
[batch]
-max_bytes = 10000000 # 10mb uncompressed
+max_bytes = 10000000 # 10mb uncompressed
2 changes: 1 addition & 1 deletion config/examples/prometheus_to_console.toml
@@ -14,4 +14,4 @@ scrape_interval_secs = 2
[sinks.console]
inputs = ["prometheus"]
type = "console"
encoding = "json"
encoding.codec = "json"
2 changes: 1 addition & 1 deletion config/examples/stdio.toml
@@ -11,4 +11,4 @@
[sinks.out]
inputs = ["in"]
type = "console"
encoding = "text"
encoding.codec = "text"
6 changes: 3 additions & 3 deletions config/examples/wrapped_json.toml
@@ -34,6 +34,6 @@ data_dir = "/var/lib/vector"
# Print the data to STDOUT for inspection
# Docs: https://vector.dev/docs/reference/sinks/console
[sinks.out]
inputs = ["parse_json"]
type = "console"
encoding = "json"
inputs = ["parse_json"]
type = "console"
encoding.codec = "json"
2 changes: 1 addition & 1 deletion docs/ARCHITECTURE.md
@@ -167,7 +167,7 @@ type = "stdin"
[sinks.bar]
type = "console"
inputs = ["foo"]
encoding = "json"
encoding.codec = "json"
```

After the component construction phase, we'll be left with the tasks for each
2 changes: 1 addition & 1 deletion lib/k8s-e2e-tests/tests/vector-agent.rs
@@ -43,7 +43,7 @@ const CUSTOM_RESOURCE_VECTOR_CONFIG: &str = indoc! {r#"
[sinks.stdout]
type = "console"
inputs = ["kubernetes_logs"]
encoding = "json"
encoding.codec = "json"
"#};

/// This test validates that vector picks up logs at the simplest case
2 changes: 1 addition & 1 deletion rfcs/2020-03-06-1999-api-extensions-for-lua-transform.md
@@ -475,7 +475,7 @@ Here `event` is an encoded event to be produced by the transform, and `lane` is
> [sinks.example_console]
> type = "console"
> inputs = ["example_transform.example_lane"] # would output the event from `example_lane`
> encoding = "text"
> encoding.codec = "text"
> ```
>
> Other components connected to the same transform, but with different lanes names or without lane names at all would not receive any event.
2 changes: 1 addition & 1 deletion rfcs/2020-04-15-2341-wasm-plugins.md
@@ -333,7 +333,7 @@ module = "target/wasm32-wasi/release/echo.wasm"
healthcheck = true
inputs = ["demo"]
type = "console"
encoding = "json"
encoding.codec = "json"
buffer.type = "memory"
buffer.max_events = 500
buffer.when_full = "block"
2 changes: 1 addition & 1 deletion rfcs/2021-07-20-8288-csv-enrichment.md
@@ -37,7 +37,7 @@ To represent the CSV file we have a new top level configuration option.
```toml
[enrichment_tables.csv_file]
type = "file"
encoding = "csv"
encoding.codec = "csv"
path = "\path_to_csv"
delimiter = ","
```
2 changes: 1 addition & 1 deletion skaffold/manifests/config.yaml
@@ -8,4 +8,4 @@ data:
type = "console"
inputs = ["kubernetes_logs", "internal_metrics"]
target = "stdout"
encoding = "json"
encoding.codec = "json"
2 changes: 1 addition & 1 deletion soaks/disabled-tests/fluent_remap_aws_firehose/vector.toml
@@ -41,7 +41,7 @@ stream_name = "soak_fluent_remap_firehose"
endpoint = "http://localhost:8080"
healthcheck.enabled = true
compression = "none"
encoding = "json"
encoding.codec = "json"
region = "us-east-2"
auth.access_key_id = "totallyanaccesskeyid"
auth.secret_access_key = "alsoasecretaccesskey"
@@ -12,7 +12,8 @@ type = "internal_metrics"
[sources.logs]
type = "http"
address = "0.0.0.0:8282"
encoding = "ndjson"
framing.method = "newline_delimited"
encoding.codec = "json"

##
## Transforms
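The hunk above is the source-side counterpart: on an `http` source the old `encoding = "ndjson"` controlled how incoming request bodies are decoded, and it splits the same way. A sketch of the migrated source block, with fields taken from the hunk above:

[sources.logs]
type = "http"
address = "0.0.0.0:8282"
framing.method = "newline_delimited"  # split the request body into frames on newlines
encoding.codec = "json"               # parse each frame as JSON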
2 changes: 1 addition & 1 deletion soaks/tests/http_pipelines_blackhole/vector.toml
@@ -10,7 +10,7 @@ type = "internal_metrics"
[sources.logs]
type = "http"
address = "0.0.0.0:8282"
encoding = "text"
encoding.codec = "text"

##
## Transforms
2 changes: 1 addition & 1 deletion soaks/tests/http_pipelines_blackhole_acks/vector.toml
@@ -10,7 +10,7 @@ type = "internal_metrics"
[sources.logs]
type = "http"
address = "0.0.0.0:8282"
encoding = "text"
encoding.codec = "text"
acknowledgements = true

##
2 changes: 1 addition & 1 deletion soaks/tests/http_pipelines_no_grok_blackhole/vector.toml
@@ -10,7 +10,7 @@ type = "internal_metrics"
[sources.logs]
type = "http"
address = "0.0.0.0:8282"
encoding = "text"
encoding.codec = "text"

##
## Transforms
2 changes: 1 addition & 1 deletion soaks/tests/http_text_to_http_json/vector.toml
@@ -7,7 +7,7 @@ data_dir = "/var/lib/vector"
[sources.logs]
type = "http"
address = "0.0.0.0:8282"
encoding = "text"
encoding.codec = "text"

##
## Sinks
2 changes: 1 addition & 1 deletion soaks/tests/http_to_http_acks/vector.toml
@@ -25,7 +25,7 @@ address = "0.0.0.0:9090"
type = "http"
inputs = ["http_source"]
uri = "http://localhost:8080"
encoding = "text"
encoding.codec = "text"
healthcheck.enabled = false
buffer.type = "memory"
buffer.max_events = 50000 # buffer 50 payloads at a time
2 changes: 1 addition & 1 deletion soaks/tests/http_to_http_noack/vector.toml
@@ -25,7 +25,7 @@ address = "0.0.0.0:9090"
type = "http"
inputs = ["http_source"]
uri = "http://localhost:8080"
encoding = "text"
encoding.codec = "text"
healthcheck.enabled = false
buffer.type = "memory"
buffer.max_events = 50000 # buffer 50 payloads at a time
2 changes: 1 addition & 1 deletion soaks/tests/socket_to_socket_blackhole/vector.toml
@@ -26,4 +26,4 @@ type = "socket"
inputs = ["socket_source"]
mode = "tcp"
address = "localhost:8080"
encoding = "json"
encoding.codec = "json"
6 changes: 4 additions & 2 deletions soaks/tests/splunk_hec_route_s3/vector.toml
@@ -39,7 +39,8 @@ inputs = ["container_type.sidecar"]
endpoint = "http://localhost:8080"
bucket = "vector-soak-sidecar"

encoding.codec = "ndjson"
framing.method = "newline_delimited"
encoding.codec = "json"
encoding.except_fields = ["timestamp"]
key_prefix = "v1/source_type/sidecar/aws_account_id/{{attrs.aws_account}}/system_id/{{attrs.systemid}}/service/{{attrs.c2cService}}/partition/{{attrs.c2cPartition}}/stage/{{attrs.c2cStage}}/year/%Y/month/%m/day/%d/hour/%H"

@@ -54,7 +55,8 @@ inputs = ["container_type.service"]
endpoint = "http://localhost:8080"
bucket = "vector-soak-service"

encoding.codec = "ndjson"
framing.method = "newline_delimited"
encoding.codec = "json"
encoding.except_fields = ["timestamp"]
key_prefix = "v1/source_type/app/system_id/{{attrs.systemid}}/service/{{attrs.c2cService}}/partition/{{attrs.c2cPartition}}/stage/{{attrs.c2cStage}}/year/%Y/month/%m/day/%d/hour/%H"

2 changes: 1 addition & 1 deletion soaks/tests/splunk_hec_to_splunk_hec_logs_acks/vector.toml
@@ -25,7 +25,7 @@ address = "0.0.0.0:9090"
type = "splunk_hec_logs"
inputs = ["splunk_hec"]
endpoint = "http://localhost:8080"
encoding = "json"
encoding.codec = "json"
token = "abcd1234"
healthcheck.enabled = false
acknowledgements.indexer_acknowledgements_enabled = true
@@ -25,7 +25,7 @@ address = "0.0.0.0:9090"
type = "splunk_hec_logs"
inputs = ["splunk_hec"]
endpoint = "http://localhost:8080"
encoding = "json"
encoding.codec = "json"
token = "abcd1234"
healthcheck.enabled = false
acknowledgements.indexer_acknowledgements_enabled = false
2 changes: 1 addition & 1 deletion soaks/tests/syslog_humio_logs/vector.toml
@@ -26,6 +26,6 @@ address = "0.0.0.0:9090"
type = "humio_logs"
inputs = ["syslog"]
endpoint = "http://localhost:8080"
encoding = "json"
encoding.codec = "json"
token = "humio_token"
healthcheck.enabled = false
2 changes: 1 addition & 1 deletion soaks/tests/syslog_splunk_hec_logs/vector.toml
@@ -26,6 +26,6 @@ address = "0.0.0.0:9090"
type = "splunk_hec_logs"
inputs = ["syslog"]
endpoint = "http://localhost:8080"
encoding = "json"
encoding.codec = "json"
token = "abcd1234"
healthcheck.enabled = false
9 changes: 6 additions & 3 deletions src/config/format.rs
@@ -149,7 +149,7 @@ mod tests {
type = "socket"
mode = "tcp"
inputs = ["sample"]
encoding = "text"
encoding.codec = "text"
address = "127.0.0.1:9999"
"#;

@@ -192,7 +192,8 @@
r#" type: "socket""#,
r#" mode: "tcp""#,
r#" inputs: ["sample"]"#,
r#" encoding: "text""#,
r#" encoding:"#,
r#" codec: "text""#,
r#" address: "127.0.0.1:9999""#,
),
Format::Yaml,
@@ -231,7 +232,9 @@
"type": "socket",
"mode": "tcp",
"inputs": ["sample"],
"encoding": "text",
"encoding": {
"codec": "text"
},
"address": "127.0.0.1:9999"
}
}