From 8813135d21215799844e146d211f5d42aec6d16a Mon Sep 17 00:00:00 2001
From: Anton Parkhomenko
Date: Sun, 12 Jul 2020 19:27:03 +0700
Subject: [PATCH] Beam: reduce MaxRecordSize to 6900000 bytes (close #287)

---
 .../com.snowplowanalytics.snowplow.enrich.beam/Enrich.scala | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/modules/beam/src/main/scala/com.snowplowanalytics.snowplow.enrich.beam/Enrich.scala b/modules/beam/src/main/scala/com.snowplowanalytics.snowplow.enrich.beam/Enrich.scala
index b846f1e8f..d67944530 100644
--- a/modules/beam/src/main/scala/com.snowplowanalytics.snowplow.enrich.beam/Enrich.scala
+++ b/modules/beam/src/main/scala/com.snowplowanalytics.snowplow.enrich.beam/Enrich.scala
@@ -55,7 +55,8 @@ object Enrich {
   implicit val badRowScioCodec: Coder[BadRow] = Coder.kryo[BadRow]
 
   // the maximum record size in Google PubSub is 10Mb
-  private val MaxRecordSize = 10000000
+  // the maximum PubSubIO message size is 7Mb, to account for base64-encoding overhead
+  private val MaxRecordSize = 6900000
   private val MetricsNamespace = "snowplow"
 
   val enrichedEventSizeDistribution =
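
A note on the arithmetic behind the new constant: base64 encodes every 3 payload bytes as 4 output characters, so a record must fit within 10Mb / (4/3) = 7.5Mb once encoded; PubSubIO caps messages at 7Mb for that reason, and the patch picks 6900000 bytes to keep a margin below that cap. The following is a minimal sketch checking those numbers, assuming only the values quoted in the patch; MaxRecordSizeCheck and base64Size are hypothetical names that do not exist in the Snowplow codebase.

  object MaxRecordSizeCheck extends App {
    val PubSubLimit = 10000000L   // Google PubSub's hard limit, per the original comment
    val PubSubIOLimit = 7000000L  // PubSubIO's effective cap, per the patch comment
    val MaxRecordSize = 6900000L  // the new value introduced by this patch

    // base64 output size: every 3 input bytes (rounded up) become 4 characters
    def base64Size(n: Long): Long = ((n + 2) / 3) * 4

    println(base64Size(MaxRecordSize))             // 9200000, under the 10Mb PubSub limit
    assert(base64Size(MaxRecordSize) < PubSubLimit)
    assert(MaxRecordSize < PubSubIOLimit)          // also leaves headroom under the 7Mb cap
  }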